[
  {
    "path": ".github/draft-release-notes-config.yml",
    "content": "# The overall template of the release notes\ntemplate: |\n  $CHANGES\n\n# Setting the formatting and sorting for the release notes body\nname-template: Version (set version here)\nchange-template: \"* $TITLE ([#$NUMBER]($URL))\"\nsort-by: merged_at\nsort-direction: ascending\nreplacers:\n  - search: \"##\"\n    replace: \"###\"\n\n# Organizing the tagged PRs into unified categories\ncategories:\n  - title: \"Breaking changes\"\n    labels:\n      - \"breaking change\"\n  - title: \"Features\"\n    labels:\n      - \"feature\"\n  - title: \"Enhancements\"\n    labels:\n      - \"enhancement\"\n  - title: \"Bug Fixes\"\n    labels:\n      - \"bug\"\n      - \"bug fix\"\n  - title: \"Infrastructure\"\n    labels:\n      - \"infra\"\n      - \"test\"\n      - \"dependencies\"\n      - \"github actions\"\n  - title: \"Documentation\"\n    labels:\n      - \"documentation\"\n  - title: \"Maintenance\"\n    labels:\n      - \"version upgrade\"\n      - \"odfe release\"\n  - title: \"Refactoring\"\n    labels:\n      - \"refactor\"\n      - \"code quality\"\n"
  },
  {
    "path": ".github/workflows/draft-release-notes-workflow.yml",
    "content": "name: Release Drafter\n\non:\n  push:\n    branches:\n      - main\n\njobs:\n  update_release_draft:\n    name: Update draft release notes\n    runs-on: ubuntu-latest\n    steps:\n      - name: Update draft release notes\n        uses: release-drafter/release-drafter@v5\n        with:\n          config-name: draft-release-notes-config.yml\n          name: Version (set here)\n          tag: (None)\n        env:\n          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}"
  },
  {
    "path": ".github/workflows/maven-release.yml",
    "content": "# This workflow will build a package using Maven and then publish it to\n# to our staging repo for a final release to maven central\n\n# This workflow will also create a tag and github release for the current commit\n# The github release will be have '-java' added to name to distinguish rust vs java releases\n# Example:\n# current version in POM.XML and in maven central = 3.1.0\n# tag and github release = 3.1.0-java\nname: Publish Official Release to Maven Staging\n\ndefaults:\n  run:\n    working-directory: Java/\n\non:\n  workflow_dispatch:\n\n\npermissions:\n  contents: write\n\njobs:\n  build:\n    name: Build project and publish release\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v3\n    - name: Set up JDK 17\n      uses: actions/setup-java@v1\n      with:\n        java-version: 17\n        cache: maven\n        server-id: ossrh\n        server-username: MAVEN_USERNAME\n        server-password: MAVEN_PASSWORD\n        gpg-private-key: ${{ secrets.MAVEN_GPG_PRIVATE_KEY }}\n        gpg-passphrase: MAVEN_GPG_PASSPHRASE\n    - name: Extract project version\n      id: project\n      run: echo \"version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)\" >> $GITHUB_OUTPUT\n    - name: Build with Maven\n      run: mvn -B package --file pom.xml\n    - name: Publish to Maven central\n      if: ${{ !endsWith(steps.project.outputs.version, '-SNAPSHOT') }}\n      run: mvn -B deploy --file pom.xml -Pgpg-sign\n      env:\n        MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}\n        MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}\n        MAVEN_GPG_PASSPHRASE: ${{ secrets.MAVEN_GPG_PASSPHRASE }}\n    \n    - name: Create Release\n      id: create_release\n      uses: actions/create-release@v1\n      env:\n        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n      with:\n        tag_name: ${{ steps.project.outputs.version }}-java\n        release_name: ${{ steps.project.outputs.version }}-java\n        draft: false\n       
 prerelease: false\n"
  },
  {
    "path": ".github/workflows/maven-snapshot.yml",
    "content": "name: Build and publish snapshot on push to main\non:\n  push:\n    branches:\n      - main\n\n\npermissions:\n  contents: read\n\ndefaults:\n  run:\n    working-directory: Java/\n\njobs:\n  build:\n    name: Build project and publish SNAPSHOT\n    runs-on: ubuntu-latest  \n    steps:\n      - name: Checkout Repository\n        uses: actions/checkout@v1\n      - name: Set up java for publishing snapshot\n        uses: actions/setup-java@v1\n        with:\n          java-version: 17\n          server-id: ossrh-snapshot\n          server-username: MAVEN_USERNAME\n          server-password: MAVEN_PASSWORD\n          gpg-private-key: ${{ secrets.MAVEN_GPG_PRIVATE_KEY }}\n          gpg-passphrase: MAVEN_GPG_PASSPHRASE\n      - name: Build with Maven\n        run: mvn -B package --file pom.xml\n      - name: Extract project version\n        id: project\n        run: echo \"version=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)\" >> $GITHUB_OUTPUT\n      - name: Publish to snapshot repo\n        if: ${{ endsWith(steps.project.outputs.version, '-SNAPSHOT') }}\n        run: mvn -B deploy --file pom.xml -Pgpg-sign\n        env:\n          MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}\n          MAVEN_PASSWORD: ${{ secrets.OSSRH_TOKEN }}\n          MAVEN_GPG_PASSPHRASE: ${{ secrets.MAVEN_GPG_PASSPHRASE }}"
  },
  {
    "path": ".github/workflows/maven.yml",
    "content": "name: Java CI\n\non:\n  pull_request:\n    branches: \n      - '*'\n    paths:\n      - Java/**\n\n\npermissions:\n  contents: read\n\ndefaults:\n  run:\n    working-directory: Java/\n\njobs:\n  build:\n    runs-on: ubuntu-latest\n    steps:\n    - uses: actions/checkout@v1\n    - name: Set up JDK 17\n      uses: actions/setup-java@v1\n      with:\n        java-version: 17\n    - name: Build with Maven\n      run: mvn -B package --file pom.xml\n"
  },
  {
    "path": ".github/workflows/rust.yml",
    "content": "name: Rust CI\n\non:\n  pull_request:\n    branches: [ main ]\n    paths: [ Rust/** ]\n\n\npermissions:\n  contents: read\n\nenv:\n  CARGO_TERM_COLOR: always\n  \ndefaults:\n  run:\n    working-directory: Rust/\n\njobs:\n  build:\n\n    runs-on: ubuntu-latest\n\n    steps:\n    - uses: actions/checkout@v2\n    - name: Build Rust\n      run: cargo build --verbose\n    - name: Run Rust Tests\n      run: cargo test --verbose\n"
  },
  {
    "path": ".gitignore",
    "content": "build\ntarget\n.idea\n*.iml\n.project\n.settings\n.classpath\n._.DS_Store\n.DS_Store\nJava/*/bin/\n\n"
  },
  {
    "path": "CODE_OF_CONDUCT.md",
    "content": "## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).\nFor more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact\nopensource-codeofconduct@amazon.com with any additional questions or comments.\n"
  },
  {
    "path": "CONTRIBUTING.md",
    "content": "# Contributing Guidelines\n\nThank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional\ndocumentation, we greatly value feedback and contributions from our community.\n\nPlease read through this document before submitting any issues or pull requests to ensure we have all the necessary\ninformation to effectively respond to your bug report or contribution.\n\n\n## Reporting Bugs/Feature Requests\n\nWe welcome you to use the GitHub issue tracker to report bugs or suggest features.\n\nWhen filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already\nreported the issue. Please try to include as much information as you can. Details like these are incredibly useful:\n\n* A reproducible test case or series of steps\n* The version of our code being used\n* Any modifications you've made relevant to the bug\n* Anything unusual about your environment or deployment\n\n\n## Contributing via Pull Requests\nContributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:\n\n1. You are working against the latest source on the *master* branch.\n2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.\n3. You open an issue to discuss any significant work - we would hate for your time to be wasted.\n\nTo send us a pull request, please:\n\n1. Fork the repository.\n2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.\n3. Ensure local tests pass.\n4. Commit to your fork using clear commit messages.\n5. Send us a pull request, answering any default questions in the pull request interface.\n6. 
Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.\n\nGitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and\n[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).\n\n\n## Finding contributions to work on\nLooking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any 'help wanted' issues is a great place to start.\n\n\n## Code of Conduct\nThis project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).\nFor more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact\nopensource-codeofconduct@amazon.com with any additional questions or comments.\n\n\n## Security issue notifications\nIf you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.\n\n\n## Licensing\n\nSee the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.\n\nWe may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.\n"
  },
  {
    "path": "Java/README.md",
    "content": "# Random Cut Forest\n\nThis directory contains a Java implementation of the Random Cut Forest data structure and algorithms\nfor anomaly detection, density estimation, imputation, and forecast. The goal of this library \nis to be easy to use and to strike a balance between efficiency and extensibility. Please do not forget \nto look into the ParkServices package that provide many augmented functionalities such as explicit determination \nof anomaly grade based on the first hand understanding of the core algorithm. Please also see randomcutforest-examples \nfor a few detailed examples and extensions. Please do not hesitate to creat an issue for any discussion item.\n\n## Basic operations\n\nTo create a RandomCutForest instance with all parameters set to defaults:\n\n```java\nint dimensions = 5; // The number of dimensions in the input data, required\nRandomCutForest forest = RandomCutForest.defaultForest(dimensions);\n```\nWe recommend using shingle size which correspond to contextual analysis of data, \nand RCF uses ideas not dissimilar from higher order Markov Chains to improve its \naccuracy. An option is provided to have the shingles be constructed internally. 
\nTo explicitly set optional parameters like number of trees in the forest or \nsample size, RandomCutForest provides a builder (for example with 4 input dimensions for \na 4-way multivariate analysis):\n\n```java\nRandomCutForest forest = RandomCutForest.builder()\n        .numberOfTrees(90)\n        .sampleSize(200) // use this cover the phenomenon of interest\n                         // for analysis of 5 minute aggregations, a week has\n                         // about 12 * 24 * 7 starting points of interest\n                         // larger sample sizes will be larger models \n        .dimensions(inputDimension*4) // still required!\n        .timeDecay(0.2) // determines half life of data\n        .randomSeed(123)\n        .internalShingleEnabled(true)\n        .shingleSize(7)\n        .build();\n```\n\nTypical usage of a forest is to compute a statistic on an input data point and then update the forest with that point \nin a loop.\n\n```java\nSupplier<double[]> input = ...;\n\nwhile (true) {\n    double[] point = input.get();\n    double score = forest.getAnomalyScore(point);\n    forest.update(point);\n    System.out.println(\"Anomaly Score: \" + score);\n}\n```\n\n## Limitations\n\n* Update operations in a forest are *not thread-safe*. Running concurrent updates or running an update concurrently\n  with a traversal may result in errors.\n\n\n## Forest Configuration\n\nThe following parameters can be configured in the RandomCutForest builder. 
\n\n| Parameter Name              | Type    | Description                                                                                                                                                                                                                                                                                                                                                    | Default Value                                                                         |\n|-----------------------------|---------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------|\n| dimensions                  | int     | The number of dimensions in the input data.                                                                                                                                                                                                                                                                                                                    | Required, no default value. Should be the product of input dimensions and shingleSize |\n| shingleSize                 | int     | The number of contiguous observations across all the input variables that would be used for analysis                                                                                                                                                                                                                                                           | Strongly recommended for contextual anomalies. 
Required for Forecast/Extrapolate      |\n| lambda                      | double  | The decay factor used by stream samplers in this forest. See the next section for guidance.                                                                                                                                                                                                                                                                    | 1 / (10 * sampleSize)                                                                 |\n| numberOfTrees               | int     | The number of trees in this forest.                                                                                                                                                                                                                                                                                                                            | 50                                                                                    |\n| outputAfter                 | int     | The number of points required by stream samplers before results are returned.                                                                                                                                                                                                                                                                                  | 0.25 * sampleSize                                                                     |\n| internalShinglingEnabled    | boolean | Whether the shingling is performed by RCF itself since it has already seen previous values.                                                                                                                                                                                                                                                                    | false (for historical reasons). Recommended : true, will result in smaller models.    
|\n| parallelExecutionEnabled    | boolean | If true, then the forest will create an internal threadpool. Forest updates and traversals will be submitted to this threadpool, and individual trees will be updated or traversed in parallel. For larger shingle sizes, dimensions, and number of trees, parallelization may improve throughput. We recommend users benchmark against their target use case. | false                                                                                 |\n| randomSeed                  | long    | A seed value used to initialize the random number generators in this forest.                                                                                                                                                                                                                                                                                   |                                                                                       |\n| sampleSize                  | int     | The sample size used by stream samplers in this forest                                                                                                                                                                                                                                                                                                         | 256                                                                                   |\n| centerOfMassEnabled         | boolean | If true, then tree nodes in the forest will compute their center of mass as part of tree update operations.                                                                                                                                                                                                                                                    
| false                                                                                 |\n| storeSequenceIndexesEnabled | boolean | If true, then sequence indexes (ordinals indicating when a point was added to a tree) will be stored in the forest along with poitn values.                                                                                                                                                                                                                    | false                                                                                 |\n| threadPoolSize              | int     | The number of threads to use in the internal threadpool.                                                                                                                                                                                                                                                                                                       | Number of available processors - 1                                                    |\n\nThe above parameters are the most common and historical. Please use the issues to request additions/discussions of other parameters of interest.\n\nRandomCutForest primarily provides an estimation (say anomaly score, or extrapolation over a forecast horizon) and using that raw estimation can be challenging. The ParkServices package provides \nseveral capabilities (ThresholdedRandomCutForest, RCFCaster, respectively) for distilling the scores to a determination of \nanomaly/otherwise (an assesment of grade) or calibrated conformal forecasts. These have natural parameter choices that are different \nfrom the core RandomCutForest -- for example internalShinglingEnabled defaults to true since that is more natural in those contexts.\nThe package examples provides a collection of examples and uses of parameters, we draw the attention to ThresholdedMultiDimensionalExample \nand RCFCasterExample. 
If one is interested in sequential analysis of a series of consecutive inputs, check out SequentialAnomalyExample. \nParkServices also exposes many other functionalities of RCF which were covert, such as clustering (including multi-centroid representations) \n-- see NumericGLADExample for instance. \n\n## Choosing a `timeDecay` value for your application\n\nWhen we submit a point to the sampler, it is included into the sample with some probability, and \nit will remain in the sample for some number of steps before being replaced. Call the number of steps that\na point is included in the sample the \"lifetime\" of the point (which may be 0). Over a finite time\nwindow, the distribution of the lifetime of a point is approximately exponential with parameter\n`lambda`. Thus, `1 / timeDecay` is approximately the average number of steps that a point will be included\nin the sample. By default, we set `timeDecay` equal to `1 / (10 * sampleSize)`.\n\nAlternatively, if you want the probability that a point survives longer than n steps to be 0.05,\nyou can solve for `lambda` in the equation `exp(-lambda * n) = 0.05`.\n\nWe note again that this is heuristic and not mathematically rigorous. We refer the interested reader\nto [Weighted Random Sampling (2005;  Efraimidis, Spirakis)](http://citeseerx.ist.psu.edu/viewdoc/download;jsessionid=BEB1FE0AB3C0129B822D2CE5EABBFD42?doi=10.1.1.591.4194&rep=rep1&type=pdf).\n\n## Setup\n\n1. Checkout this package from our GitHub repository.\n1. Install [Apache Maven](https://maven.apache.org/) by following the directions on that site.\n1. 
Set your `JAVA_HOME` environment variable to a JDK version 8 or greater.\n\n## Build\n\nBuild the modules in this package and run the full test suite by running\n\n```text\nmvn package\n```\n\nFor a faster build that excludes the long-running \"functional\" tests, run\n\n```text\nmvn package -DexcludedGroups=functional\n```\n\n## Build Command-line (CLI) usage\n\n> **Important.** The CLI applications use `String::split` to read delimited data\n> and as such are **not intended for production use**.\n\nFor some of the algorithms included in this package there are CLI applications\nthat can be used for experimentation as well as a way to learn about these\nalgorithms and their hyperparameters. After building the project you can invoke\nan example CLI application by adding the core jar file to your classpath.\n\nIn the example below we train and score a Random Cut Forest model on the\nthree-dimensional data shown in Figure 3 in the original RCF paper.\n([PDF][rcf-paper]) These example data can be\nfound at `../example-data/rcf-paper.csv`:\n\n```text\n$ tail data/example.csv\n-5.0074,-0.0038,-0.0237\n-5.0029,0.0170,-0.0057\n-4.9975,-0.0102,-0.0065\n4.9878,0.0136,-0.0087\n5.0118,0.0098,-0.0057\n0.0158,0.0061,0.0091\n5.0167,0.0041,0.0054\n-4.9947,0.0126,-0.0010\n-5.0209,0.0004,-0.0033\n4.9923,-0.0142,0.0030\n```\n\n(Note that there is one data point above that is not like the others.) The\n`AnomalyScoreRunner` application reads in each line of the input data as a\nvector data point, scores the data point, and then updates the model with this\npoint. 
The program output appends a column of anomaly scores to the input:\n\n```text\n$ java -cp core/target/randomcutforest-core-4.4.0.jar com.amazon.randomcutforest.runner.AnomalyScoreRunner < ../example-data/rcf-paper.csv > example_output.csv\n$ tail example_output.csv\n-5.0029,0.0170,-0.0057,0.8129401629464965\n-4.9975,-0.0102,-0.0065,0.6591046054520615\n4.9878,0.0136,-0.0087,0.8552217070518414\n5.0118,0.0098,-0.0057,0.7224686064066762\n0.0158,0.0061,0.0091,2.8299054033889814\n5.0167,0.0041,0.0054,0.7571453322237215\n-4.9947,0.0126,-0.0010,0.7259960347128676\n-5.0209,0.0004,-0.0033,0.9119498264685114\n4.9923,-0.0142,0.0030,0.7310102658466711\nDone.\n```\n\n(As you can see the anomalous data point was given large anomaly score.) You can\nread additional usage instructions, including options for setting model\nhyperparameters, using the `--help` flag:\n\n```text\n$ java -cp core/target/randomcutforest-core-4.4.0.jar com.amazon.randomcutforest.runner.AnomalyScoreRunner --help\nUsage: java -cp target/random-cut-forest-4.4.0.jar com.amazon.randomcutforest.runner.AnomalyScoreRunner [options] < input_file > output_file\n\nCompute scalar anomaly scores from the input rows and append them to the output rows.\n\nOptions:\n        --delimiter, -d: The character or string used as a field delimiter. (default: ,)\n        --header-row: Set to 'true' if the data contains a header row. (default: false)\n        --number-of-trees, -n: Number of trees to use in the forest. (default: 100)\n        --random-seed: Random seed to use in the Random Cut Forest (default: 42)\n        --sample-size, -s: Number of points to keep in sample for each tree. (default: 256)\n        --shingle-cyclic, -c: Set to 'true' to use cyclic shingles instead of linear shingles. (default: false)\n        --shingle-size, -g: Shingle size to use. (default: 1)\n        --window-size, -w: Window size of the sample or 0 for no window. 
(default: 0)\n\n        --help, -h: Print this help message and exit.\n```\n\nOther CLI applications are available in the `com.amazon.randomcutforest.runner`\npackage.\n\n## Testing\n\nThe core library test suite is divided into unit tests and \"functional\" tests. By \"functional\", we mean tests that \nverify the expected behavior of the algorithms defined in the package. For example, a functional test for the anomaly \ndetection algorithm will first train a forest on a pre-defined distribution and then verify that the forest assigns a \nhigh anomaly score to anomalous points (where \"anomalous\" is with respect to the specified distribution). Functional \ntests are indicated both in the test class name (e.g., `RandomCutForestFunctionalTest`) and in a `@Tag` annotation on \nthe test class.\n\nThe full test suite including functional tests currently takes over 10 minutes to complete. If you are contributing to\nthis package, we recommend excluding the functional tests while actively developing, and only running the full test\nsuite before creating a pull request. Functional tests can be excluded from Maven build targets by passing\n`-DexcludedGroups=functional` at the command line. For example:\n\n```text\n% mvn test -DexcludedGroups=functional\n```\n\nIn the core library we have 90% line coverage with the full test suite, and 80% line coverage when running the unit \ntests only (i.e., when excluding functional tests). Our goal is to reach 100% unit test coverage, and we welcome (and \nencourage!) test contributions. After running tests with Maven, you can see the test coverage broken out by class by \nopening `target/site/jacoco/index.html` in a web browser.\n\nOur tests are implemented in [JUnit 5](https://junit.org/junit5/) with [Mockito](https://site.mockito.org/), [Powermock](https://github.com/powermock/powermock), and [Hamcrest](http://hamcrest.org/) for testing. 
\nTest dependencies will be downloaded automatically when invoking `mvn test` or `mvn package`.\n\n## Benchmarks\n\nThe benchmark modules defines microbenchmarks using the [JMH](https://openjdk.java.net/projects/code-tools/jmh/) \nframework. Build an executable jar containing the benchmark code by running\n\n```text\n% # (Optional) To benchmark the code in your local repository, build and install to your local Maven repository\n% # Otherwise, benchmark dependencies will be pulled from Maven central\n% mvn package install -DexcludedGroups=functional\n% \n% mvn -pl benchmark package assembly:single\n```\n\nTo invoke the full benchmark suite:\n\n```text\n% java -jar benchmark/target/randomcutforest-benchmark-4.4.0-jar-with-dependencies.jar\n```\n\nThe full benchmark suite takes a long time to run. You can also pass a regex at the command-line, then only matching\nbenchmark methods will be executed.\n\n```text\n% java -jar benchmark/target/randomcutforest-benchmark-4.4.0-jar-with-dependencies.jar RandomCutForestBenchmark\\.updateAndGetAnomalyScore\n```\n\n[rcf-paper]: http://proceedings.mlr.press/v48/guha16.pdf\n"
  },
  {
    "path": "Java/RELEASING.md",
    "content": "- [Overview](#overview)\n- [Feature Branches](#feature-branches)\n- [Release Labels](#release-labels)\n- [Releasing](#releasing)\n\n## Overview\n\nThis document explains the release strategy for the Random Cut Forest project.\n\n## Feature Branches\n\nDo not create branches in the upstream repo, use your fork, for the exception of long lasting feature branches that require active collaboration from multiple developers. Name feature branches `feature/<thing>`. Once the work is merged to `main`, please make sure to delete the feature branch.\n\n## Release Labels\n\nRepositories create consistent release labels, such as `3.0.0-java`. Use release labels to target an issue or a PR for a given release.\n\n## Releasing\n\nThe release process is run by a release manager volunteering from amongst the maintainers.\n\n1. Create a PR to bump version to desired release candidate (e.g. 3.0.0).\n2. Click run on the maven-release workflow in Github Actions which uploads the artifacts to our staging repository, creates a new tag, and a new Github release.\n3. Login into the nexus staging repository, verify artifact was signed successfully and click release to officially push to maven central."
  },
  {
    "path": "Java/benchmark/pom.xml",
    "content": "<?xml version=\"1.0\"?>\n<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n    xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n  <modelVersion>4.0.0</modelVersion>\n\n  <parent>\n    <groupId>software.amazon.randomcutforest</groupId>\n    <artifactId>randomcutforest-parent</artifactId>\n    <version>4.4.0</version>\n  </parent>\n\n  <artifactId>randomcutforest-benchmark</artifactId>\n  <packaging>jar</packaging>\n\n  <properties>\n    <jmh.version>1.22</jmh.version>\n  </properties>\n\n  <dependencies>\n    <dependency>\n      <groupId>software.amazon.randomcutforest</groupId>\n      <artifactId>randomcutforest-core</artifactId>\n      <version>${project.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>software.amazon.randomcutforest</groupId>\n      <artifactId>randomcutforest-testutils</artifactId>\n      <version>${project.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>software.amazon.randomcutforest</groupId>\n      <artifactId>randomcutforest-serialization</artifactId>\n      <version>${project.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>org.openjdk.jmh</groupId>\n      <artifactId>jmh-core</artifactId>\n      <version>${jmh.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>org.openjdk.jmh</groupId>\n      <artifactId>jmh-generator-annprocess</artifactId>\n      <version>${jmh.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>com.github.jbellis</groupId>\n      <artifactId>jamm</artifactId>\n      <version>0.3.3</version>\n    </dependency>\n    <dependency>\n      <groupId>com.fasterxml.jackson.core</groupId>\n      <artifactId>jackson-core</artifactId>\n      <version>2.16.0</version>\n    </dependency>\n    <dependency>\n      <groupId>com.fasterxml.jackson.core</groupId>\n      
<artifactId>jackson-databind</artifactId>\n      <version>2.16.0</version>\n    </dependency>\n    <dependency>\n      <groupId>io.protostuff</groupId>\n      <artifactId>protostuff-core</artifactId>\n      <version>1.8.0</version>\n    </dependency>\n    <dependency>\n      <groupId>io.protostuff</groupId>\n      <artifactId>protostuff-runtime</artifactId>\n      <version>1.8.0</version>\n    </dependency>\n  </dependencies>\n\n  <build>\n    <plugins>\n      <plugin>\n        <artifactId>maven-assembly-plugin</artifactId>\n        <version>3.2.0</version>\n        <configuration>\n          <descriptorRefs>\n            <descriptorRef>jar-with-dependencies</descriptorRef>\n          </descriptorRefs>\n          <archive>\n            <manifest>\n              <mainClass>org.openjdk.jmh.Main</mainClass>\n            </manifest>\n          </archive>\n        </configuration>\n      </plugin>\n    </plugins>\n  </build>\n\n</project>\n"
  },
  {
    "path": "Java/benchmark/src/main/java/com/amazon/randomcutforest/RandomCutForestBenchmark.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport org.openjdk.jmh.annotations.Benchmark;\nimport org.openjdk.jmh.annotations.Fork;\nimport org.openjdk.jmh.annotations.Level;\nimport org.openjdk.jmh.annotations.Measurement;\nimport org.openjdk.jmh.annotations.OperationsPerInvocation;\nimport org.openjdk.jmh.annotations.Param;\nimport org.openjdk.jmh.annotations.Scope;\nimport org.openjdk.jmh.annotations.Setup;\nimport org.openjdk.jmh.annotations.State;\nimport org.openjdk.jmh.annotations.Warmup;\nimport org.openjdk.jmh.infra.Blackhole;\n\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\n@Warmup(iterations = 2)\n@Measurement(iterations = 5)\n@Fork(value = 1)\n@State(Scope.Thread)\npublic class RandomCutForestBenchmark {\n\n    public final static int DATA_SIZE = 50_000;\n    public final static int INITIAL_DATA_SIZE = 25_000;\n\n    @State(Scope.Benchmark)\n    public static class BenchmarkState {\n        @Param({ \"40\" })\n        int baseDimensions;\n\n        @Param({ \"1\" })\n        int shingleSize;\n\n        @Param({ \"30\" })\n        int numberOfTrees;\n\n        @Param({ \"1.0\", \"0.9\", 
\"0.8\", \"0.7\", \"0.6\", \"0.5\", \"0.4\", \"0.3\", \"0.2\", \"0.1\", \"0.0\" })\n        double boundingBoxCacheFraction;\n\n        @Param({ \"false\", \"true\" })\n        boolean parallel;\n\n        double[][] data;\n        RandomCutForest forest;\n\n        @Setup(Level.Trial)\n        public void setUpData() {\n            int dimensions = baseDimensions * shingleSize;\n            NormalMixtureTestData gen = new NormalMixtureTestData();\n            data = gen.generateTestData(INITIAL_DATA_SIZE + DATA_SIZE, dimensions);\n        }\n\n        @Setup(Level.Invocation)\n        public void setUpForest() {\n            forest = RandomCutForest.builder().numberOfTrees(numberOfTrees).dimensions(baseDimensions * shingleSize)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).parallelExecutionEnabled(parallel)\n                    .boundingBoxCacheFraction(boundingBoxCacheFraction).randomSeed(99).build();\n\n            for (int i = 0; i < INITIAL_DATA_SIZE; i++) {\n                forest.update(data[i]);\n            }\n        }\n    }\n\n    private RandomCutForest forest;\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest updateOnly(BenchmarkState state) {\n        double[][] data = state.data;\n        forest = state.forest;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            forest.update(data[i]);\n        }\n\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest scoreOnly(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double score = 0.0;\n        Random rnd = new Random(0);\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            score += forest.getAnomalyScore(data[i]);\n            if (rnd.nextDouble() < 0.01) {\n                forest.update(data[i]); // this should execute sparingly\n            }\n 
       }\n\n        blackhole.consume(score);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest scoreAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double score = 0.0;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            score = forest.getAnomalyScore(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(score);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest attributionAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        DiVector vector = new DiVector(forest.getDimensions());\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            vector = forest.getAnomalyAttribution(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(vector);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest basicDensityAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        DensityOutput output = new DensityOutput(forest.getDimensions(), forest.getSampleSize());\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            output = forest.getSimpleDensity(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest basicNeighborAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        List<Neighbor> output = null;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n        
    output = forest.getNearNeighborsInSample(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest imputeAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double[] output = null;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            output = forest.imputeMissingValues(data[i], 1, new int[] { forest.dimensions - 1 });\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n}\n"
  },
  {
    "path": "Java/benchmark/src/main/java/com/amazon/randomcutforest/RandomCutForestShingledBenchmark.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport org.openjdk.jmh.annotations.Benchmark;\nimport org.openjdk.jmh.annotations.Fork;\nimport org.openjdk.jmh.annotations.Level;\nimport org.openjdk.jmh.annotations.Measurement;\nimport org.openjdk.jmh.annotations.OperationsPerInvocation;\nimport org.openjdk.jmh.annotations.Param;\nimport org.openjdk.jmh.annotations.Scope;\nimport org.openjdk.jmh.annotations.Setup;\nimport org.openjdk.jmh.annotations.State;\nimport org.openjdk.jmh.annotations.Warmup;\nimport org.openjdk.jmh.infra.Blackhole;\n\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n@Warmup(iterations = 2)\n@Measurement(iterations = 5)\n@Fork(value = 1)\n@State(Scope.Thread)\npublic class RandomCutForestShingledBenchmark {\n\n    public final static int DATA_SIZE = 50_000;\n    public final static int INITIAL_DATA_SIZE = 25_000;\n\n    @State(Scope.Benchmark)\n    public static class BenchmarkState {\n        @Param({ \"5\" })\n        int baseDimensions;\n\n        @Param({ \"8\" })\n        int shingleSize;\n\n        @Param({ \"30\" })\n        int numberOfTrees;\n\n        @Param({ \"1.0\", 
\"0.9\", \"0.8\", \"0.7\", \"0.6\", \"0.5\", \"0.4\", \"0.3\", \"0.2\", \"0.1\", \"0.0\" })\n        double boundingBoxCacheFraction;\n\n        @Param({ \"false\", \"true\" })\n        boolean parallel;\n\n        double[][] data;\n        RandomCutForest forest;\n\n        @Setup(Level.Trial)\n        public void setUpData() {\n            data = ShingledMultiDimDataWithKeys.getMultiDimData(DATA_SIZE + INITIAL_DATA_SIZE, 50, 100, 5, 17,\n                    baseDimensions).data;\n        }\n\n        @Setup(Level.Invocation)\n        public void setUpForest() {\n            forest = RandomCutForest.builder().numberOfTrees(numberOfTrees).dimensions(baseDimensions * shingleSize)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).parallelExecutionEnabled(parallel)\n                    .boundingBoxCacheFraction(boundingBoxCacheFraction).randomSeed(99).build();\n\n            for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n                forest.update(data[i]);\n            }\n        }\n    }\n\n    private RandomCutForest forest;\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest updateOnly(BenchmarkState state) {\n        double[][] data = state.data;\n        forest = state.forest;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            forest.update(data[i]);\n        }\n\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest scoreOnly(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double score = 0.0;\n        Random rnd = new Random(0);\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            score += forest.getAnomalyScore(data[i]);\n            if (rnd.nextDouble() < 0.01) {\n                forest.update(data[i]); // this should execute sparingly\n            }\n        }\n\n        
blackhole.consume(score);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest scoreAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double score = 0.0;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            score = forest.getAnomalyScore(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(score);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest attributionAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        DiVector vector = new DiVector(forest.getDimensions());\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            vector = forest.getAnomalyAttribution(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(vector);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest basicDensityAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        DensityOutput output = new DensityOutput(forest.getDimensions(), forest.getSampleSize());\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            output = forest.getSimpleDensity(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest neighborAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        List<Neighbor> output = null;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            output = 
forest.getNearNeighborsInSample(data[i]);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest imputeAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double[] output = null;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            output = forest.imputeMissingValues(data[i], 1, new int[] { state.baseDimensions - 1 });\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(DATA_SIZE)\n    public RandomCutForest extrapolateAndUpdate(BenchmarkState state, Blackhole blackhole) {\n        double[][] data = state.data;\n        forest = state.forest;\n        double[] output = null;\n\n        for (int i = INITIAL_DATA_SIZE; i < data.length; i++) {\n            output = forest.extrapolate(1);\n            forest.update(data[i]);\n        }\n\n        blackhole.consume(output);\n        return forest;\n    }\n}\n"
  },
  {
    "path": "Java/benchmark/src/main/java/com/amazon/randomcutforest/StateMapperBenchmark.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport org.openjdk.jmh.annotations.Benchmark;\nimport org.openjdk.jmh.annotations.Fork;\nimport org.openjdk.jmh.annotations.Level;\nimport org.openjdk.jmh.annotations.Measurement;\nimport org.openjdk.jmh.annotations.OperationsPerInvocation;\nimport org.openjdk.jmh.annotations.Param;\nimport org.openjdk.jmh.annotations.Scope;\nimport org.openjdk.jmh.annotations.Setup;\nimport org.openjdk.jmh.annotations.State;\nimport org.openjdk.jmh.annotations.TearDown;\nimport org.openjdk.jmh.annotations.Warmup;\nimport org.openjdk.jmh.infra.Blackhole;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.profilers.ObjectGraphSizeProfiler;\nimport com.amazon.randomcutforest.profilers.OutputSizeProfiler;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.fasterxml.jackson.core.JsonProcessingException;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport io.protostuff.LinkedBuffer;\nimport io.protostuff.ProtostuffIOUtil;\nimport io.protostuff.Schema;\nimport io.protostuff.runtime.RuntimeSchema;\n\n@Warmup(iterations = 2)\n@Measurement(iterations = 5)\n@Fork(value = 1)\n@State(Scope.Benchmark)\npublic class 
StateMapperBenchmark {\n    public static final int NUM_TRAIN_SAMPLES = 2048;\n    public static final int NUM_TEST_SAMPLES = 50;\n\n    @State(Scope.Thread)\n    public static class BenchmarkState {\n        @Param({ \"10\" })\n        int dimensions;\n\n        @Param({ \"50\" })\n        int numberOfTrees;\n\n        @Param({ \"256\" })\n        int sampleSize;\n\n        @Param({ \"false\", \"true\" })\n        boolean saveTreeState;\n\n        @Param({ \"FLOAT_32\", \"FLOAT_64\" })\n        Precision precision;\n\n        double[][] trainingData;\n        double[][] testData;\n        RandomCutForestState forestState;\n        String json;\n        byte[] protostuff;\n\n        @Setup(Level.Trial)\n        public void setUpData() {\n            NormalMixtureTestData gen = new NormalMixtureTestData();\n            trainingData = gen.generateTestData(NUM_TRAIN_SAMPLES, dimensions);\n            testData = gen.generateTestData(NUM_TEST_SAMPLES, dimensions);\n        }\n\n        @Setup(Level.Invocation)\n        public void setUpForest() throws JsonProcessingException {\n            RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision)\n                    .boundingBoxCacheFraction(0.0).build();\n\n            for (int i = 0; i < NUM_TRAIN_SAMPLES; i++) {\n                forest.update(trainingData[i]);\n            }\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(saveTreeState);\n            forestState = mapper.toState(forest);\n\n            ObjectMapper jsonMapper = new ObjectMapper();\n            json = jsonMapper.writeValueAsString(forestState);\n\n            Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n            LinkedBuffer buffer = 
LinkedBuffer.allocate(512);\n            try {\n                protostuff = ProtostuffIOUtil.toByteArray(forestState, schema, buffer);\n            } finally {\n                buffer.clear();\n            }\n        }\n    }\n\n    private RandomCutForest forest;\n    private byte[] bytes;\n\n    @TearDown(Level.Iteration)\n    public void tearDown() {\n        OutputSizeProfiler.setTestArray(bytes);\n        ObjectGraphSizeProfiler.setObject(forest);\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(NUM_TEST_SAMPLES)\n    public RandomCutForestState roundTripFromState(BenchmarkState state, Blackhole blackhole) {\n        RandomCutForestState forestState = state.forestState;\n        double[][] testData = state.testData;\n\n        for (int i = 0; i < NUM_TEST_SAMPLES; i++) {\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(state.saveTreeState);\n            forest = mapper.toModel(forestState);\n            double score = forest.getAnomalyScore(testData[i]);\n            blackhole.consume(score);\n            forest.update(testData[i]);\n            forestState = mapper.toState(forest);\n        }\n\n        return forestState;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(NUM_TEST_SAMPLES)\n    public String roundTripFromJson(BenchmarkState state, Blackhole blackhole) throws JsonProcessingException {\n        String json = state.json;\n        double[][] testData = state.testData;\n\n        for (int i = 0; i < NUM_TEST_SAMPLES; i++) {\n            ObjectMapper jsonMapper = new ObjectMapper();\n            RandomCutForestState forestState = jsonMapper.readValue(json, RandomCutForestState.class);\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(state.saveTreeState);\n            forest = 
mapper.toModel(forestState);\n\n            double score = forest.getAnomalyScore(testData[i]);\n            blackhole.consume(score);\n            forest.update(testData[i]);\n            json = jsonMapper.writeValueAsString(mapper.toState(forest));\n        }\n\n        bytes = json.getBytes();\n        return json;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(NUM_TEST_SAMPLES)\n    public byte[] roundTripFromProtostuff(BenchmarkState state, Blackhole blackhole) {\n        bytes = state.protostuff;\n        double[][] testData = state.testData;\n\n        for (int i = 0; i < NUM_TEST_SAMPLES; i++) {\n            Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n            RandomCutForestState forestState = schema.newMessage();\n            ProtostuffIOUtil.mergeFrom(bytes, forestState, schema);\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(state.saveTreeState);\n            forest = mapper.toModel(forestState);\n\n            double score = forest.getAnomalyScore(testData[i]);\n            blackhole.consume(score);\n            forest.update(testData[i]);\n            forestState = mapper.toState(forest);\n\n            LinkedBuffer buffer = LinkedBuffer.allocate(512);\n            try {\n                bytes = ProtostuffIOUtil.toByteArray(forestState, schema, buffer);\n            } finally {\n                buffer.clear();\n            }\n        }\n\n        return bytes;\n    }\n}\n"
  },
  {
    "path": "Java/benchmark/src/main/java/com/amazon/randomcutforest/StateMapperShingledBenchmark.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static java.lang.Math.PI;\nimport static java.lang.Math.cos;\n\nimport java.util.Random;\n\nimport org.openjdk.jmh.annotations.Benchmark;\nimport org.openjdk.jmh.annotations.Fork;\nimport org.openjdk.jmh.annotations.Level;\nimport org.openjdk.jmh.annotations.Measurement;\nimport org.openjdk.jmh.annotations.OperationsPerInvocation;\nimport org.openjdk.jmh.annotations.Param;\nimport org.openjdk.jmh.annotations.Scope;\nimport org.openjdk.jmh.annotations.Setup;\nimport org.openjdk.jmh.annotations.State;\nimport org.openjdk.jmh.annotations.TearDown;\nimport org.openjdk.jmh.annotations.Warmup;\nimport org.openjdk.jmh.infra.Blackhole;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.profilers.OutputSizeProfiler;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.fasterxml.jackson.core.JsonProcessingException;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport io.protostuff.LinkedBuffer;\nimport io.protostuff.ProtostuffIOUtil;\nimport io.protostuff.Schema;\nimport io.protostuff.runtime.RuntimeSchema;\n\n@Warmup(iterations = 2)\n@Measurement(iterations = 5)\n@Fork(value = 1)\n@State(Scope.Benchmark)\npublic class StateMapperShingledBenchmark {\n    public static final 
int NUM_TRAIN_SAMPLES = 2048;\n    public static final int NUM_TEST_SAMPLES = 50;\n\n    @State(Scope.Thread)\n    public static class BenchmarkState {\n        @Param({ \"10\" })\n        int dimensions;\n\n        @Param({ \"50\" })\n        int numberOfTrees;\n\n        @Param({ \"256\" })\n        int sampleSize;\n\n        @Param({ \"false\", \"true\" })\n        boolean saveTreeState;\n\n        @Param({ \"FLOAT_32\", \"FLOAT_64\" })\n        Precision precision;\n\n        double[][] trainingData;\n        double[][] testData;\n        RandomCutForestState forestState;\n        String json;\n        byte[] protostuff;\n\n        @Setup(Level.Trial)\n        public void setUpData() {\n            trainingData = genShingledData(NUM_TRAIN_SAMPLES, dimensions, 0);\n            testData = genShingledData(NUM_TEST_SAMPLES, dimensions, 1);\n        }\n\n        @Setup(Level.Invocation)\n        public void setUpForest() throws JsonProcessingException {\n            RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).shingleSize(dimensions)\n                    .build();\n\n            for (int i = 0; i < NUM_TRAIN_SAMPLES; i++) {\n                forest.update(trainingData[i]);\n            }\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(saveTreeState);\n            forestState = mapper.toState(forest);\n\n            ObjectMapper jsonMapper = new ObjectMapper();\n            json = jsonMapper.writeValueAsString(forestState);\n\n            Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n            LinkedBuffer buffer = LinkedBuffer.allocate(512);\n            try {\n                protostuff = ProtostuffIOUtil.toByteArray(forestState, schema, buffer);\n           
 } finally {\n                buffer.clear();\n            }\n        }\n    }\n\n    private byte[] bytes;\n\n    @TearDown(Level.Iteration)\n    public void tearDown() {\n        OutputSizeProfiler.setTestArray(bytes);\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(NUM_TEST_SAMPLES)\n    public RandomCutForestState roundTripFromState(BenchmarkState state, Blackhole blackhole) {\n        RandomCutForestState forestState = state.forestState;\n        double[][] testData = state.testData;\n\n        for (int i = 0; i < NUM_TEST_SAMPLES; i++) {\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(state.saveTreeState);\n            RandomCutForest forest = mapper.toModel(forestState);\n            double score = forest.getAnomalyScore(testData[i]);\n            blackhole.consume(score);\n            forest.update(testData[i]);\n            forestState = mapper.toState(forest);\n        }\n\n        return forestState;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(NUM_TEST_SAMPLES)\n    public String roundTripFromJson(BenchmarkState state, Blackhole blackhole) throws JsonProcessingException {\n        String json = state.json;\n        double[][] testData = state.testData;\n\n        for (int i = 0; i < NUM_TEST_SAMPLES; i++) {\n            ObjectMapper jsonMapper = new ObjectMapper();\n            RandomCutForestState forestState = jsonMapper.readValue(json, RandomCutForestState.class);\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(state.saveTreeState);\n            RandomCutForest forest = mapper.toModel(forestState);\n\n            double score = forest.getAnomalyScore(testData[i]);\n            blackhole.consume(score);\n            forest.update(testData[i]);\n            json = 
jsonMapper.writeValueAsString(mapper.toState(forest));\n        }\n\n        bytes = json.getBytes();\n        return json;\n    }\n\n    @Benchmark\n    @OperationsPerInvocation(NUM_TEST_SAMPLES)\n    public byte[] roundTripFromProtostuff(BenchmarkState state, Blackhole blackhole) {\n        bytes = state.protostuff;\n        double[][] testData = state.testData;\n\n        for (int i = 0; i < NUM_TEST_SAMPLES; i++) {\n            Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n            RandomCutForestState forestState = schema.newMessage();\n            ProtostuffIOUtil.mergeFrom(bytes, forestState, schema);\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(state.saveTreeState);\n            RandomCutForest forest = mapper.toModel(forestState);\n\n            double score = forest.getAnomalyScore(testData[i]);\n            blackhole.consume(score);\n            forest.update(testData[i]);\n            forestState = mapper.toState(forest);\n\n            LinkedBuffer buffer = LinkedBuffer.allocate(512);\n            try {\n                bytes = ProtostuffIOUtil.toByteArray(forestState, schema, buffer);\n            } finally {\n                buffer.clear();\n            }\n        }\n\n        return bytes;\n    }\n\n    private static double[][] genShingledData(int size, int dimensions, long seed) {\n        double[][] answer = new double[size][];\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[dimensions];\n        int count = 0;\n        double[] data = getDataD(size + dimensions - 1, 100, 5, seed);\n        for (int j = 0; j < size + dimensions - 1; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % dimensions;\n            if (entryIndex == 0) {\n   
             filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n                // System.out.println(\"Adding \" + j);\n                answer[count++] = getShinglePoint(history, entryIndex, dimensions);\n            }\n        }\n        return answer;\n    }\n\n    private static double[] getShinglePoint(double[] recentPointsSeen, int indexOfOldestPoint, int shingleLength) {\n        double[] shingledPoint = new double[shingleLength];\n        int i = 0;\n        for (int j = 0; j < shingleLength; ++j) {\n            double point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];\n            shingledPoint[i++] = point;\n\n        }\n        return shingledPoint;\n    }\n\n    private static double[] getDataD(int num, double amplitude, double noise, long seed) {\n\n        double[] data = new double[num];\n        Random noiseprg = new Random(seed);\n        for (int i = 0; i < num; i++) {\n            data[i] = amplitude * cos(2 * PI * (i + 50) / 1000) + noise * noiseprg.nextDouble();\n        }\n\n        return data;\n    }\n}\n"
  },
  {
    "path": "Java/benchmark/src/main/java/com/amazon/randomcutforest/profilers/ObjectGraphSizeProfiler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.profilers;\n\nimport java.util.Collection;\nimport java.util.Collections;\n\nimport org.github.jamm.MemoryMeter;\nimport org.openjdk.jmh.infra.BenchmarkParams;\nimport org.openjdk.jmh.infra.IterationParams;\nimport org.openjdk.jmh.profile.InternalProfiler;\nimport org.openjdk.jmh.results.AggregationPolicy;\nimport org.openjdk.jmh.results.IterationResult;\nimport org.openjdk.jmh.results.Result;\nimport org.openjdk.jmh.results.ScalarResult;\n\n/**\n * A profiler that uses the JAMM memory meter to measure the size of an object\n * graph.\n */\npublic class ObjectGraphSizeProfiler implements InternalProfiler {\n\n    private static Object object;\n    private static MemoryMeter meter = new MemoryMeter();\n\n    public static void setObject(Object object) {\n        ObjectGraphSizeProfiler.object = object;\n    }\n\n    @Override\n    public void beforeIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams) {\n    }\n\n    @Override\n    public Collection<? 
extends Result> afterIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams,\n            IterationResult iterationResult) {\n        long size = 0;\n        if (object != null) {\n            size = meter.measureDeep(object);\n            object = null;\n        }\n        ScalarResult result = new ScalarResult(\"+object-graph-size.bytes\", size, \"bytes\", AggregationPolicy.AVG);\n        return Collections.singleton(result);\n    }\n\n    @Override\n    public String getDescription() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Java/benchmark/src/main/java/com/amazon/randomcutforest/profilers/OutputSizeProfiler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.profilers;\n\nimport java.util.Collection;\nimport java.util.Collections;\n\nimport org.openjdk.jmh.infra.BenchmarkParams;\nimport org.openjdk.jmh.infra.IterationParams;\nimport org.openjdk.jmh.profile.InternalProfiler;\nimport org.openjdk.jmh.results.AggregationPolicy;\nimport org.openjdk.jmh.results.IterationResult;\nimport org.openjdk.jmh.results.Result;\nimport org.openjdk.jmh.results.ScalarResult;\n\n/**\n * This simple profile outputs the size of a provided byte array or string as\n * part of the JMH metrics. We use it to measure the size of output in\n * {@link com.amazon.randomcutforest.StateMapperBenchmark}.\n */\npublic class OutputSizeProfiler implements InternalProfiler {\n\n    private static byte[] bytes;\n\n    public static void setTestString(String s) {\n        bytes = s.getBytes();\n    }\n\n    public static void setTestArray(byte[] bytes) {\n        OutputSizeProfiler.bytes = bytes;\n    }\n\n    @Override\n    public void beforeIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams) {\n    }\n\n    @Override\n    public Collection<? 
extends Result> afterIteration(BenchmarkParams benchmarkParams, IterationParams iterationParams,\n            IterationResult iterationResult) {\n        int length = 0;\n        if (bytes != null) {\n            length = bytes.length;\n            bytes = null;\n        }\n        ScalarResult result = new ScalarResult(\"+output-size.bytes\", length, \"bytes\", AggregationPolicy.AVG);\n        return Collections.singleton(result);\n    }\n\n    @Override\n    public String getDescription() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Java/core/pom.xml",
    "content": "<?xml version=\"1.0\"?>\n<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n    xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n  <modelVersion>4.0.0</modelVersion>\n\n  <parent>\n    <groupId>software.amazon.randomcutforest</groupId>\n    <artifactId>randomcutforest-parent</artifactId>\n    <version>4.4.0</version>\n  </parent>\n\n  <artifactId>randomcutforest-core</artifactId>\n  <packaging>jar</packaging>\n\n  <dependencies>\n    <dependency>\n      <groupId>software.amazon.randomcutforest</groupId>\n      <artifactId>randomcutforest-testutils</artifactId>\n      <version>${project.version}</version>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.projectlombok</groupId>\n      <artifactId>lombok</artifactId>\n      <version>1.18.30</version>\n      <scope>provided</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.junit.jupiter</groupId>\n      <artifactId>junit-jupiter-engine</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.junit.jupiter</groupId>\n      <artifactId>junit-jupiter-params</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.hamcrest</groupId>\n      <artifactId>hamcrest</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.mockito</groupId>\n      <artifactId>mockito-core</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.mockito</groupId>\n      <artifactId>mockito-junit-jupiter</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.powermock</groupId>\n      <artifactId>powermock-api-easymock</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>com.fasterxml.jackson.core</groupId>\n      
<artifactId>jackson-core</artifactId>\n      <version>2.16.0</version>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>com.fasterxml.jackson.core</groupId>\n      <artifactId>jackson-databind</artifactId>\n      <version>2.16.0</version>\n      <scope>test</scope>\n    </dependency>\n  </dependencies>\n</project>\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/CommonUtils.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.Objects;\nimport java.util.function.Supplier;\n\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\n\n/**\n * A collection of common utility functions.\n */\npublic class CommonUtils {\n\n    private CommonUtils() {\n    }\n\n    /**\n     * Throws an {@link IllegalArgumentException} with the specified message if the\n     * specified input is false.\n     *\n     * @param condition A condition to test.\n     * @param message   The error message to include in the\n     *                  {@code IllegalArgumentException} if {@code condition} is\n     *                  false.\n     * @throws IllegalArgumentException if {@code condition} is false.\n     */\n    public static void checkArgument(boolean condition, String message) {\n\n        if (!condition) {\n            throw new IllegalArgumentException(message);\n        }\n    }\n\n    // a lazy equivalent of the above, which avoids parameter evaluation\n    public static void checkArgument(boolean condition, Supplier<String> messageSupplier) {\n        if (!condition) {\n            throw new IllegalArgumentException(messageSupplier.get());\n        }\n    }\n\n    /**\n     * Throws an {@link IllegalStateException} with the specified message if the\n     * specified input is false.\n     *\n     * @param condition A condition to 
test.\n     * @param message   The error message to include in the\n     *                  {@code IllegalStateException} if {@code condition} is false.\n     * @throws IllegalStateException if {@code condition} is false.\n     */\n    public static void checkState(boolean condition, String message) {\n        if (!condition) {\n            throw new IllegalStateException(message);\n        }\n    }\n\n    /**\n     * Throws an {@link IllegalStateException} with the specified message if the\n     * specified input is false. This would eventually become asserts.\n     *\n     * @param condition A condition to test.\n     * @param message   The error message to include in the\n     *                  {@code IllegalStateException} if {@code condition} is false.\n     * @throws IllegalStateException if {@code condition} is false.\n     */\n    public static void validateInternalState(boolean condition, String message) {\n        if (!condition) {\n            throw new IllegalStateException(message);\n        }\n    }\n\n    /**\n     * Throws a {@link NullPointerException} with the specified message if the\n     * specified input is null.\n     *\n     * @param <T>     An arbitrary type.\n     * @param object  An object reference to test for nullity.\n     * @param message The error message to include in the\n     *                {@code NullPointerException} if {@code object} is null.\n     * @return {@code object} if not null.\n     * @throws NullPointerException if the supplied object is null.\n     */\n    public static <T> T checkNotNull(T object, String message) {\n        Objects.requireNonNull(object, message);\n        return object;\n    }\n\n    /**\n     * Compute the probability of separation for a bounding box adn a point. 
This\n     * method considers the bounding box created by merging the query point into the\n     * existing bounding box, and computes the probability that a random cut would\n     * separate the query point from the merged bounding box.\n     *\n     * @param boundingBox is the bounding box used in RandomCutTree\n     * @param queryPoint  is the multidimensional point\n     * @return the probability of separation choosing a random cut\n     */\n\n    public static double getProbabilityOfSeparation(final IBoundingBoxView boundingBox, float[] queryPoint) {\n        double sumOfNewRange = 0d;\n        double sumOfDifferenceInRange = 0d;\n\n        for (int i = 0; i < queryPoint.length; ++i) {\n            double maxVal = boundingBox.getMaxValue(i);\n            double minVal = boundingBox.getMinValue(i);\n            double oldRange = maxVal - minVal;\n\n            if (maxVal < queryPoint[i]) {\n                maxVal = queryPoint[i];\n            } else if (minVal > queryPoint[i]) {\n                minVal = queryPoint[i];\n            } else {\n                sumOfNewRange += oldRange;\n                continue;\n            }\n\n            double newRange = maxVal - minVal;\n            sumOfNewRange += newRange;\n            sumOfDifferenceInRange += (newRange - oldRange);\n        }\n\n        if (sumOfNewRange <= 0) {\n            return 0;\n        } else\n            return sumOfDifferenceInRange / sumOfNewRange;\n    }\n\n    /**\n     * The default anomaly scoring function for points that contained in a tree.\n     *\n     * @param depth The depth of the leaf node where this method is invoked\n     * @param mass  The number of times the point has been seen before\n     * @return The score contribution from this previously-seen point\n     */\n    public static double defaultScoreSeenFunction(double depth, double mass) {\n        return 1.0 / (depth + Math.log(mass + 1.0) / Math.log(2.0));\n    }\n\n    /**\n     * The default anomaly scoring function for 
points not already contained in a\n     * tree.\n     *\n     * @param depth The depth of the leaf node where this method is invoked\n     * @param mass  The number of times the point has been seen before\n     * @return The score contribution from this point\n     */\n    public static double defaultScoreUnseenFunction(double depth, double mass) {\n        return 1.0 / (depth + 1);\n    }\n\n    public static double defaultDampFunction(double leafMass, double treeMass) {\n        return 1.0 - leafMass / (2 * treeMass);\n    }\n\n    /**\n     * Some algorithms which return a scalar value need to scale that value by tree\n     * mass for consistency. This is the default method for computing the scale\n     * factor in these cases. The function has to be associative in its first\n     * argument (when the second is fixed) That is, fn (x1, y) + fn (x2, y) = fn (x1\n     * + x2, y)\n     * \n     * @param scalarValue The value being scaled\n     * @param mass        The mass of the tree where this method is invoked\n     * @return The original value scaled appropriately for this tree\n     */\n    public static double defaultScalarNormalizerFunction(double scalarValue, double mass) {\n        return scalarValue * Math.log(mass + 1) / Math.log(2.0);\n    }\n\n    /**\n     * The following function forms the core of RCFs, given a BoundingBox it\n     * produces the probability of cutting in different dimensions. While this\n     * function is absorbed in the logic of the different simpler scoring methods,\n     * the scoring methods that are mode advanced (for example, trying to simulate\n     * an Transductive Isolation Forest with streaming) require this function. 
A\n     * different function can be used to simulate via non-RCFs.\n     * \n     * @param boundingBox bounding box of a set of points\n     * @return array of probabilities of cutting in that specific dimension\n     */\n\n    public static double[] defaultRCFgVecFunction(IBoundingBoxView boundingBox) {\n        double[] answer = new double[boundingBox.getDimensions()];\n\n        for (int i = 0; i < boundingBox.getDimensions(); ++i) {\n            double maxVal = boundingBox.getMaxValue(i);\n            double minVal = boundingBox.getMinValue(i);\n            double oldRange = maxVal - minVal;\n\n            if (oldRange > 0) {\n                answer[i] = oldRange;\n            }\n        }\n        return answer;\n\n    };\n\n    public static double[] toDoubleArray(float[] array) {\n        checkNotNull(array, \"array must not be null\");\n        double[] result = new double[array.length];\n        for (int i = 0; i < array.length; i++) {\n            result[i] = array[i];\n        }\n        return result;\n    }\n\n    public static double[] toDoubleArrayNullable(float[] array) {\n        return (array == null) ? null : toDoubleArray(array);\n    }\n\n    public static float[] toFloatArray(double[] array) {\n        checkNotNull(array, \"array must not be null\");\n        float[] result = new float[array.length];\n        for (int i = 0; i < array.length; i++) {\n            result[i] = (array[i] == 0) ? 0 : (float) array[i];\n            // eliminating -0.0 issues\n        }\n        return result;\n    }\n\n    public static float[] toFloatArrayNullable(double[] array) {\n        return (array == null) ? 
null : toFloatArray(array);\n    }\n\n    public static int[] toIntArray(byte[] values) {\n        checkNotNull(values, \"array must not be null\");\n        int[] result = new int[values.length];\n        for (int i = 0; i < values.length; i++) {\n            result[i] = values[i] & 0xff;\n        }\n        return result;\n    }\n\n    public static int[] toIntArray(char[] values) {\n        checkNotNull(values, \"array must not be null\");\n        int[] result = new int[values.length];\n        for (int i = 0; i < values.length; i++) {\n            result[i] = values[i];\n        }\n        return result;\n    }\n\n    public static char[] toCharArray(int[] values) {\n        checkNotNull(values, \"array must not be null\");\n        char[] result = new char[values.length];\n        for (int i = 0; i < values.length; i++) {\n            result[i] = (char) values[i];\n        }\n        return result;\n    }\n\n    public static byte[] toByteArray(int[] values) {\n        checkNotNull(values, \"array must not be null\");\n        byte[] result = new byte[values.length];\n        for (int i = 0; i < values.length; i++) {\n            result[i] = (byte) values[i];\n        }\n        return result;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/ComponentList.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\n\n/**\n * A ComponentList is an ArrayList specialized to contain IComponentModel\n * instances. Executor classes operate on ComponentLists.\n *\n * @param <PointReference> The internal point representation expected by the\n *                         component models in this list.\n * @param <Point>          The explicit data type of points being passed\n */\npublic class ComponentList<PointReference, Point> extends ArrayList<IComponentModel<PointReference, Point>> {\n    public ComponentList() {\n        super();\n    }\n\n    public ComponentList(Collection<? extends IComponentModel<PointReference, Point>> collection) {\n        super(collection);\n    }\n\n    public ComponentList(int initialCapacity) {\n        super(initialCapacity);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/IComponentModel.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport com.amazon.randomcutforest.config.IDynamicConfig;\nimport com.amazon.randomcutforest.executor.ITraversable;\nimport com.amazon.randomcutforest.executor.IUpdatable;\n\n/**\n *\n * @param <PointReference> The internal point representation expected by the\n *                         component models in this list.\n * @param <Point>          The explicit data type of points being passed\n */\n\npublic interface IComponentModel<PointReference, Point>\n        extends ITraversable, IUpdatable<PointReference>, IDynamicConfig {\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/IMultiVisitorFactory.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport com.amazon.randomcutforest.tree.ITree;\n\n@FunctionalInterface\npublic interface IMultiVisitorFactory<R> {\n    MultiVisitor<R> newVisitor(ITree<?, ?> tree, float[] point);\n\n    default R liftResult(ITree<?, ?> tree, R result) {\n        return result;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/IVisitorFactory.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport com.amazon.randomcutforest.tree.ITree;\n\n@FunctionalInterface\npublic interface IVisitorFactory<R> {\n    Visitor<R> newVisitor(ITree<?, ?> tree, float[] point);\n\n    default R liftResult(ITree<?, ?> tree, R result) {\n        return result;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/MultiVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * This is the interface for a visitor which can be used with\n * {RandomCutTree::traversePathToLeafAndVisitNodesMulti}. In this traversal\n * method, we optionally choose to split the visitor into two copies when\n * visiting nodes. Each copy then visits one of the paths down from that node.\n * The results from both visitors are combined before returning back up the\n * tree.\n */\npublic interface MultiVisitor<R> extends Visitor<R> {\n\n    /**\n     * Returns true of the traversal method should split the visitor (i.e., create a\n     * copy) at this node.\n     *\n     * @param node A node in the tree traversal\n     * @return true if the traversal should split the visitor into two copies at\n     *         this node, false otherwise.\n     */\n    boolean trigger(final INodeView node);\n\n    /**\n     * Return a partial copy of this visitor. The original visitor plus the copy\n     * will each traverse one branch of the tree. The fields not copied will be\n     * filled in by the branches of the tree\n     *\n     * @return a copy of this visitor\n     */\n    MultiVisitor<R> newPartialCopy();\n\n    /**\n     * Combine two visitors. The state of the argument visitor should be combined\n     * with the state of this instance. 
This method is called after both visitors\n     * have traversed one branch of the tree.\n     *\n     * @param other A second visitor\n     */\n    void combine(MultiVisitor<R> other);\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/MultiVisitorFactory.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.tree.ITree;\n\n/**\n * This is the interface for a visitor which can be used with\n * {RandomCutTree::traversePathToLeafAndVisitNodesMulti}. In this traversal\n * method, we optionally choose to split the visitor into two copies when\n * visiting nodes. Each copy then visits one of the paths down from that node.\n * The results from both visitors are combined before returning back up the\n * tree.\n */\n\npublic class MultiVisitorFactory<R> implements IMultiVisitorFactory<R> {\n    private final BiFunction<ITree<?, ?>, float[], MultiVisitor<R>> newVisitor;\n    private final BiFunction<ITree<?, ?>, R, R> liftResult;\n\n    public MultiVisitorFactory(BiFunction<ITree<?, ?>, float[], MultiVisitor<R>> newVisitor,\n            BiFunction<ITree<?, ?>, R, R> liftResult) {\n        this.newVisitor = newVisitor;\n        this.liftResult = liftResult;\n    }\n\n    public MultiVisitorFactory(BiFunction<ITree<?, ?>, float[], MultiVisitor<R>> newVisitor) {\n        this(newVisitor, (tree, x) -> x);\n    }\n\n    @Override\n    public MultiVisitor<R> newVisitor(ITree<?, ?> tree, float[] point) {\n        return newVisitor.apply(tree, point);\n    }\n\n    @Override\n    public R liftResult(ITree<?, ?> tree, R result) {\n        return 
liftResult.apply(tree, result);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/PredictiveRandomCutForest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_BOUNDING_BOX_CACHE_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_CENTER_OF_MASS_ENABLED;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_NUMBER_OF_TREES;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_OUTPUT_AFTER_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_PARALLEL_EXECUTION_ENABLED;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SHINGLE_SIZE;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\nimport static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.DEFAULT_START_NORMALIZATION;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.DEFAULT_STOP_NORMALIZATION;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\nimport 
java.util.Optional;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.preprocessor.IPreprocessor;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.InterpolationMeasure;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\n\n/**\n * This class provides a predictive imputation based on RCF (respecting the\n * arrow of time) alongside streaming normalization\n *\n * Many of these capabities existed since RCF 1.0 -- however it seems that\n * abstracting them into a single collected class can avoid the messier details\n * and make the capabilites more accessible.\n *\n * We reiterate the observation in\n * <a href=\"https://opensearch.org/blog/random-cut-forests/\">\n * https://opensearch.org/blog/random-cut-forests/</a> that an unsupervised\n * anomaly detection that infers \"not normal\" can be made to predict, \"well,\n * what is normal then\"? That is the basis of the time series forecasting in\n * RCFCaster in parkservices -- one can predict the most likely (approximately\n * minimum not normal score) value. 
The clustering inherent in RCF (for example,\n * see getRCFDistanceAttribution as an alternate scoring metric) and other\n * multicentroid methods can be then utilized to expose clusters of \"likely\"\n * values.\n *\n */\npublic class PredictiveRandomCutForest {\n\n    protected TransformMethod transformMethod = TransformMethod.NORMALIZE;\n\n    protected RandomCutForest forest;\n\n    protected IPreprocessor preprocessor;\n\n    protected ForestMode forestMode = ForestMode.STANDARD;\n\n    public PredictiveRandomCutForest(Builder<?> builder) {\n        transformMethod = builder.transformMethod;\n        Preprocessor.Builder<?> preprocessorBuilder = Preprocessor.builder().shingleSize(builder.shingleSize)\n                .transformMethod(builder.transformMethod).forestMode(builder.forestMode);\n\n        int dimensions = builder.inputDimensions * builder.shingleSize;\n        if (builder.forestMode == ForestMode.TIME_AUGMENTED) {\n            dimensions += builder.shingleSize;\n            // if time is not differenced, then it can be added as a column\n            // without much difficulty\n            preprocessorBuilder.normalizeTime(true);\n            // force internal shingling for this option\n            builder.internalShinglingEnabled = Optional.of(true);\n        } else if (builder.forestMode == ForestMode.STREAMING_IMPUTE) {\n            preprocessorBuilder.normalizeTime(true);\n            builder.internalShinglingEnabled = Optional.of(true);\n            preprocessorBuilder.imputationMethod(builder.imputationMethod);\n            if (builder.fillValues != null) {\n                preprocessorBuilder.fillValues(builder.fillValues);\n            }\n            preprocessorBuilder.useImputedFraction(builder.useImputedFraction.orElse(0.5));\n        } else {\n            builder.internalShinglingEnabled = Optional.of(true);\n        }\n\n        forestMode = builder.forestMode;\n        forest = builder.buildForest();\n        
validateNonNegativeArray(builder.weights);\n\n        preprocessorBuilder.inputLength(builder.inputDimensions);\n        preprocessorBuilder.weights(builder.weights);\n        preprocessorBuilder.weightTime(builder.weightTime.orElse(1.0));\n        preprocessorBuilder.transformDecay(builder.transformDecay.orElse(1.0 / builder.sampleSize));\n        // to be used later\n        preprocessorBuilder.randomSeed(builder.randomSeed.orElse(0L) + 1);\n        preprocessorBuilder.dimensions(dimensions);\n        preprocessorBuilder.stopNormalization(builder.stopNormalization.orElse(DEFAULT_STOP_NORMALIZATION));\n        preprocessorBuilder.startNormalization(builder.startNormalization.orElse(DEFAULT_START_NORMALIZATION));\n\n        preprocessor = preprocessorBuilder.build();\n    }\n\n    public PredictiveRandomCutForest(ForestMode forestMode, TransformMethod method, IPreprocessor preprocessor,\n            RandomCutForest forest) {\n        this.forestMode = forestMode;\n        this.transformMethod = method;\n        this.preprocessor = preprocessor;\n        this.forest = forest;\n    }\n\n    void validateNonNegativeArray(double[] array) {\n        if (array != null) {\n            for (double element : array) {\n                checkArgument(element >= 0, \" has to be non-negative\");\n            }\n        }\n    }\n\n    /**\n     * The following function provides a clustering of the predicted near neighbors\n     * of the input point. 
Note that all of these functions were always available in\n     * RCF\n     * \n     * @param inputPoint              the input point, can have missing values\n     * @param timestamp               the timestamp for the input -- not useful\n     *                                unless we are using TIME_AUGMENTED mode or\n     *                                STREAMING_IMPUTE (for time series)\n     * @param missingValues           an integer array with the positions in\n     *                                inputPoint which are unknown\n     * @param numberOfRepresentatives a parameter that controls multi-centroid\n     *                                clustering of the (predicted) neighbors -- 5\n     *                                is a good default. Setting this as 1 would\n     *                                have a behavior similar to (but perhaps still\n     *                                better than) k-means\n     * @param shrinkage               a parameter that controls the shape of the\n     *                                clusters -- 1 would indicate behavior similar\n     *                                to centroids\n     * @param centrality              a parameter in [0:1] that controls the\n     *                                randomization/diversity in the prediction -- a\n     *                                value of 1 would correspond to p50 predictions\n     *                                and a value of 0 would correspond to looser\n     *                                random search\n     * @return a SampleSummary of the near neighbors in the same dimension as the\n     *         input (unless TIME_AUGMENTED, when the dimension increases by 1.\n     */\n    public SampleSummary predict(float[] inputPoint, long timestamp, int[] missingValues, int numberOfRepresentatives,\n            double shrinkage, double centrality) {\n        checkArgument(inputPoint.length == preprocessor.getInputLength(), \"incorrect length\");\n        int[] 
newMissingValues = new int[0]; // avoiding null; allows missingvalues to be null\n        if (missingValues != null) {\n            checkArgument(missingValues.length <= inputPoint.length, \" incorrect data\");\n            newMissingValues = new int[missingValues.length];\n            int startPosition = forest.getDimensions() - forest.getDimensions() / preprocessor.getShingleSize();\n            for (int i = 0; i < missingValues.length; i++) {\n                checkArgument(missingValues[i] >= 0, \" missing values cannot be at negative position\");\n                checkArgument(missingValues[i] <= inputPoint.length,\n                        \"missing values cannot be at position larger than input length\");\n                checkArgument(forestMode == ForestMode.TIME_AUGMENTED || missingValues[i] < inputPoint.length,\n                        \"cannot be equal to input length\");\n                newMissingValues[i] = (forestMode == ForestMode.STREAMING_IMPUTE) ? startPosition + missingValues[i]\n                        : missingValues[i];\n            }\n        }\n        // check when TIME_AUGMENTED and missingValue includes timestamp\n        float[] point = preprocessor.getScaledShingledInput(toDoubleArray(inputPoint), timestamp, missingValues,\n                forest);\n        if (point == null) {\n            return new SampleSummary(preprocessor.getInputLength());\n        }\n        return preprocessor.invertInPlaceRecentSummaryBlock(forest.getConditionalFieldSummary(point, newMissingValues,\n                numberOfRepresentatives, shrinkage, true, false, centrality, preprocessor.getShingleSize()));\n    }\n\n    public SampleSummary predict(float[] inputPoint, long timestamp, int[] missingValues) {\n        return predict(inputPoint, timestamp, missingValues, 5, 0.3, 0.5);\n    }\n\n    /**\n     * Near neighbors is an extreme example of prediction where there aare no\n     * missing values\n     * \n     * @param inputPoint the input\n     * @param 
timestamp  the timestamp (not used unless for TIME_AUGMENTED and\n     *                   STREAMING_IMPUTE)\n     * @return returns a clustering of the near neighbors in the (time decaying)\n     *         sample maintained by RCF\n     */\n    public SampleSummary neighborSummary(float[] inputPoint, long timestamp) {\n        return predict(inputPoint, timestamp, null, 1, 0, 1);\n    }\n\n    /**\n     * This is the anomalyScore() function in RCF -- that definition of score has\n     * diverged from the original paper and perhaps the more descriptive name is\n     * more useful. Note that it is important that this is Expectation of Inverse\n     * versus Inverse of Expectation -- a la Jensen's Inequality.\n     * \n     * @param inputPoint the input point\n     * @param timestamp  the timestamp\n     * @return a score in the range (0:log_2(sampleSize)) where larger values\n     *         indicate potential anomalies. Note that the score can remain elevated\n     *         due to shingling for a while -- use\n     *         ParkServices/ThresholdedRandomCutForest where the PredictorCorrector\n     *         architecture is used to evaluate the score further and produce a more\n     *         refined anomalyGrade()in [0:1] ; including setting the grade to 0\n     *         (corrector) even though the score (of this function) is high.\n     */\n    public double getExpectedInverseDepthScore(float[] inputPoint, long timestamp) {\n        checkArgument(inputPoint.length == preprocessor.getInputLength(), \"incorrect length\");\n        float[] point = preprocessor.getScaledShingledInput(toDoubleArray(inputPoint), timestamp, null, forest);\n        return (point != null) ? forest.getAnomalyScore(point) : 0;\n    }\n\n    /**\n     * Same as above -- but now the subparts of the score are exposed in the full\n     * RCF space which is inputDimension (add +1 for TIME_AUGMENTED) times the\n     * shingleSize. 
For each of these, the score can arise from the input value\n     * being HIGH or LOW (as determined by the ensemble of trees) -- which is\n     * returned in the DiVector structure. This is extremely useful in pinpointing\n     * which attribute or which value in the shingle was likely indicator of the\n     * anomaly, and is used exactly such in PredictorCorrector in ParkServices. This\n     * function enables the use of PreProcessor.\n     * \n     * @param inputPoint the input point\n     * @param timestamp  the timestamp for the point (used only in TIME_AUGMENTED\n     *                   and STREAMING_IMPUTE)\n     * @return a divector such that getHighLowSum() would equal (up to floating\n     *         point precision over summing values across the dimensions in RCF\n     *         space) the value of getExpectedInverseDepthScore()\n     */\n    public DiVector getExpectedInverseDepthAttribution(float[] inputPoint, long timestamp) {\n        checkArgument(inputPoint.length == preprocessor.getInputLength(), \"incorrect length\");\n        float[] point = preprocessor.getScaledShingledInput(toDoubleArray(inputPoint), timestamp, null, forest);\n        return (point != null) ? forest.getAnomalyAttribution(point) : new DiVector(forest.getDimensions());\n    }\n\n    /**\n     * One of the visions for RCF (see blog above) was that the same data structure\n     * that can compute anomaly scores somewhat effectively, also has information\n     * regarding other measures (say X) of the point set -- and if that measure X\n     * also is indicative of anomaly then RCF should be able to vend X. One such\n     * example is density around a point set and this was available in RCF and is\n     * carried over here. It is primarily used via the DISTANCE_MODE computation in\n     * ScoringStrategy in ParkServices. 
This function enables the use of\n     * preprocessor.\n     * \n     * @param inputPoint the input point\n     * @param timestamp  the timestamp of the current point\n     * @return a density output structure -- see\n     *         examples/dynamicinference/dynamicdensity\n     */\n    public DensityOutput getSimpleDensity(float[] inputPoint, long timestamp) {\n        checkArgument(inputPoint.length == preprocessor.getInputLength(), \"incorrect length\");\n        float[] scaled = preprocessor.getScaledShingledInput(toDoubleArray(inputPoint), timestamp, null, forest);\n        DensityOutput answer = (scaled != null) ? forest.getSimpleDensity(scaled)\n                : new DensityOutput(new InterpolationMeasure(inputPoint.length, 0));\n        double[] scale = preprocessor.getScale();\n        for (int i = 0; i < answer.getDimensions(); i++) {\n            answer.distances.high[i] *= scale[i % scale.length];\n            answer.distances.low[i] *= scale[i % scale.length];\n        }\n        return answer;\n    }\n\n    /**\n     * The following provides an alternate scoring (as well as exposing the subparts\n     * of the computation) based on the density output. The function getHighLowSum()\n     * would correspond to a score (where higher corresponds to more unlikely\n     * behavior). This is based on the recursive partitioning in the RCF trees and\n     * the fact that they preserve distances -- this is used in DISTANCE_MODE and\n     * MULTI_MODE in ParkService/ThresholdedRandomCutForest See the example\n     * parkservices/NumericGLADExample\n     * \n     * @param inputPoint the input point\n     * @param timestamp  the timestamp of the point\n     * @return a divector in RCF space of size inputDimension (add +1 for\n     *         TIME_AUGMENTED) times the shingleSize; with the same interpretation\n     *         in getExpectedInverseDepthAttribution(). 
This score is not calibrated\n     *         to be in any bounded ranges, and should perhaps be used as a\n     *         corroborative signal with getExpectedInverseDepthAttribution\n     */\n    public DiVector getRCFDistanceAttribution(float[] inputPoint, long timestamp) {\n        DensityOutput test = getSimpleDensity(inputPoint, timestamp);\n        return test.distances;\n    }\n\n    public void update(float[] record, long timestamp) {\n        update(record, timestamp, null);\n    }\n\n    public void update(float[] record, long timestamp, int[] missing) {\n        float[] scaled = preprocessor.getScaledShingledInput(toDoubleArray(record), timestamp, missing, forest);\n        preprocessor.update(toDoubleArray(record), scaled, timestamp, missing, forest);\n    }\n\n    public RandomCutForest getForest() {\n        return forest;\n    }\n\n    public IPreprocessor getPreprocessor() {\n        return preprocessor;\n    }\n\n    public ForestMode getForestMode() {\n        return forestMode;\n    }\n\n    public TransformMethod getTransformMethod() {\n        return transformMethod;\n    }\n\n    /**\n     * @return a new builder.\n     */\n    public static Builder<?> builder() {\n        return new Builder<>();\n    }\n\n    public static class Builder<T extends Builder<T>> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        protected int inputDimensions;\n        protected int sampleSize = DEFAULT_SAMPLE_SIZE;\n        protected Optional<Integer> outputAfter = Optional.empty();\n        protected Optional<Integer> startNormalization = Optional.empty();\n        protected Optional<Integer> stopNormalization = Optional.empty();\n        protected int numberOfTrees = DEFAULT_NUMBER_OF_TREES;\n        protected Optional<Double> timeDecay = Optional.empty();\n        protected Optional<Double> lowerThreshold = Optional.empty();\n        protected Optional<Double> 
weightTime = Optional.empty();\n        protected boolean normalizeTime = true;\n        protected Optional<Long> randomSeed = Optional.empty();\n        protected boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n        protected boolean centerOfMassEnabled = DEFAULT_CENTER_OF_MASS_ENABLED;\n        protected boolean parallelExecutionEnabled = DEFAULT_PARALLEL_EXECUTION_ENABLED;\n        protected Optional<Integer> threadPoolSize = Optional.empty();\n        protected double boundingBoxCacheFraction = DEFAULT_BOUNDING_BOX_CACHE_FRACTION;\n        protected int shingleSize = DEFAULT_SHINGLE_SIZE;\n        protected Optional<Boolean> internalShinglingEnabled = Optional.empty();\n        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;\n        protected TransformMethod transformMethod = TransformMethod.NONE;\n        protected ImputationMethod imputationMethod = PREVIOUS;\n        protected ForestMode forestMode = ForestMode.STANDARD;\n        protected double[] weights = null;\n        protected double[] fillValues = null;\n        protected Optional<Double> useImputedFraction = Optional.empty();\n        protected Optional<Double> transformDecay = Optional.empty();\n\n        void validate() {\n            if (forestMode == ForestMode.TIME_AUGMENTED) {\n                if (internalShinglingEnabled.isPresent()) {\n                    checkArgument(shingleSize == 1 || internalShinglingEnabled.get(),\n                            \" shingle size has to be 1 or \" + \"internal shingling must turned on\");\n                    checkArgument(transformMethod == TransformMethod.NONE || internalShinglingEnabled.get(),\n                            \" internal shingling must turned on for transforms\");\n                } else {\n                    internalShinglingEnabled = Optional.of(true);\n                }\n                if (useImputedFraction.isPresent()) {\n                    throw new 
IllegalArgumentException(\" imputation infeasible\");\n                }\n            } else if (forestMode == ForestMode.STREAMING_IMPUTE) {\n                checkArgument(shingleSize > 1, \"imputation with shingle size 1 is not meaningful\");\n                internalShinglingEnabled.ifPresent(x -> checkArgument(x,\n                        \" input cannot be shingled (even if internal representation is different) \"));\n            } else {\n                if (!internalShinglingEnabled.isPresent()) {\n                    internalShinglingEnabled = Optional.of(true);\n                }\n                if (useImputedFraction.isPresent()) {\n                    throw new IllegalArgumentException(\" imputation infeasible\");\n                }\n            }\n            if (startNormalization.isPresent()) {\n                // we should not be setting normalizations unless we are careful\n                if (outputAfter.isPresent()) {\n                    // can be overspecified\n                    checkArgument(outputAfter.get() + shingleSize - 1 > startNormalization.get(),\n                            \"output after has to wait till normalization, reduce normalization\");\n                } else {\n                    int n = startNormalization.get();\n                    checkArgument(n > 0, \" startNormalization has to be positive\");\n                    // if start normalization is low then first few output can be 0\n                    outputAfter = Optional\n                            .of(max(max(1, (int) (sampleSize * DEFAULT_OUTPUT_AFTER_FRACTION)), n - shingleSize + 1));\n                }\n            } else {\n                if (outputAfter.isPresent()) {\n                    startNormalization = Optional.of(min(DEFAULT_START_NORMALIZATION, outputAfter.get()));\n                }\n            }\n        }\n\n        public PredictiveRandomCutForest build() {\n            validate();\n            return new PredictiveRandomCutForest(this);\n        
}\n\n        protected RandomCutForest buildForest() {\n            int dimensions = inputDimensions * shingleSize\n                    + ((forestMode == ForestMode.TIME_AUGMENTED) ? shingleSize : 0);\n            RandomCutForest.Builder builder = new RandomCutForest.Builder().dimensions(dimensions)\n                    .sampleSize(sampleSize).numberOfTrees(numberOfTrees)\n                    .storeSequenceIndexesEnabled(storeSequenceIndexesEnabled).centerOfMassEnabled(centerOfMassEnabled)\n                    .parallelExecutionEnabled(parallelExecutionEnabled)\n                    .boundingBoxCacheFraction(boundingBoxCacheFraction).shingleSize(shingleSize)\n                    .internalShinglingEnabled(internalShinglingEnabled.get())\n                    .initialAcceptFraction(initialAcceptFraction);\n\n            outputAfter.ifPresent(builder::outputAfter);\n            timeDecay.ifPresent(builder::timeDecay);\n            randomSeed.ifPresent(builder::randomSeed);\n            threadPoolSize.ifPresent(builder::threadPoolSize);\n            return builder.build();\n        }\n\n        public T inputDimensions(int dimensions) {\n            this.inputDimensions = dimensions;\n            return (T) this;\n        }\n\n        public T sampleSize(int sampleSize) {\n            this.sampleSize = sampleSize;\n            return (T) this;\n        }\n\n        public T startNormalization(int startNormalization) {\n            this.startNormalization = Optional.of(startNormalization);\n            return (T) this;\n        }\n\n        public T stopNormalization(int stopNormalization) {\n            this.stopNormalization = Optional.of(stopNormalization);\n            return (T) this;\n        }\n\n        public T outputAfter(int outputAfter) {\n            this.outputAfter = Optional.of(outputAfter);\n            return (T) this;\n        }\n\n        public T numberOfTrees(int numberOfTrees) {\n            this.numberOfTrees = numberOfTrees;\n            return 
(T) this;\n        }\n\n        public T shingleSize(int shingleSize) {\n            this.shingleSize = shingleSize;\n            return (T) this;\n        }\n\n        public T timeDecay(double timeDecay) {\n            this.timeDecay = Optional.of(timeDecay);\n            return (T) this;\n        }\n\n        public T transformDecay(double transformDecay) {\n            this.transformDecay = Optional.of(transformDecay);\n            return (T) this;\n        }\n\n        public T randomSeed(long randomSeed) {\n            this.randomSeed = Optional.of(randomSeed);\n            return (T) this;\n        }\n\n        public T centerOfMassEnabled(boolean centerOfMassEnabled) {\n            this.centerOfMassEnabled = centerOfMassEnabled;\n            return (T) this;\n        }\n\n        public T parallelExecutionEnabled(boolean parallelExecutionEnabled) {\n            this.parallelExecutionEnabled = parallelExecutionEnabled;\n            return (T) this;\n        }\n\n        public T forestMode(ForestMode forestMode) {\n            this.forestMode = forestMode;\n            return (T) this;\n        }\n\n        public T threadPoolSize(int threadPoolSize) {\n            this.threadPoolSize = Optional.of(threadPoolSize);\n            return (T) this;\n        }\n\n        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n            return (T) this;\n        }\n\n        public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {\n            this.boundingBoxCacheFraction = boundingBoxCacheFraction;\n            return (T) this;\n        }\n\n        public T initialAcceptFraction(double initialAcceptFraction) {\n            this.initialAcceptFraction = initialAcceptFraction;\n            return (T) this;\n        }\n\n        public Random getRandom() {\n            // If a random seed was given, use it to create a new Random. 
Otherwise, call\n            // the 0-argument constructor\n            return randomSeed.map(Random::new).orElseGet(Random::new);\n        }\n\n        public T weights(double[] values) {\n            // values cannot be a null\n            this.weights = Arrays.copyOf(values, values.length);\n            return (T) this;\n        }\n\n        public T imputationMethod(ImputationMethod imputationMethod) {\n            this.imputationMethod = imputationMethod;\n            return (T) this;\n        }\n\n        public T transformMethod(TransformMethod method) {\n            this.transformMethod = method;\n            return (T) this;\n        }\n\n        public T fillValues(double[] values) {\n            // values cannot be a null\n            this.fillValues = Arrays.copyOf(values, values.length);\n            return (T) this;\n        }\n\n        public T useImputedFraction(double fraction) {\n            this.useImputedFraction = Optional.of(fraction);\n            return (T) this;\n        }\n\n        public T weightTime(double value) {\n            this.weightTime = Optional.of(value);\n            return (T) this;\n        }\n\n        public T normalizeTime(boolean normalizeTime) {\n            this.normalizeTime = normalizeTime;\n            return (T) this;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/RandomCutForest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.summarization.Summarizer.DEFAULT_SEPARATION_RATIO_FOR_MERGE;\nimport static java.lang.Math.max;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Optional;\nimport java.util.Random;\nimport java.util.function.BiFunction;\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\nimport java.util.stream.Collector;\n\nimport com.amazon.randomcutforest.anomalydetection.AnomalyAttributionVisitor;\nimport com.amazon.randomcutforest.anomalydetection.AnomalyScoreVisitor;\nimport com.amazon.randomcutforest.anomalydetection.DynamicAttributionVisitor;\nimport com.amazon.randomcutforest.anomalydetection.DynamicScoreVisitor;\nimport com.amazon.randomcutforest.anomalydetection.SimulatedTransductiveScalarScoreVisitor;\nimport com.amazon.randomcutforest.config.Config;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.executor.AbstractForestTraversalExecutor;\nimport 
com.amazon.randomcutforest.executor.AbstractForestUpdateExecutor;\nimport com.amazon.randomcutforest.executor.IStateCoordinator;\nimport com.amazon.randomcutforest.executor.ParallelForestTraversalExecutor;\nimport com.amazon.randomcutforest.executor.ParallelForestUpdateExecutor;\nimport com.amazon.randomcutforest.executor.PointStoreCoordinator;\nimport com.amazon.randomcutforest.executor.SamplerPlusTree;\nimport com.amazon.randomcutforest.executor.SequentialForestTraversalExecutor;\nimport com.amazon.randomcutforest.executor.SequentialForestUpdateExecutor;\nimport com.amazon.randomcutforest.imputation.ConditionalSampleSummarizer;\nimport com.amazon.randomcutforest.imputation.ImputeVisitor;\nimport com.amazon.randomcutforest.inspect.NearNeighborVisitor;\nimport com.amazon.randomcutforest.interpolation.SimpleInterpolationVisitor;\nimport com.amazon.randomcutforest.returntypes.ConditionalTreeSample;\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.InterpolationMeasure;\nimport com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.returntypes.OneSidedConvergingDiVectorAccumulator;\nimport com.amazon.randomcutforest.returntypes.OneSidedConvergingDoubleAccumulator;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.sampler.IStreamSampler;\nimport com.amazon.randomcutforest.store.IPointStore;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.ITree;\nimport 
com.amazon.randomcutforest.tree.RandomCutTree;\nimport com.amazon.randomcutforest.util.ShingleBuilder;\n\n/**\n * The RandomCutForest class is the interface to the algorithms in this package,\n * and includes methods for anomaly detection, anomaly detection with\n * attribution, density estimation, imputation, and forecasting. A Random Cut\n * Forest is a collection of Random Cut Trees and stream samplers. When an\n * update call is made to a Random Cut Forest, each sampler is independently\n * updated with the submitted (and if the point is accepted by the sampler, then\n * the corresponding Random Cut Tree is also updated. Similarly, when an\n * algorithm method is called, the Random Cut Forest proxies to the trees which\n * implement the actual scoring logic. The Random Cut Forest then combines\n * partial results into a final results.\n */\npublic class RandomCutForest {\n\n    /**\n     * Default sample size. This is the number of points retained by the stream\n     * sampler.\n     */\n    public static final int DEFAULT_SAMPLE_SIZE = 256;\n\n    /**\n     * Default fraction used to compute the amount of points required by stream\n     * samplers before results are returned.\n     */\n    public static final double DEFAULT_OUTPUT_AFTER_FRACTION = 0.25;\n\n    /**\n     * If the user doesn't specify an explicit time decay value, then we set it to\n     * the inverse of this coefficient times sample size.\n     */\n    public static final double DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY = 10.0;\n\n    /**\n     * Default number of trees to use in the forest.\n     */\n    public static final int DEFAULT_NUMBER_OF_TREES = 50;\n\n    /**\n     * By default, trees will not store sequence indexes.\n     */\n    public static final boolean DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED = false;\n\n    /**\n     * By default, trees will accept every point until full.\n     */\n    public static final double DEFAULT_INITIAL_ACCEPT_FRACTION = 1.0;\n\n    /**\n     * By 
default, the collection of points stored in the forest will increase from\n     * a small size, as needed to maximum capacity\n     */\n    public static final boolean DEFAULT_DYNAMIC_RESIZING_ENABLED = true;\n\n    /**\n     * By default, shingling will be external\n     */\n    public static final boolean DEFAULT_INTERNAL_SHINGLING_ENABLED = false;\n\n    /**\n     * By default, shingles will be a sliding window and not a cyclic buffer\n     */\n    public static final boolean DEFAULT_INTERNAL_ROTATION_ENABLED = false;\n\n    /**\n     * By default, point stores will favor speed of size for larger shingle sizes\n     */\n    public static final boolean DEFAULT_DIRECT_LOCATION_MAP = false;\n\n    /**\n     * Default floating-point precision for internal data structures.\n     */\n    public static final Precision DEFAULT_PRECISION = Precision.FLOAT_32;\n\n    /**\n     * fraction of bounding boxes maintained by each tree\n     */\n    public static final double DEFAULT_BOUNDING_BOX_CACHE_FRACTION = 1.0;\n\n    /**\n     * By default, nodes will not store center of mass.\n     */\n    public static final boolean DEFAULT_CENTER_OF_MASS_ENABLED = false;\n\n    /**\n     * By default RCF is unaware of shingle size\n     */\n    public static final int DEFAULT_SHINGLE_SIZE = 1;\n\n    /**\n     * Parallel execution is enabled by default.\n     */\n    public static final boolean DEFAULT_PARALLEL_EXECUTION_ENABLED = false;\n\n    public static final boolean DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL = true;\n\n    public static final double DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION = 0.1;\n\n    public static final int DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED = 5;\n\n    /**\n     * Random number generator used by the forest.\n     */\n    protected Random random;\n    /**\n     * The number of dimensions in the input data.\n     */\n    protected final int dimensions;\n    /**\n     * The sample size used by stream samplers in this forest.\n     
*/\n    protected final int sampleSize;\n    /**\n     * The shingle size (if known)\n     */\n    protected final int shingleSize;\n    /**\n     * The input dimensions for known shingle size and internal shingling\n     */\n    protected final int inputDimensions;\n    /**\n     * The number of points required by stream samplers before results are returned.\n     */\n    protected final int outputAfter;\n    /**\n     * The number of trees in this forest.\n     */\n    protected final int numberOfTrees;\n    /**\n     * The decay factor used by stream samplers in this forest.\n     */\n    protected double timeDecay;\n    /**\n     * Store the time information\n     */\n    protected final boolean storeSequenceIndexesEnabled;\n\n    /**\n     * enables internal shingling\n     */\n    protected final boolean internalShinglingEnabled;\n\n    /**\n     * The following can be set between 0 and 1 (inclusive) to achieve tradeoff\n     * between smaller space, lower throughput and larger space, larger throughput\n     */\n    protected final double boundingBoxCacheFraction;\n    /**\n     * Enable center of mass at internal nodes\n     */\n    protected final boolean centerOfMassEnabled;\n    /**\n     * Enable parallel execution.\n     */\n    protected final boolean parallelExecutionEnabled;\n    /**\n     * Number of threads to use in the thread pool if parallel execution is enabled.\n     */\n    protected final int threadPoolSize;\n    /**\n     * A string to define an \"execution mode\" that can be used to set multiple\n     * configuration options. This field is not currently in use.\n     */\n    protected String executionMode;\n\n    protected IStateCoordinator<?, float[]> stateCoordinator;\n    protected ComponentList<?, float[]> components;\n\n    /**\n     * This flag is initialized to false. 
It is set to true when all component\n     * models are ready.\n     */\n    private boolean outputReady;\n\n    /**\n     * used for initializing the compact forests\n     */\n    private final int initialPointStoreSize;\n    private final int pointStoreCapacity;\n\n    /**\n     * An implementation of forest traversal algorithms.\n     */\n    protected AbstractForestTraversalExecutor traversalExecutor;\n\n    /**\n     * An implementation of forest update algorithms.\n     */\n    protected AbstractForestUpdateExecutor<?, float[]> updateExecutor;\n\n    public <P> RandomCutForest(Builder<?> builder, IStateCoordinator<P, float[]> stateCoordinator,\n            ComponentList<P, float[]> components, Random random) {\n        this(builder, false);\n\n        checkNotNull(stateCoordinator, \"updateCoordinator must not be null\");\n        checkNotNull(components, \"componentModels must not be null\");\n        checkNotNull(random, \"random must not be null\");\n\n        this.stateCoordinator = stateCoordinator;\n        this.components = components;\n        this.random = random;\n        initExecutors(stateCoordinator, components);\n    }\n\n    public RandomCutForest(Builder<?> builder) {\n        this(builder, false);\n        random = builder.getRandom();\n\n        PointStore tempStore = PointStore.builder().internalRotationEnabled(builder.internalRotationEnabled)\n                .capacity(pointStoreCapacity).initialSize(initialPointStoreSize)\n                .internalShinglingEnabled(internalShinglingEnabled).shingleSize(shingleSize).dimensions(dimensions)\n                .build();\n\n        IStateCoordinator<Integer, float[]> stateCoordinator = new PointStoreCoordinator<>(tempStore);\n        ComponentList<Integer, float[]> components = new ComponentList<>(numberOfTrees);\n        for (int i = 0; i < numberOfTrees; i++) {\n            ITree<Integer, float[]> tree = new RandomCutTree.Builder().capacity(sampleSize)\n                    
.randomSeed(random.nextLong()).pointStoreView(tempStore)\n                    .boundingBoxCacheFraction(boundingBoxCacheFraction).centerOfMassEnabled(centerOfMassEnabled)\n                    .storeSequenceIndexesEnabled(storeSequenceIndexesEnabled).outputAfter(1).build();\n\n            IStreamSampler<Integer> sampler = CompactSampler.builder().capacity(sampleSize).timeDecay(timeDecay)\n                    .randomSeed(random.nextLong()).storeSequenceIndexesEnabled(storeSequenceIndexesEnabled)\n                    .initialAcceptFraction(builder.initialAcceptFraction).build();\n\n            components.add(new SamplerPlusTree<>(sampler, tree));\n        }\n        this.stateCoordinator = stateCoordinator;\n        this.components = components;\n        initExecutors(stateCoordinator, components);\n    }\n\n    protected <PointReference> void initExecutors(IStateCoordinator<PointReference, float[]> updateCoordinator,\n            ComponentList<PointReference, float[]> components) {\n        if (parallelExecutionEnabled) {\n            traversalExecutor = new ParallelForestTraversalExecutor(components, threadPoolSize);\n            updateExecutor = new ParallelForestUpdateExecutor<>(updateCoordinator, components, threadPoolSize);\n        } else {\n            traversalExecutor = new SequentialForestTraversalExecutor(components);\n            updateExecutor = new SequentialForestUpdateExecutor<>(updateCoordinator, components);\n        }\n    }\n\n    /**\n     * This constructor is responsible for initializing a forest's configuration\n     * variables from a builder. The method signature contains a boolean argument\n     * that isn't used. 
This argument exists only to create a distinct method\n     * signature so that we can expose {@link #RandomCutForest(Builder)} as a\n     * protected constructor.\n     * \n     * @param builder A Builder instance giving the desired random cut forest\n     *                configuration.\n     * @param notUsed This parameter is not used.\n     */\n    protected RandomCutForest(Builder<?> builder, boolean notUsed) {\n        checkArgument(builder.numberOfTrees > 0, \"numberOfTrees must be greater than 0\");\n        checkArgument(builder.sampleSize > 0, \"sampleSize must be greater than 0\");\n        builder.outputAfter.ifPresent(n -> checkArgument(n > 0, \"outputAfter must be greater than 0\"));\n        checkArgument(builder.dimensions > 0, \"dimensions must be greater than 0\");\n        builder.timeDecay\n                .ifPresent(timeDecay -> checkArgument(timeDecay >= 0, \"timeDecay must be greater than or equal to 0\"));\n        builder.threadPoolSize.ifPresent(n -> {\n            checkArgument(n >= 0, \"cannot be negative\");\n            checkArgument((n > 0) || (!builder.parallelExecutionEnabled),\n                    \"threadPoolSize must be greater/equal than 0. 
To disable thread pool, set parallel execution to 'false'.\");\n        });\n        checkArgument(builder.shingleSize == 1 || builder.dimensions % builder.shingleSize == 0, \"wrong shingle size\");\n        if (builder.internalRotationEnabled) {\n            checkArgument(builder.internalShinglingEnabled, \" enable internal shingling\");\n        }\n        builder.initialPointStoreSize\n                .ifPresent(n -> checkArgument(n > 0, \"initial point store must be greater than 0\"));\n        checkArgument(builder.boundingBoxCacheFraction >= 0, \"cache cannot be negative\");\n        checkArgument(builder.boundingBoxCacheFraction <= 1, \"incorrect cache fraction range\");\n        numberOfTrees = builder.numberOfTrees;\n        sampleSize = builder.sampleSize;\n        outputAfter = builder.outputAfter.orElse(max(1, (int) (sampleSize * DEFAULT_OUTPUT_AFTER_FRACTION)));\n        internalShinglingEnabled = builder.internalShinglingEnabled;\n        shingleSize = builder.shingleSize;\n        dimensions = builder.dimensions;\n        timeDecay = builder.timeDecay.orElse(1.0 / (DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY * sampleSize));\n        storeSequenceIndexesEnabled = builder.storeSequenceIndexesEnabled;\n        centerOfMassEnabled = builder.centerOfMassEnabled;\n        parallelExecutionEnabled = builder.parallelExecutionEnabled;\n        boundingBoxCacheFraction = builder.boundingBoxCacheFraction;\n        builder.directLocationMapEnabled = builder.directLocationMapEnabled || shingleSize == 1;\n        inputDimensions = (internalShinglingEnabled) ? 
dimensions / shingleSize : dimensions;\n        pointStoreCapacity = max(sampleSize * numberOfTrees + 1, 2 * sampleSize);\n        initialPointStoreSize = builder.initialPointStoreSize.orElse(2 * sampleSize);\n\n        if (parallelExecutionEnabled) {\n            threadPoolSize = builder.threadPoolSize.orElse(Runtime.getRuntime().availableProcessors() - 1);\n        } else {\n            threadPoolSize = 0;\n        }\n    }\n\n    /**\n     * @return a new RandomCutForest builder.\n     */\n    public static Builder builder() {\n        return new Builder();\n    }\n\n    /**\n     * Create a new RandomCutForest with optional arguments set to default values.\n     *\n     * @param dimensions The number of dimension in the input data.\n     * @param randomSeed The random seed to use to create the forest random number\n     *                   generator\n     * @return a new RandomCutForest with optional arguments set to default values.\n     */\n    public static RandomCutForest defaultForest(int dimensions, long randomSeed) {\n        return builder().dimensions(dimensions).randomSeed(randomSeed).build();\n    }\n\n    /**\n     * Create a new RandomCutForest with optional arguments set to default values.\n     *\n     * @param dimensions The number of dimension in the input data.\n     * @return a new RandomCutForest with optional arguments set to default values.\n     */\n    public static RandomCutForest defaultForest(int dimensions) {\n        return builder().dimensions(dimensions).build();\n    }\n\n    /**\n     * @return the number of trees in the forest.\n     */\n    public int getNumberOfTrees() {\n        return numberOfTrees;\n    }\n\n    /**\n     * @return the sample size used by stream samplers in this forest.\n     */\n    public int getSampleSize() {\n        return sampleSize;\n    }\n\n    /**\n     * @return the shingle size used by the point store.\n     */\n    public int getShingleSize() {\n        return shingleSize;\n    }\n\n    /**\n  
   * @return the number of points required by stream samplers before results are\n     *         returned.\n     */\n    public int getOutputAfter() {\n        return outputAfter;\n    }\n\n    /**\n     * @return the number of dimensions in the data points accepted by this forest.\n     */\n    public int getDimensions() {\n        return dimensions;\n    }\n\n    /**\n     * @return return the decay factor used by stream samplers in this forest.\n     */\n    public double getTimeDecay() {\n        return timeDecay;\n    }\n\n    /**\n     * @return true if points are saved with sequence indexes, false otherwise.\n     */\n    public boolean isStoreSequenceIndexesEnabled() {\n        return storeSequenceIndexesEnabled;\n    }\n\n    /**\n     * For compact forests, users can choose to specify the desired floating-point\n     * precision to use internally to store points. Choosing single-precision will\n     * reduce the memory size of the model at the cost of requiring double/float\n     * conversions.\n     *\n     * @return the desired precision to use internally to store points.\n     */\n    public Precision getPrecision() {\n        return Precision.FLOAT_32;\n    }\n\n    @Deprecated\n    public boolean isCompact() {\n        return true;\n    }\n\n    /**\n     * @return true if internal shingling is performed, false otherwise.\n     */\n    public boolean isInternalShinglingEnabled() {\n        return internalShinglingEnabled;\n    }\n\n    /**\n     * @return true if tree nodes retain the center of mass, false otherwise.\n     */\n    public boolean isCenterOfMassEnabled() {\n        return centerOfMassEnabled;\n    }\n\n    /**\n     * @return true if parallel execution is enabled, false otherwise.\n     */\n    public boolean isParallelExecutionEnabled() {\n        return parallelExecutionEnabled;\n    }\n\n    public double getBoundingBoxCacheFraction() {\n        return boundingBoxCacheFraction;\n    }\n\n    /**\n     * @return the number of threads 
in the thread pool if parallel execution is\n     *         enabled, 0 otherwise.\n     */\n    public int getThreadPoolSize() {\n        return threadPoolSize;\n    }\n\n    public IStateCoordinator<?, ?> getUpdateCoordinator() {\n        return stateCoordinator;\n    }\n\n    public ComponentList<?, ?> getComponents() {\n        return components;\n    }\n\n    /**\n     * used for scoring and other function, expands to a shingled point in either\n     * case performs a clean copy\n     * \n     * @param point input point\n     * @return a shingled copy or a clean copy\n     */\n\n    public float[] transformToShingledPoint(float[] point) {\n        return stateCoordinator.getStore().transformToShingledPoint(point);\n    }\n\n    /**\n     * does the pointstore use rotated shingles\n     * \n     * @return true/false based on pointstore\n     */\n    public boolean isRotationEnabled() {\n        return stateCoordinator.getStore().isInternalRotationEnabled();\n    }\n\n    /**\n     * transforms the missing indices on the input point to the corresponding\n     * indices of a shingled point\n     * \n     * @param indexList input array of missing values\n     * @param length    length of the input array\n     * @return output array of missing values corresponding to shingle\n     */\n    protected int[] transformIndices(int[] indexList, int length) {\n        return (internalShinglingEnabled && length == inputDimensions)\n                ? stateCoordinator.getStore().transformIndices(indexList)\n                : indexList;\n    }\n\n    /**\n     *\n     * @return the last known shingled point seen\n     */\n    public float[] lastShingledPoint() {\n        checkArgument(internalShinglingEnabled, \"incorrect use\");\n        return stateCoordinator.getStore().getInternalShingle();\n    }\n\n    /**\n     *\n     * @return the sequence index of the last known shingled point. 
If internal\n     *         shingling is not enabled, then this would correspond to the number of\n     *         updates\n     */\n    public long nextSequenceIndex() {\n        return stateCoordinator.getStore().getNextSequenceIndex();\n    }\n\n    /**\n     * Update the forest with the given point. The point is submitted to each\n     * sampler in the forest. If the sampler accepts the point, the point is\n     * submitted to the update method in the corresponding Random Cut Tree.\n     *\n     * @param point             The point used to update the forest.\n     * @param updateShingleOnly only update the shingle (true for internal\n     *                          shingling)\n     */\n\n    public void update(float[] point, boolean updateShingleOnly) {\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(internalShinglingEnabled || point.length == dimensions,\n                String.format(\"point.length must equal %d\", dimensions));\n        checkArgument(!internalShinglingEnabled || point.length == inputDimensions,\n                String.format(\"point.length must equal %d for internal shingling\", inputDimensions));\n        checkArgument(!updateShingleOnly || internalShinglingEnabled,\n                \"update shingle setting is only valid for internal shingling\");\n\n        updateExecutor.update(point, updateShingleOnly);\n    }\n\n    @Deprecated\n    public void update(double[] point) {\n        update(toFloatArray(point), false);\n    }\n\n    public void update(float[] point) {\n        update(point, false);\n    }\n\n    /**\n     * Update the forest with the given point and a timestamp. The point is\n     * submitted to each sampler in the forest as if that timestamp was the correct\n     * stamp. 
storeSequenceIndexes must be false since the algorithm will not verify\n     * the correctness of the timestamp.\n     *\n     * @param point       The point used to update the forest.\n     * @param sequenceNum The timestamp of the corresponding point\n     */\n    public void update(double[] point, long sequenceNum) {\n        checkNotNull(point, \"point must not be null\");\n        update(toFloatArray(point), sequenceNum);\n    }\n\n    public void update(float[] point, long sequenceNum) {\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(!internalShinglingEnabled, \"cannot be applied with internal shingling\");\n        checkArgument(point.length == dimensions, () -> \"point.length must equal to \" + dimensions);\n        updateExecutor.update(point, sequenceNum);\n    }\n\n    /**\n     * Update the forest such that each tree caches a fraction of the bounding\n     * boxes. This allows for a tradeoff between speed and storage.\n     *\n     * @param cacheFraction The (approximate) fraction of bounding boxes used in\n     *                      caching.\n     */\n    public void setBoundingBoxCacheFraction(double cacheFraction) {\n        checkArgument(0 <= cacheFraction, \"cache cannot be negative\");\n        checkArgument(cacheFraction <= 1, \"cacheFraction must be between 0 and 1 (inclusive)\");\n        updateExecutor.getComponents().forEach(c -> c.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, cacheFraction));\n    }\n\n    /**\n     * changes the setting of time dependent sampling on the fly\n     * \n     * @param timeDecay new value of sampling rate\n     */\n    public void setTimeDecay(double timeDecay) {\n        checkArgument(0 <= timeDecay, \"timeDecay must be greater than or equal to 0\");\n        this.timeDecay = timeDecay;\n        updateExecutor.getComponents().forEach(c -> c.setConfig(Config.TIME_DECAY, timeDecay));\n    }\n\n    /**\n     * Visit each of the trees in the forest and combine the individual 
results into\n     * an aggregate result. A visitor is constructed for each tree using the visitor\n     * factory, and then submitted to\n     * {@link RandomCutTree#traverse(float[], IVisitorFactory)}. The results from\n     * all the trees are combined using the accumulator and then transformed using\n     * the finisher before being returned. Trees are visited in parallel using\n     * {@link java.util.Collection#parallelStream()}.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param accumulator    A function that combines the results from individual\n     *                       trees into an aggregate result.\n     * @param finisher       A function called on the aggregate result in order to\n     *                       produce the final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, BinaryOperator<R> accumulator,\n            Function<R, S> finisher) {\n\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(point.length == dimensions, () -> \"point.length must equal to \" + dimensions);\n        checkNotNull(visitorFactory, \"visitorFactory must not be null\");\n        checkNotNull(accumulator, \"accumulator must not be null\");\n        checkNotNull(finisher, \"finisher must not be null\");\n\n        return traversalExecutor.traverseForest(point, visitorFactory, accumulator, finisher);\n    
}\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. A visitor is constructed for each tree using the visitor\n     * factory, and then submitted to\n     * {@link RandomCutTree#traverse(float[], IVisitorFactory)}. The results from\n     * individual trees are collected using the {@link java.util.stream.Collector}\n     * and returned. Trees are visited in parallel using\n     * {@link java.util.Collection#parallelStream()}.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param collector      A collector used to aggregate individual tree results\n     *                       into a final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, Collector<R, ?, S> collector) {\n\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(point.length == dimensions, () -> \"point.length must equal to \" + dimensions);\n        checkNotNull(visitorFactory, \"visitorFactory must not be null\");\n        checkNotNull(collector, \"collector must not be null\");\n\n        return traversalExecutor.traverseForest(point, visitorFactory, collector);\n    }\n\n    /**\n     * Visit each of the trees in the forest sequentially and combine the individual\n     * results into an aggregate result. 
A visitor is constructed for each tree\n     * using the visitor factory, and then submitted to\n     * {@link RandomCutTree#traverse(float[], IVisitorFactory)}. The results from\n     * all the trees are combined using the {@link ConvergingAccumulator}, and the\n     * method stops visiting trees after convergence is reached. The result is\n     * transformed using the finisher before being returned.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param accumulator    An accumulator that combines the results from\n     *                       individual trees into an aggregate result and checks to\n     *                       see if the result can be returned without further\n     *                       processing.\n     * @param finisher       A function called on the aggregate result in order to\n     *                       produce the final result.\n     * @param <R>            The visitor result type. 
This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,\n            ConvergingAccumulator<R> accumulator, Function<R, S> finisher) {\n\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(point.length == dimensions, () -> \"point.length must equal to \" + dimensions);\n        checkNotNull(visitorFactory, \"visitorFactory must not be null\");\n        checkNotNull(accumulator, \"accumulator must not be null\");\n        checkNotNull(finisher, \"finisher must not be null\");\n\n        return traversalExecutor.traverseForest(point, visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. A multi-visitor is constructed for each tree using the\n     * visitor factory, and then submitted to\n     * {@link RandomCutTree#traverseMulti(float[], IMultiVisitorFactory)}. 
The\n     * results from all the trees are combined using the accumulator and then\n     * transformed using the finisher before being returned.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a multi-visitor.\n     * @param accumulator    A function that combines the results from individual\n     *                       trees into an aggregate result.\n     * @param finisher       A function called on the aggregate result in order to\n     *                       produce the final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            BinaryOperator<R> accumulator, Function<R, S> finisher) {\n\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(point.length == dimensions, () -> \"point.length must equal to \" + dimensions);\n        checkNotNull(visitorFactory, \"visitorFactory must not be null\");\n        checkNotNull(accumulator, \"accumulator must not be null\");\n        checkNotNull(finisher, \"finisher must not be null\");\n\n        return traversalExecutor.traverseForestMulti(point, visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. 
A multi-visitor is constructed for each tree using the\n     * visitor factory, and then submitted to\n     * {@link RandomCutTree#traverseMulti(float[], IMultiVisitorFactory)}. The\n     * results from individual trees are collected using the\n     * {@link java.util.stream.Collector} and returned. Trees are visited in\n     * parallel using {@link java.util.Collection#parallelStream()}.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param collector      A collector used to aggregate individual tree results\n     *                       into a final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            Collector<R, ?, S> collector) {\n\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(point.length == dimensions, () -> \"point.length must equal to \" + dimensions);\n        checkNotNull(visitorFactory, \"visitorFactory must not be null\");\n        checkNotNull(collector, \"collector must not be null\");\n\n        return traversalExecutor.traverseForestMulti(point, visitorFactory, collector);\n    }\n\n    /**\n     * Compute an anomaly score for the given point. The point being scored is\n     * compared with the points in the sample to compute a measure of how anomalous\n     * it is. Scores are greater than 0, with higher scores corresponding to being\n     * more anomalous. 
A threshold of 1.0 is commonly used to distinguish anomalous\n     * points from non-anomalous ones.\n     * <p>\n     * See {@link AnomalyScoreVisitor} for more details about the anomaly score\n     * algorithm.\n     *\n     * @param point The point being scored.\n     * @return an anomaly score for the given point.\n     */\n    @Deprecated\n    public double getAnomalyScore(double[] point) {\n        return getAnomalyScore(toFloatArray(point));\n    }\n\n    public double getAnomalyScore(float[] point) {\n        if (!isOutputReady()) {\n            return 0.0;\n        }\n\n        IVisitorFactory<Double> visitorFactory = (tree, x) -> new AnomalyScoreVisitor(tree.projectToTree(x),\n                tree.getMass());\n        BinaryOperator<Double> accumulator = Double::sum;\n        Function<Double, Double> finisher = x -> x / numberOfTrees;\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Anomaly score evaluated sequentially with option of early stopping the early\n     * stopping parameter precision gives an approximate solution in the range\n     * (1-precision)*score(q)- precision, (1+precision)*score(q) + precision for the\n     * score of a point q. In this function z is hardcoded to 0.1. If this function\n     * is used, then not all the trees will be used in evaluation (but they have to\n     * be updated anyways, because they may be used for the next q). 
The advantage\n     * is that \"almost certainly\" anomalies/non-anomalies can be detected easily\n     * with few trees.\n     *\n     * @param point input point q\n     * @return anomaly score with early stopping with z=0.1\n     */\n    @Deprecated\n    public double getApproximateAnomalyScore(double[] point) {\n        return getApproximateAnomalyScore(toFloatArray(point));\n    }\n\n    public double getApproximateAnomalyScore(float[] point) {\n        if (!isOutputReady()) {\n            return 0.0;\n        }\n\n        IVisitorFactory<Double> visitorFactory = (tree, x) -> new AnomalyScoreVisitor(tree.projectToTree(x),\n                tree.getMass());\n\n        ConvergingAccumulator<Double> accumulator = new OneSidedConvergingDoubleAccumulator(\n                DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL, DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION,\n                DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);\n\n        Function<Double, Double> finisher = x -> x / accumulator.getValuesAccepted();\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Compute an anomaly score attribution DiVector for the given point. The point\n     * being scored is compared with the points in the sample to compute a measure\n     * of how anomalous it is. 
The result DiVector will contain an anomaly score in\n     * both the positive and negative directions for each dimension of the data.\n     * <p>\n     * See {@link AnomalyAttributionVisitor} for more details about the anomaly\n     * score algorithm.\n     *\n     * @param point The point being scored.\n     * @return an anomaly score for the given point.\n     */\n    public DiVector getAnomalyAttribution(double[] point) {\n        return getAnomalyAttribution(toFloatArray(point));\n    }\n\n    public DiVector getAnomalyAttribution(float[] point) {\n        // this will return the same (modulo floating point summation) L1Norm as\n        // getAnomalyScore\n        if (!isOutputReady()) {\n            return new DiVector(dimensions);\n        }\n\n        IVisitorFactory<DiVector> visitorFactory = new VisitorFactory<>(\n                (tree, y) -> new AnomalyAttributionVisitor(tree.projectToTree(y), tree.getMass()),\n                (tree, x) -> x.lift(tree::liftFromTree));\n        BinaryOperator<DiVector> accumulator = DiVector::addToLeft;\n        Function<DiVector, DiVector> finisher = x -> x.scale(1.0 / numberOfTrees);\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Sequential version of attribution corresponding to getAnomalyScoreSequential;\n     * The high-low sum in the result should be the same as the scalar score\n     * computed by {@link #getAnomalyScore(double[])}.\n     *\n     * @param point The point being scored.\n     * @return anomaly attribution for the given point.\n     */\n    public DiVector getApproximateAnomalyAttribution(double[] point) {\n        return getApproximateAnomalyAttribution(toFloatArray(point));\n    }\n\n    public DiVector getApproximateAnomalyAttribution(float[] point) {\n        if (!isOutputReady()) {\n            return new DiVector(dimensions);\n        }\n\n        IVisitorFactory<DiVector> visitorFactory = new 
VisitorFactory<>(\n                (tree, y) -> new AnomalyAttributionVisitor(tree.projectToTree(y), tree.getMass()),\n                (tree, x) -> x.lift(tree::liftFromTree));\n\n        ConvergingAccumulator<DiVector> accumulator = new OneSidedConvergingDiVectorAccumulator(dimensions,\n                DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL, DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION,\n                DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);\n\n        Function<DiVector, DiVector> finisher = x -> x.scale(1.0 / accumulator.getValuesAccepted());\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Compute a density estimate at the given point.\n     * <p>\n     * See {@link SimpleInterpolationVisitor} and {@link DensityOutput} for more\n     * details about the density computation.\n     *\n     * @param point The point where the density estimate is made.\n     * @return A density estimate.\n     */\n    @Deprecated\n    public DensityOutput getSimpleDensity(double[] point) {\n        return getSimpleDensity(toFloatArray(point));\n    }\n\n    public DensityOutput getSimpleDensity(float[] point) {\n\n        // density estimation should use sufficiently larger number of samples\n        // and only return answers when full\n\n        if (!isOutputReady()) {\n            return new DensityOutput(dimensions, sampleSize);\n        }\n\n        IVisitorFactory<InterpolationMeasure> visitorFactory = new VisitorFactory<>((tree,\n                y) -> new SimpleInterpolationVisitor(tree.projectToTree(y), tree.getMass(), 1.0, centerOfMassEnabled),\n                (tree, x) -> x.lift(tree::liftFromTree));\n        Collector<InterpolationMeasure, ?, InterpolationMeasure> collector = InterpolationMeasure.collector(dimensions,\n                0, numberOfTrees);\n        
// a single forest traversal aggregates the interpolation measures from all trees;\n        // wrapping the result in DensityOutput exposes the density estimate\n        return new DensityOutput(traverseForest(transformToShingledPoint(point), visitorFactory, collector));\n    }\n\n    /**\n     * Given a point with missing values, return a collection of treesamples. These\n     * tree samples can be postprocessed in a variety of ways -- primarily to\n     * produce summaries and imputation. The treesamples correspond to pointstore\n     * index, distances to tree points (excluding the missing values) the actual\n     * point at the leaf and the tree sample is 1 for each tree.\n     *\n     *\n     * @param point          A point with missing values.\n     * @param missingIndexes An array containing the indexes of the missing values\n     *                       in the point. The length of the array should be greater\n     *                       than or equal to the number of missing values.\n     * @param centrality     a parameter that provides a central estimation versus a\n     *                       more random estimation\n     * @return A collection of tree samples\n     */\n    protected List<ConditionalTreeSample> getConditionalField(float[] point, int[] missingIndexes, double centrality) {\n\n        // missing indexes can be null -- but then getNearNeighborsInSample may be more\n        // efficient\n        checkArgument(centrality >= 0, \" cannot be negative\");\n        checkArgument(centrality <= 1, \"centrality needs to be in range [0,1]\");\n        checkArgument(point != null, \" cannot be null\");\n        if (!isOutputReady()) {\n            return new ArrayList<>();\n        }\n\n        int[] liftedIndices = transformIndices(missingIndexes, point.length);\n        IMultiVisitorFactory<ConditionalTreeSample> visitorFactory = (tree, y) -> new ImputeVisitor(y,\n                tree.projectToTree(y), liftedIndices, tree.projectMissingIndices(liftedIndices), centrality,\n                tree.getRandomSeed());\n        return traverseForestMulti(transformToShingledPoint(point), 
visitorFactory, ConditionalTreeSample.collector);\n    }\n\n    /**\n     * The function returns summary statistics of points close to a query point\n     * (with possible missing values). The statics can perform an optional\n     * multicentroid clustering\n     * \n     * @param point                   the query point\n     * @param missingIndexes          the list of positions which are missing\n     * @param numberOfRepresentatives number of representatives in a cluster\n     * @param shrinkage               controls the shape of clusters (=0 corresponds\n     *                                to spanning trees, and =1 corresponds to\n     *                                centroidal clustering)\n     * @param addtypical              an option to perform the clustering/not\n     * @param project                 should the clustring/statistics be computed\n     *                                only on the data projected to the entries in\n     *                                missingIndexes\n     * @param centrality              how closely should each tree try to predict\n     *                                the missing values =0 implies loosely, =1\n     *                                implies closely\n     * @param shingleSize             the effective shingleSize -- the\n     *                                clustering/statistics would be projected to\n     *                                the last dimension/shinglesize values\n     * @return a summary of the predictions returned by each tree\n     */\n    public SampleSummary getConditionalFieldSummary(float[] point, int[] missingIndexes, int numberOfRepresentatives,\n            double shrinkage, boolean addtypical, boolean project, double centrality, int shingleSize) {\n        // missing indexes can be null -- but then getNearNeighborsInSample may be more\n        // efficient\n        checkArgument(centrality >= 0, \" cannot be negative\");\n        checkArgument(centrality <= 1, \"centrality needs to 
be in range [0,1]\");\n        checkArgument(point != null, \" cannot be null\");\n        if (!isOutputReady()) {\n            return new SampleSummary(dimensions);\n        }\n\n        int[] liftedIndices = transformIndices(missingIndexes, point.length);\n        ConditionalSampleSummarizer summarizer = new ConditionalSampleSummarizer(liftedIndices,\n                transformToShingledPoint(point), centrality, project, numberOfRepresentatives, shrinkage, shingleSize);\n        return summarizer.summarize(getConditionalField(point, missingIndexes, centrality), addtypical);\n    }\n\n    /**\n     * Given a point with missing values, return a new point with the missing values\n     * imputed. Each tree in the forest individual produces an imputed value. The\n     * median imputed value is returned. This can be improved using\n     * getConditionalSummary or getConditionalField\n     *\n     * @param point          A point with missing values.\n     * @param missingIndexes An array containing the indexes of the missing values\n     *                       in the point. 
The length of the array should be greater\n     *                       than or equal to the number of missing values.\n     * @return A point with the missing values imputed.\n     */\n\n    public float[] imputeMissingValues(float[] point, int[] missingIndexes) {\n        return getConditionalFieldSummary(point, missingIndexes, 1, 0, false, false, 1.0, 1).median;\n    }\n\n    // number of missing values is redundant\n    @Deprecated\n    public float[] imputeMissingValues(float[] point, int numberOfMissingValues, int[] missingIndexes) {\n        return imputeMissingValues(point, missingIndexes);\n    }\n\n    @Deprecated\n    public double[] imputeMissingValues(double[] point, int numberOfMissingValues, int[] missingIndexes) {\n        return toDoubleArray(imputeMissingValues(toFloatArray(point), numberOfMissingValues, missingIndexes));\n    }\n\n    /**\n     * Given an initial shingled point, extrapolate the stream into the future to\n     * produce a forecast. This method is intended to be called when the input data\n     * is being shingled, and it works by imputing forward one shingle block at a\n     * time.\n     *\n     * @param point        The starting point for extrapolation.\n     * @param horizon      The number of blocks to forecast.\n     * @param blockSize    The number of entries in a block. This should be the same\n     *                     as the size of a single input to the shingle.\n     * @param cyclic       If true then the shingling is cyclic, otherwise it's a\n     *                     sliding shingle.\n     * @param shingleIndex If cyclic is true, then this should be the current index\n     *                     in the shingle. That is, the index where the next point\n     *                     added to the shingle would be written. 
If cyclic is false\n     *                     then this value is not used.\n     * @return a forecasted time series.\n     */\n    @Deprecated\n    double[] extrapolateBasic(double[] point, int horizon, int blockSize, boolean cyclic, int shingleIndex) {\n        return toDoubleArray(extrapolateBasic(toFloatArray(point), horizon, blockSize, cyclic, shingleIndex));\n    }\n\n    @Deprecated\n    float[] extrapolateBasic(float[] point, int horizon, int blockSize, boolean cyclic, int shingleIndex) {\n        return extrapolateWithRanges(point, horizon, blockSize, cyclic, shingleIndex, 1.0).values;\n    }\n\n    // the following is provided for maximum flexibilty from the calling entity;\n    // but likely use is extrapolateFromShingle(), which abstracts away rotation\n    // etc.\n    public RangeVector extrapolateWithRanges(float[] point, int horizon, int blockSize, boolean cyclic,\n            int shingleIndex, double centrality) {\n        checkArgument(0 < blockSize && blockSize < dimensions,\n                \"blockSize must be between 0 and dimensions (exclusive)\");\n        checkArgument(dimensions % blockSize == 0, \"dimensions must be evenly divisible by blockSize\");\n        checkArgument(0 <= shingleIndex && shingleIndex < dimensions / blockSize,\n                \"shingleIndex must be between 0 (inclusive) and dimensions / blockSize\");\n\n        RangeVector result = new RangeVector(blockSize * horizon);\n        int[] missingIndexes = new int[blockSize];\n        float[] queryPoint = Arrays.copyOf(point, dimensions);\n\n        if (cyclic) {\n            extrapolateBasicCyclic(result, horizon, blockSize, shingleIndex, queryPoint, missingIndexes, centrality);\n        } else {\n            extrapolateBasicSliding(result, horizon, blockSize, queryPoint, missingIndexes, centrality);\n        }\n\n        return result;\n    }\n\n    // external management of shingle; can function for both internal and external\n    // shingling\n    // however blocksize 
has to be externally managed\n\n    @Deprecated\n    RangeVector extrapolateFromShingle(float[] shingle, int horizon, int blockSize, double centrality) {\n        return extrapolateWithRanges(shingle, horizon, blockSize, isRotationEnabled(),\n                ((int) nextSequenceIndex()) % shingleSize, centrality);\n    }\n\n    /**\n     * Given an initial shingled point, extrapolate the stream into the future to\n     * produce a forecast. This method is intended to be called when the input data\n     * is being shingled, and it works by imputing forward one shingle block at a\n     * time. If the shingle is cyclic, then this method uses 0 as the shingle index.\n     *\n     * @param point     The starting point for extrapolation.\n     * @param horizon   The number of blocks to forecast.\n     * @param blockSize The number of entries in a block. This should be the same as\n     *                  the size of a single input to the shingle.\n     * @param cyclic    If true then the shingling is cyclic, otherwise it's a\n     *                  sliding shingle.\n     * @return a forecasted time series.\n     */\n    @Deprecated\n    double[] extrapolateBasic(double[] point, int horizon, int blockSize, boolean cyclic) {\n        return toDoubleArray(extrapolateBasic(toFloatArray(point), horizon, blockSize, cyclic, 0));\n    }\n\n    protected float[] extrapolateBasic(float[] point, int horizon, int blockSize, boolean cyclic) {\n        return extrapolateBasic(point, horizon, blockSize, cyclic, 0);\n    }\n\n    /**\n     * Given a shingle builder, extrapolate the stream into the future to produce a\n     * forecast. 
This method assumes you are passing in the shingle builder used to\n     * preprocess points before adding them to this forest.\n     *\n     * @param builder The shingle builder used to process points before adding them\n     *                to the forest.\n     * @param horizon The number of blocks to forecast.\n     * @return a forecasted time series.\n     */\n    @Deprecated\n    public double[] extrapolateBasic(ShingleBuilder builder, int horizon) {\n        return toDoubleArray(extrapolateBasic(toFloatArray(builder.getShingle()), horizon, builder.getInputPointSize(),\n                builder.isCyclic(), builder.getShingleIndex()));\n    }\n\n    void extrapolateBasicSliding(RangeVector result, int horizon, int blockSize, float[] queryPoint,\n            int[] missingIndexes, double centrality) {\n        int resultIndex = 0;\n\n        Arrays.fill(missingIndexes, 0);\n        for (int y = 0; y < blockSize; y++) {\n            missingIndexes[y] = dimensions - blockSize + y;\n        }\n\n        for (int k = 0; k < horizon; k++) {\n            // shift all entries in the query point left by 1 block\n            System.arraycopy(queryPoint, blockSize, queryPoint, 0, dimensions - blockSize);\n\n            SampleSummary imputedSummary = getConditionalFieldSummary(queryPoint, missingIndexes, 1, 0, false, false,\n                    centrality, dimensions / blockSize);\n            for (int y = 0; y < blockSize; y++) {\n                result.values[resultIndex] = queryPoint[dimensions - blockSize + y] = imputedSummary.median[y];\n                result.lower[resultIndex] = imputedSummary.lower[y];\n                result.upper[resultIndex] = imputedSummary.upper[y];\n                resultIndex++;\n            }\n        }\n    }\n\n    void extrapolateBasicCyclic(RangeVector result, int horizon, int blockSize, int shingleIndex, float[] queryPoint,\n            int[] missingIndexes, double centrality) {\n\n        int resultIndex = 0;\n        int 
currentPosition = shingleIndex;\n        Arrays.fill(missingIndexes, 0);\n\n        for (int k = 0; k < horizon; k++) {\n            for (int y = 0; y < blockSize; y++) {\n                missingIndexes[y] = (currentPosition + y) % dimensions;\n            }\n\n            SampleSummary imputedSummary = getConditionalFieldSummary(queryPoint, missingIndexes, 1, 0, false, false,\n                    centrality, 1);\n\n            for (int y = 0; y < blockSize; y++) {\n                result.values[resultIndex] = queryPoint[(currentPosition + y)\n                        % dimensions] = imputedSummary.median[(currentPosition + y) % dimensions];\n                result.lower[resultIndex] = imputedSummary.lower[(currentPosition + y) % dimensions];\n                result.upper[resultIndex] = imputedSummary.upper[(currentPosition + y) % dimensions];\n                resultIndex++;\n            }\n\n            currentPosition = (currentPosition + blockSize) % dimensions;\n        }\n    }\n\n    /**\n     * Extrapolate the stream into the future to produce a forecast. 
This method is\n     * intended to be called when the input data is being shingled internally, and\n     * it works by imputing forward one shingle block at a time.\n     *\n     * @param horizon The number of blocks to forecast.\n     * @return a forecasted time series.\n     */\n    public double[] extrapolate(int horizon) {\n        return toDoubleArray(extrapolateFromCurrentTime(horizon));\n    }\n\n    public float[] extrapolateFromCurrentTime(int horizon) {\n        checkArgument(internalShinglingEnabled, \"incorrect use\");\n        IPointStore<?, ?> store = stateCoordinator.getStore();\n        return extrapolateBasic(lastShingledPoint(), horizon, inputDimensions, store.isInternalRotationEnabled(),\n                ((int) nextSequenceIndex()) % shingleSize);\n    }\n\n    /**\n     * For each tree in the forest, follow the tree traversal path and return the\n     * leaf node if the standard Euclidean distance between the query point and the\n     * leaf point is smaller than the given threshold. Note that this will not\n     * necessarily be the nearest point in the tree, because the traversal path is\n     * determined by the random cuts in the tree. 
If the same leaf point is found in\n     * multiple trees, those results will be combined into a single Neighbor in the\n     * result.\n     *\n     * If sequence indexes are disabled for this forest, then the list of sequence\n     * indexes will be empty in returned Neighbors.\n     *\n     * @param point             A point whose neighbors we want to find.\n     * @param distanceThreshold The maximum Euclidean distance for a point to be\n     *                          considered a neighbor.\n     * @return a list of Neighbors, ordered from closest to furthest.\n     */\n    @Deprecated\n    public List<Neighbor> getNearNeighborsInSample(double[] point, double distanceThreshold) {\n        return getNearNeighborsInSample(toFloatArray(point), distanceThreshold);\n    }\n\n    public List<Neighbor> getNearNeighborsInSample(float[] point, double distanceThreshold) {\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(distanceThreshold > 0, \"distanceThreshold must be greater than 0\");\n\n        if (!isOutputReady()) {\n            return Collections.emptyList();\n        }\n\n        IVisitorFactory<Optional<Neighbor>> visitorFactory = (tree, x) -> new NearNeighborVisitor(x, distanceThreshold);\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, Neighbor.collector());\n    }\n\n    /**\n     * For each tree in the forest, follow the tree traversal path and return the\n     * leaf node. Note that this will not necessarily be the nearest point in the\n     * tree, because the traversal path is determined by the random cuts in the\n     * tree. 
If the same leaf point is found in multiple trees, those results will\n     * be combined into a single Neighbor in the result.\n     *\n     * If sequence indexes are disabled for this forest, then sequenceIndexes will\n     * be empty in the returned Neighbors.\n     *\n     * @param point A point whose neighbors we want to find.\n     * @return a list of Neighbors, ordered from closest to furthest.\n     */\n    @Deprecated\n    public List<Neighbor> getNearNeighborsInSample(double[] point) {\n        return getNearNeighborsInSample(toFloatArray(point));\n    }\n\n    public List<Neighbor> getNearNeighborsInSample(float[] point) {\n        return getNearNeighborsInSample(point, Double.POSITIVE_INFINITY);\n    }\n\n    /**\n     * @return true if all samplers are ready to output results.\n     */\n    public boolean isOutputReady() {\n        return outputReady || (outputReady = stateCoordinator.getTotalUpdates() >= outputAfter\n                && components.stream().allMatch(IComponentModel::isOutputReady));\n    }\n\n    /**\n     * @return true if all samplers in the forest are full.\n     */\n    public boolean samplersFull() {\n        return stateCoordinator.getTotalUpdates() >= sampleSize;\n    }\n\n    /**\n     * Returns the total number updates to the forest.\n     *\n     * The count of updates is represented with long type and may overflow.\n     *\n     * @return the total number of updates to the forest.\n     */\n    public long getTotalUpdates() {\n        return stateCoordinator.getTotalUpdates();\n    }\n\n    public void pauseSampling() {\n        updateExecutor.setCurrentlySampling(false);\n    }\n\n    public void resumeSampling() {\n        updateExecutor.setCurrentlySampling(true);\n    }\n\n    public boolean isCurrentlySampling() {\n        return updateExecutor.isCurrentlySampling();\n    }\n\n    /**\n     * an L1 clustering primitive that shows the aggregation of the points stored in\n     * RCF the clustering uses multi-centroid 
clustering introduced in CURE\n     * https://en.wikipedia.org/wiki/CURE_algorithm However CURE also shrunk the\n     * well scattered points by a fraction alpha (there by creating new points);\n     * while that concept is used herein, the (multi) summarization algorithm\n     * changes the distance metric as opposed to creating new points since\n     * continuity of values is not an useful assumption in context of RCFs. The\n     * usage of distance metric is similar to the discussion in\n     * https://en.wikipedia.org/wiki/Data_stream_clustering See the examples package\n     * for an example of dynamic summarization. /\n     * \n     * @param maxAllowed              maximum number of clusters one is willing to\n     *                                see\n     * @param shrinkage               a parameter that controls between spherical\n     *                                nature (=1) and MST (=0), this corresponds to\n     *                                the parameter alpha in the description above\n     * @param numberOfRepresentatives number of centroids used to represent a\n     *                                cluster, this is the parameter c in the\n     *                                description of CURE\n     * @param separationRatio         a parameter in [0,1] that controls how\n     *                                zealously should the algorithm reduce the\n     *                                number of clusters a default value of 0.8 is a\n     *                                reasonable value for many settings. A value\n     *                                close to 0 would tend to merge eveything into\n     *                                a single cluster. 
The option is provided since\n     *                                it can be of use in the future to produce\n     *                                dendograms and similar information.\n     * @param distance                a distance function for points\n     * @param previous                a (possibly null) list of previous clustering\n     *                                obtained. If the list is non-null then the\n     *                                representatives of the previous cluster would\n     *                                be added as zero weight points, ensuring that\n     *                                the summarization is more smooth (in contrast\n     *                                to two independent summarizations). The zero\n     *                                weight points of the past can serve as\n     *                                representatives of the current clustering.\n     * @return a list of clusters\n     */\n    public List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,\n            double separationRatio, BiFunction<float[], float[], Double> distance, List<ICluster<float[]>> previous) {\n        return stateCoordinator.getStore().summarize(maxAllowed, shrinkage, numberOfRepresentatives, separationRatio,\n                distance, previous);\n    }\n\n    // same as above with default filled in\n    public List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,\n            List<ICluster<float[]>> previous) {\n        return summarize(maxAllowed, shrinkage, numberOfRepresentatives, DEFAULT_SEPARATION_RATIO_FOR_MERGE,\n                Summarizer::L1distance, previous);\n    }\n\n    public static class Builder<T extends Builder<T>> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        private int dimensions;\n        private int sampleSize = 
DEFAULT_SAMPLE_SIZE;\n        private Optional<Integer> outputAfter = Optional.empty();\n        private int numberOfTrees = DEFAULT_NUMBER_OF_TREES;\n        private Optional<Double> timeDecay = Optional.empty();\n        private Optional<Long> randomSeed = Optional.empty();\n        private boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n        private boolean centerOfMassEnabled = DEFAULT_CENTER_OF_MASS_ENABLED;\n        private boolean parallelExecutionEnabled = DEFAULT_PARALLEL_EXECUTION_ENABLED;\n        private Optional<Integer> threadPoolSize = Optional.empty();\n        private boolean directLocationMapEnabled = DEFAULT_DIRECT_LOCATION_MAP;\n        private double boundingBoxCacheFraction = DEFAULT_BOUNDING_BOX_CACHE_FRACTION;\n        private int shingleSize = DEFAULT_SHINGLE_SIZE;\n\n        private boolean internalShinglingEnabled = DEFAULT_INTERNAL_SHINGLING_ENABLED;\n        protected boolean internalRotationEnabled = DEFAULT_INTERNAL_ROTATION_ENABLED;\n        protected Optional<Integer> initialPointStoreSize = Optional.empty();\n        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;\n\n        public T dimensions(int dimensions) {\n            this.dimensions = dimensions;\n            return (T) this;\n        }\n\n        public T sampleSize(int sampleSize) {\n            this.sampleSize = sampleSize;\n            return (T) this;\n        }\n\n        public T outputAfter(int outputAfter) {\n            this.outputAfter = Optional.of(outputAfter);\n            return (T) this;\n        }\n\n        public T numberOfTrees(int numberOfTrees) {\n            this.numberOfTrees = numberOfTrees;\n            return (T) this;\n        }\n\n        public T shingleSize(int shingleSize) {\n            this.shingleSize = shingleSize;\n            return (T) this;\n        }\n\n        public T timeDecay(double timeDecay) {\n            this.timeDecay = Optional.of(timeDecay);\n            return 
(T) this;\n        }\n\n        public T randomSeed(long randomSeed) {\n            this.randomSeed = Optional.of(randomSeed);\n            return (T) this;\n        }\n\n        public T centerOfMassEnabled(boolean centerOfMassEnabled) {\n            this.centerOfMassEnabled = centerOfMassEnabled;\n            return (T) this;\n        }\n\n        public T parallelExecutionEnabled(boolean parallelExecutionEnabled) {\n            this.parallelExecutionEnabled = parallelExecutionEnabled;\n            return (T) this;\n        }\n\n        public T threadPoolSize(int threadPoolSize) {\n            this.threadPoolSize = Optional.of(threadPoolSize);\n            return (T) this;\n        }\n\n        public T initialPointStoreSize(int initialPointStoreSize) {\n            this.initialPointStoreSize = Optional.of(initialPointStoreSize);\n            return (T) this;\n        }\n\n        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n            return (T) this;\n        }\n\n        @Deprecated\n        public T compact(boolean compact) {\n            return (T) this;\n        }\n\n        public T internalShinglingEnabled(boolean internalShinglingEnabled) {\n            this.internalShinglingEnabled = internalShinglingEnabled;\n            return (T) this;\n        }\n\n        public T internalRotationEnabled(boolean internalRotationEnabled) {\n            this.internalRotationEnabled = internalRotationEnabled;\n            return (T) this;\n        }\n\n        @Deprecated\n        public T dynamicResizingEnabled(boolean dynamicResizingEnabled) {\n            return (T) this;\n        }\n\n        @Deprecated\n        public T precision(Precision precision) {\n            return (T) this;\n        }\n\n        public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {\n            this.boundingBoxCacheFraction = boundingBoxCacheFraction;\n            
return (T) this;\n        }\n\n        public T initialAcceptFraction(double initialAcceptFraction) {\n            this.initialAcceptFraction = initialAcceptFraction;\n            return (T) this;\n        }\n\n        public RandomCutForest build() {\n            return new RandomCutForest(this);\n        }\n\n        public Random getRandom() {\n            // If a random seed was given, use it to create a new Random. Otherwise, call\n            // the 0-argument constructor\n            return randomSeed.map(Random::new).orElseGet(Random::new);\n        }\n    }\n\n    /**\n     * Score a point using the given scoring functions.\n     *\n     * @param point                   input point being scored\n     * @param ignoreLeafMassThreshold said threshold\n     * @param seen                    the function that applies if input is equal to\n     *                                a previously seen sample in a leaf\n     * @param unseen                  if the input does not have a match in the\n     *                                leaves\n     * @param damp                    damping function based on the duplicity of the\n     *                                previously seen samples\n     * @return anomaly score\n     */\n    public double getDynamicScore(float[] point, int ignoreLeafMassThreshold, BiFunction<Double, Double, Double> seen,\n            BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> damp) {\n\n        checkArgument(ignoreLeafMassThreshold >= 0, \"ignoreLeafMassThreshold should be greater than or equal to 0\");\n\n        if (!isOutputReady()) {\n            return 0.0;\n        }\n\n        VisitorFactory<Double> visitorFactory = new VisitorFactory<>((tree, y) -> new DynamicScoreVisitor(\n                tree.projectToTree(y), tree.getMass(), ignoreLeafMassThreshold, seen, unseen, damp));\n        BinaryOperator<Double> accumulator = Double::sum;\n\n        Function<Double, Double> finisher = sum -> sum / 
numberOfTrees;\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Similar to above but now the scoring takes in a function of Bounding Box to\n     * probabilities (vector over the dimensions); and produces a score af-if the\n     * tree were built using that function (when in reality the tree is an RCF).\n     * Changing the defaultRCFgVec function to some other function f() will provide\n     * a mechanism of dynamic scoring for trees that are built using f() which is\n     * the purpose of TransductiveScalarScore visitor. Note that the answer is an\n     * MCMC simulation and is not normalized (because the scoring functions are\n     * flexible and unknown) and over a small number of trees the errors can be\n     * large specially if vecSep is very far from defaultRCFgVec\n     *\n     * Given the large number of possible sources of distortion, ignoreLeafThreshold\n     * is not supported.\n     *\n     * @param point  point to be scored\n     * @param seen   the score function for seen point\n     * @param unseen score function for unseen points\n     * @param damp   dampening the score for duplicates\n     * @param vecSep the function of (BoundingBox) -&gt; array of probabilities\n     * @return the simuated score\n     */\n\n    public double getDynamicSimulatedScore(float[] point, BiFunction<Double, Double, Double> seen,\n            BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> damp,\n            Function<IBoundingBoxView, double[]> vecSep) {\n\n        if (!isOutputReady()) {\n            return 0.0;\n        }\n\n        VisitorFactory<Double> visitorFactory = new VisitorFactory<>(\n                (tree, y) -> new SimulatedTransductiveScalarScoreVisitor(tree.projectToTree(y), tree.getMass(), seen,\n                        unseen, damp, CommonUtils::defaultRCFgVecFunction, vecSep));\n        BinaryOperator<Double> accumulator = Double::sum;\n\n 
       Function<Double, Double> finisher = sum -> sum / numberOfTrees;\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Score a point using the given scoring functions. This method will\n     * short-circuit before visiting all trees if the scores that are returned from\n     * a subset of trees appears to be converging to a given value. See\n     * {@link OneSidedConvergingDoubleAccumulator} for more about convergence.\n     *\n     * @param point                   input point\n     * @param precision               controls early convergence\n     * @param highIsCritical          this is true for the default scoring function.\n     *                                If the user wishes to use a different scoring\n     *                                function where anomaly scores are low values\n     *                                (for example, height in tree) then this should\n     *                                be set to false.\n     * @param ignoreLeafMassThreshold said threshold\n     * @param seen                    scoring function when the input matches some\n     *                                tuple in the leaves\n     * @param unseen                  scoring function when the input is not found\n     * @param damp                    dampening function for duplicates which are\n     *                                same as input (applies with seen)\n     * @return the dynamic score under sequential early stopping\n     */\n    public double getApproximateDynamicScore(float[] point, double precision, boolean highIsCritical,\n            int ignoreLeafMassThreshold, BiFunction<Double, Double, Double> seen,\n            BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> damp) {\n\n        checkArgument(ignoreLeafMassThreshold >= 0, \"ignoreLeafMassThreshold should be greater than or equal to 0\");\n\n        if (!isOutputReady()) {\n            
return 0.0;\n        }\n\n        VisitorFactory<Double> visitorFactory = new VisitorFactory<>((tree, y) -> new DynamicScoreVisitor(\n                tree.projectToTree(y), tree.getMass(), ignoreLeafMassThreshold, seen, unseen, damp));\n\n        ConvergingAccumulator<Double> accumulator = new OneSidedConvergingDoubleAccumulator(highIsCritical, precision,\n                DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);\n\n        Function<Double, Double> finisher = x -> x / accumulator.getValuesAccepted();\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Same as above, but for dynamic scoring. See the params of\n     * getDynamicScoreParallel\n     *\n     * @param point                   point to be scored\n     * @param ignoreLeafMassThreshold said threshold\n     * @param seen                    score function for seen points\n     * @param unseen                  score function for unseen points\n     * @param newDamp                 dampening function for duplicates in the seen\n     *                                function\n     * @return dynamic scoring attribution DiVector\n     */\n    public DiVector getDynamicAttribution(float[] point, int ignoreLeafMassThreshold,\n            BiFunction<Double, Double, Double> seen, BiFunction<Double, Double, Double> unseen,\n            BiFunction<Double, Double, Double> newDamp) {\n\n        if (!isOutputReady()) {\n            return new DiVector(dimensions);\n        }\n\n        VisitorFactory<DiVector> visitorFactory = new VisitorFactory<>(\n                (tree, y) -> new DynamicAttributionVisitor(tree.projectToTree(y), tree.getMass(),\n                        ignoreLeafMassThreshold, seen, unseen, newDamp),\n                (tree, x) -> x.lift(tree::liftFromTree));\n        BinaryOperator<DiVector> accumulator = DiVector::addToLeft;\n        Function<DiVector, DiVector> finisher = x -> x.scale(1.0 / 
numberOfTrees);\n\n        return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n    /**\n     * Attribution for dynamic sequential scoring; getL1Norm() should agree with\n     * getDynamicScoringSequential\n     *\n     * @param point                   input\n     * @param precision               parameter controlling early stopping\n     * @param highIsCritical          are high values anomalous (otherwise low\n     *                                values are anomalous)\n     * @param ignoreLeafMassThreshold we ignore leaves with mass equal/below *\n     *                                threshold\n     * @param seen                    function for scoring points that have been\n     *                                seen before\n     * @param unseen                  function for scoring points not seen in tree\n     * @param newDamp                 dampening function based on duplicates\n     * @return attribution DiVector of the score\n     */\n    public DiVector getApproximateDynamicAttribution(float[] point, double precision, boolean highIsCritical,\n            int ignoreLeafMassThreshold, BiFunction<Double, Double, Double> seen,\n            BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> newDamp) {\n\n        if (!isOutputReady()) {\n            return new DiVector(dimensions);\n        }\n\n        VisitorFactory<DiVector> visitorFactory = new VisitorFactory<>((tree, y) -> new DynamicAttributionVisitor(y,\n                tree.getMass(), ignoreLeafMassThreshold, seen, unseen, newDamp),\n                (tree, x) -> x.lift(tree::liftFromTree));\n\n        ConvergingAccumulator<DiVector> accumulator = new OneSidedConvergingDiVectorAccumulator(dimensions,\n                highIsCritical, precision, DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);\n\n        Function<DiVector, DiVector> finisher = vector -> vector.scale(1.0 / accumulator.getValuesAccepted());\n\n 
       return traverseForest(transformToShingledPoint(point), visitorFactory, accumulator, finisher);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/Visitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * This is the interface for a visitor which can be used to query a ITraversable\n * to produce a result. A visitor is submitted to\n * ITraversable#traverse(double[], Visitor), and during the traversal the\n * {@link #acceptLeaf} and {@link #accept} methods are invoked on the nodes in\n * the traversal path.\n * <p>\n * See ITraversable#traverse(double[], Visitor) for details about the traversal\n * path.\n */\npublic interface Visitor<R> {\n    /**\n     * Visit a node in the traversal path.\n     *\n     * @param node        the node being visited\n     * @param depthOfNode the depth of the node being visited\n     */\n    void accept(INodeView node, int depthOfNode);\n\n    /**\n     * Visit the leaf node in the traversal path. 
By default, this method proxies to\n     * {@link #accept(INodeView, int)}.\n     *\n     * @param leafNode    the leaf node being visited\n     * @param depthOfNode the depth of the leaf node\n     */\n    default void acceptLeaf(INodeView leafNode, final int depthOfNode) {\n        accept(leafNode, depthOfNode);\n    }\n\n    /**\n     * At the end of the traversal, this method is called to obtain the result\n     * computed by the visitor.\n     *\n     * @return the result value computed by the visitor.\n     */\n    R getResult();\n\n    /**\n     * This method short-circuits the evaluation of the Visitor at nodes on the\n     * traversal path. By default, the accept (or acceptLeaf) method will be invoked\n     * for each Node in the traversal path. But the NodeView has to prepare\n     * information to support that visitor invocation. Before invocation, the value\n     * of isConverged will be checked. If it is true, some of that preparation can\n     * be skipped -- because the visitor would not be updated. This method can be\n     * overwritten to optimize visitors that do not need to visit every node on the\n     * root to leaf path before returning a value.\n     *\n     * Note that this convergence applies to a single visitor computation and is\n     * expected to be a speedup without any change in the value of the answer. This\n     * is different from converging accumulator which corresponds to sequential\n     * evaluation of different visitors and early stopping.\n     **/\n    default boolean isConverged() {\n        return false;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/VisitorFactory.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.tree.ITree;\n\n/**\n * This is the interface for a visitor factory the factory corresponds to\n * mapping a (tree,point) pair to a visitor and a mapping for the inverse result\n */\npublic class VisitorFactory<R> implements IVisitorFactory<R> {\n    private final BiFunction<ITree<?, ?>, float[], Visitor<R>> newVisitor;\n    private final BiFunction<ITree<?, ?>, R, R> liftResult;\n\n    public VisitorFactory(BiFunction<ITree<?, ?>, float[], Visitor<R>> newVisitor,\n            BiFunction<ITree<?, ?>, R, R> liftResult) {\n        this.newVisitor = newVisitor;\n        this.liftResult = liftResult;\n    }\n\n    public VisitorFactory(BiFunction<ITree<?, ?>, float[], Visitor<R>> newVisitor) {\n        this(newVisitor, (tree, x) -> x);\n    }\n\n    @Override\n    public Visitor<R> newVisitor(ITree<?, ?> tree, float[] point) {\n        return newVisitor.apply(tree, point);\n    }\n\n    @Override\n    public R liftResult(ITree<?, ?> tree, R result) {\n        return liftResult.apply(tree, result);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AbstractAttributionVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport java.util.Arrays;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.Visitor;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * Attribution exposes the attribution of scores produced by ScalarScoreVisitor\n * corresponding to different attributes. It allows a boolean\n * ignoreClosestCandidate; which when true will compute the attribution as it\n * that near neighbor was not present in RCF. This is turned on by default for\n * duplicate points seen by the forest, so that the attribution does not change\n * is a sequence of duplicate points are seen. For non-duplicate points, if the\n * boolean turned on, reduces effects of masking (when anomalous points are\n * included in the forest -- which will be true with a few samples or when the\n * samples are not refreshed appropriately). 
It is worth remembering that\n * disallowing anomalous points from being included in the forest explicitly\n * will render the algorithm incapable of adjusting to a new normal -- which is\n * a strength of this algorithm.\n **/\npublic abstract class AbstractAttributionVisitor implements Visitor<DiVector> {\n\n    public static final int DEFAULT_IGNORE_LEAF_MASS_THRESHOLD = 0;\n\n    protected final double[] differenceInRangeVector;\n    protected final float[] pointToScore;\n    protected final int treeMass;\n    protected final DiVector directionalAttribution;\n    protected boolean hitDuplicates;\n    protected double savedScore;\n    protected double sumOfNewRange;\n    protected double sumOfDifferenceInRange;\n    protected boolean ignoreLeaf;\n    protected int ignoreLeafMassThreshold;\n\n    /**\n     * A flag that states whether the point to score is known to be contained inside\n     * the bounding box of Nodes being accepted. Assumes nodes are accepted in\n     * leaf-to-root order.\n     */\n    protected boolean pointInsideBox;\n\n    /**\n     * An array that keeps track of whether each margin of the point being scored is\n     * outside inside the box considered during the recursive call to compute the\n     * score. 
Assumes nodes are accepted in leaf-to-root order.\n     */\n    protected boolean[] coordInsideBox;\n    protected IBoundingBoxView shadowBox;\n\n    public AbstractAttributionVisitor(float[] pointToScore, int treeMass, int ignoreLeafMassThreshold) {\n\n        this.pointToScore = Arrays.copyOf(pointToScore, pointToScore.length);\n        this.treeMass = treeMass;\n        this.ignoreLeaf = ignoreLeafMassThreshold > DEFAULT_IGNORE_LEAF_MASS_THRESHOLD;\n        this.ignoreLeafMassThreshold = ignoreLeafMassThreshold;\n        hitDuplicates = false;\n        pointInsideBox = false;\n        savedScore = 0;\n        directionalAttribution = new DiVector(pointToScore.length);\n        shadowBox = null;\n        coordInsideBox = new boolean[pointToScore.length];\n        // array is twice as long as pointToScore because we store\n        // positive and negative differences separately\n        differenceInRangeVector = new double[2 * pointToScore.length];\n    }\n\n    public AbstractAttributionVisitor(float[] pointToScore, int treeMass) {\n        this(pointToScore, treeMass, DEFAULT_IGNORE_LEAF_MASS_THRESHOLD);\n    }\n\n    /**\n     * Take the normalization function applied to the corresponding scoring visitor\n     * and apply that to each coordinate of the DiVector to modify the data in\n     * place. 
The function has to be associative in its first parameter; that is, fn\n     * (x1, y) + fn (x2, y) = fn (x1 + x2, y)\n     * \n     * @return The modified data.\n     */\n    @Override\n    public DiVector getResult() {\n        DiVector result = new DiVector(directionalAttribution);\n        result.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, treeMass));\n        return result;\n    }\n\n    /**\n     * Update the anomaly score based on the next step of the tree traversal.\n     *\n     * @param node        The current node in the tree traversal\n     * @param depthOfNode The depth of the current node in the tree\n     */\n    @Override\n    public void accept(INodeView node, int depthOfNode) {\n        if (pointInsideBox) {\n            return;\n        }\n\n        IBoundingBoxView smallBox;\n\n        if (hitDuplicates || ignoreLeaf) {\n            // use the sibling bounding box to represent counterfactual \"what if point & the\n            // candidate near neighbor\n            // had not been inserted in the tree\"\n\n            shadowBox = shadowBox == null ? 
node.getSiblingBoundingBox(pointToScore)\n                    : shadowBox.getMergedBox(node.getSiblingBoundingBox(pointToScore));\n\n            smallBox = shadowBox;\n        } else {\n            smallBox = node.getBoundingBox();\n        }\n\n        IBoundingBoxView largeBox = smallBox.getMergedBox(pointToScore);\n        updateRangesForScoring(smallBox, largeBox);\n\n        double probOfCut = sumOfDifferenceInRange / sumOfNewRange;\n\n        // if leaves were ignored we need to keep accounting for the score\n        if (ignoreLeaf) {\n            savedScore = probOfCut * scoreUnseen(depthOfNode, node.getMass()) + (1 - probOfCut) * savedScore;\n        }\n\n        if (probOfCut <= 0) {\n            pointInsideBox = true;\n        } else {\n            double newScore = scoreUnseen(depthOfNode, node.getMass());\n\n            for (int i = 0; i < pointToScore.length; i++) {\n                double probOfCutInSpikeDirection = differenceInRangeVector[2 * i] / sumOfNewRange;\n                directionalAttribution.high[i] = probOfCutInSpikeDirection * newScore\n                        + (1 - probOfCut) * directionalAttribution.high[i];\n\n                double probOfCutInDipDirection = differenceInRangeVector[2 * i + 1] / sumOfNewRange;\n                directionalAttribution.low[i] = probOfCutInDipDirection * newScore\n                        + (1 - probOfCut) * directionalAttribution.low[i];\n            }\n        }\n\n        boolean capture = (pointInsideBox || depthOfNode == 0);\n        if ((hitDuplicates || ignoreLeaf) && capture) {\n            // final rescaling; this ensures agreement with the ScalarScoreVector\n            // the scoreUnseen/scoreSeen should be the same as scoring; other uses need\n            // caution.\n            directionalAttribution.renormalize(savedScore);\n\n        }\n    }\n\n    @Override\n    public void acceptLeaf(INodeView leafNode, int depthOfNode) {\n\n        updateRangesForScoring(leafNode.getBoundingBox(), 
leafNode.getBoundingBox().getMergedBox(pointToScore));\n\n        // newrange == 0 corresponds to equality of points and is faster than\n        // Array.equals\n        if (sumOfNewRange <= 0) {\n            hitDuplicates = true;\n        }\n\n        if ((hitDuplicates) && ((!ignoreLeaf) || (leafNode.getMass() > ignoreLeafMassThreshold))) {\n            savedScore = damp(leafNode.getMass(), treeMass) * scoreSeen(depthOfNode, leafNode.getMass());\n        } else {\n            savedScore = scoreUnseen(depthOfNode, leafNode.getMass());\n        }\n\n        if ((hitDuplicates) || ((ignoreLeaf) && (leafNode.getMass() <= ignoreLeafMassThreshold))) {\n            Arrays.fill(directionalAttribution.high, savedScore / (2 * pointToScore.length));\n            Arrays.fill(directionalAttribution.low, savedScore / (2 * pointToScore.length));\n            /* in this case do not have a better option than an equal attribution */\n            Arrays.fill(coordInsideBox, false);\n        } else {\n            for (int i = 0; i < pointToScore.length; i++) {\n                directionalAttribution.high[i] = savedScore * differenceInRangeVector[2 * i] / sumOfNewRange;\n                directionalAttribution.low[i] = savedScore * differenceInRangeVector[2 * i + 1] / sumOfNewRange;\n            }\n        }\n    }\n\n    /**\n     * A scoring function which is applied when the leaf node visited is equal to\n     * the point being scored.\n     * \n     * @param depth The depth of the node being visited\n     * @param mass  The mass of the node being visited\n     * @return an anomaly score contribution for a given node\n     */\n    protected abstract double scoreSeen(int depth, int mass);\n\n    /**\n     * A scoring function which is applied when the leaf node visited is not equal\n     * to the point being scored. 
This function is also used to compute the\n     * contribution to the anomaly score from non-leaf nodes.\n     * \n     * @param depth The depth of the node being visited.\n     * @param mass  The mass of the node being visited.\n     * @return an anomaly score contribution for a given node.\n     */\n    protected abstract double scoreUnseen(int depth, int mass);\n\n    /**\n     * This function produces a scaling factor which can be used to reduce the\n     * influence of leaf nodes with mass greater than 1.\n     * \n     * @param leafMass The mass of the leaf node visited\n     * @param treeMass The mass of the tree being visited\n     * @return a scaling factor to apply to the result from\n     *         {@link #scoreSeen(int, int)}.\n     */\n    protected abstract double damp(int leafMass, int treeMass);\n\n    /**\n     * When updating the score for a node, we compare the node's bounding box to the\n     * merged bounding box that would be created by adding the point to be scored.\n     * This method updates local instance variables sumOfDifferenceInRange and\n     * differenceInRange vector to reflect the total difference in side length and\n     * the difference in side length in each dimension, respectively.\n     *\n     * @param smallBox The bounding box corresponding to a Node being visited.\n     * @param largeBox The merged bounding box containing smallBox and the point\n     *                 being scored.\n     */\n    protected void updateRangesForScoring(IBoundingBoxView smallBox, IBoundingBoxView largeBox) {\n        sumOfDifferenceInRange = 0.0;\n        sumOfNewRange = 0.0;\n        Arrays.fill(differenceInRangeVector, 0.0);\n        for (int i = 0; i < pointToScore.length; i++) {\n\n            sumOfNewRange += largeBox.getRange(i);\n\n            // optimization turned off for ignoreLeaf\n            if (coordInsideBox[i] && !ignoreLeaf) {\n\n                continue;\n            }\n\n            double maxGap = 
Math.max(largeBox.getMaxValue(i) - smallBox.getMaxValue(i), 0.0);\n            double minGap = Math.max(smallBox.getMinValue(i) - largeBox.getMinValue(i), 0.0);\n\n            if (maxGap + minGap > 0.0) {\n                sumOfDifferenceInRange += (minGap + maxGap);\n                differenceInRangeVector[2 * i] = maxGap;\n                differenceInRangeVector[2 * i + 1] = minGap;\n\n            } else {\n                coordInsideBox[i] = true;\n            }\n        }\n    }\n\n    @Override\n    public boolean isConverged() {\n        return pointInsideBox;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AbstractScalarScoreVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport java.util.Arrays;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.Visitor;\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * This abstract visitor encodes a standard method for computing a scalar result\n * value. The basic computation is as follows:\n *\n * <ol>\n * <li>After following the traversal path to a leaf, compute a base score at the\n * leaf node.</li>\n * <li>For each node in the traversal path from the leaf to the root, compute\n * the probability that a random cut would separate the query point from the\n * node. The updated score uses this probability to create a weighted\n * combination between the current score and a score contribution from the\n * current node.</li>\n * </ol>\n * <p>\n * While this basic algorithm produces good results when all the points in the\n * sample are distinct, it can produce unexpected results when a significant\n * portion of the points in the sample are duplicates. 
Therefore this class\n * supports different optional features for modifying the score produced when\n * the point being scored is equal to the leaf node in the traversal.\n */\npublic abstract class AbstractScalarScoreVisitor implements Visitor<Double> {\n\n    public static final int DEFAULT_IGNORE_LEAF_MASS_THRESHOLD = 0;\n\n    /**\n     * The point whose anomaly score is being computed.\n     */\n    protected final float[] pointToScore;\n\n    /**\n     * The mass of the tree being visited. This value is used to normalize the final\n     * result.\n     */\n    protected final int treeMass;\n\n    /**\n     * This flag is set to 'true' if the point being scored is found to be contained\n     * by a bounding box in the traversal path, allowing us to short-circuit further\n     * computation.\n     */\n    protected boolean pointInsideBox;\n\n    /**\n     * Similar to pointInsideBox, the array coordInsideBox keeps track of whether\n     * each coordinate is contained in the corresponding bounding box projection for\n     * a bounding box in the traversal path. 
This field is used to skip unnecessary\n     * steps in the probability computation.\n     */\n    protected boolean[] coordInsideBox;\n\n    /**\n     * shadowbox used in attribution and ignoring the leaf to simulate a deletion\n     */\n    protected IBoundingBoxView shadowBox = null;\n\n    /**\n     * The function used to compute the base score in the case where the point being\n     * scored is equal to the leaf point (provided the ignoreLeafEquals and\n     * ignoreLeafMassThreshold variables indicate that we should use this method).\n     *\n     * Function arguments: leaf depth, leaf mass\n     */\n    protected double score;\n\n    /**\n     * If true, then the scoreUnseen method will be used to score a point equal to a\n     * leaf point in {@link #acceptLeaf(INodeView, int)}.\n     */\n    protected boolean ignoreLeafEquals;\n\n    /**\n     * If the point being scored is equal to the leaf point but the leaf mass is\n     * smaller than this value, then the scoreUnseen method will be used to score\n     * the point in {@link #accept(INodeView, int)}.\n     */\n    protected int ignoreLeafMassThreshold;\n\n    /**\n     * Construct a new ScalarScoreVisitor\n     *\n     * @param pointToScore            The point whose anomaly score we are computing\n     * @param treeMass                The total mass of the RandomCutTree that is\n     *                                scoring the point\n     * @param ignoreLeafMassThreshold Is the maximum mass of the leaf which can be\n     *                                ignored\n     */\n    public AbstractScalarScoreVisitor(float[] pointToScore, int treeMass, int ignoreLeafMassThreshold) {\n        this.pointToScore = Arrays.copyOf(pointToScore, pointToScore.length);\n        this.treeMass = treeMass;\n        pointInsideBox = false;\n        score = 0.0;\n        this.ignoreLeafEquals = (ignoreLeafMassThreshold > DEFAULT_IGNORE_LEAF_MASS_THRESHOLD);\n        this.ignoreLeafMassThreshold = ignoreLeafMassThreshold;\n\n 
       // will be initialized to an array of false values\n        coordInsideBox = new boolean[pointToScore.length];\n    }\n\n    /**\n     * Construct a new AbstractScalarScoreVisitor using default leaf options.\n     *\n     * @param pointToScore The point whose anomaly score we are computing\n     * @param treeMass     The total mass of the RandomCutTree that is scoring the\n     *                     point\n     */\n    public AbstractScalarScoreVisitor(float[] pointToScore, int treeMass) {\n        this(pointToScore, treeMass, DEFAULT_IGNORE_LEAF_MASS_THRESHOLD);\n    }\n\n    /**\n     * @return The score computed up until this point.\n     */\n    @Override\n    public Double getResult() {\n        return CommonUtils.defaultScalarNormalizerFunction(score, treeMass);\n    }\n\n    /**\n     * Update the anomaly score based on the next step of the tree traversal.\n     *\n     * @param node        The current node in the tree traversal\n     * @param depthOfNode The depth of the current node in the tree\n     */\n    @Override\n    public void accept(INodeView node, int depthOfNode) {\n        if (pointInsideBox) {\n            return;\n        }\n        double probabilityOfSeparation;\n        if (!ignoreLeafEquals) {\n            probabilityOfSeparation = node.probailityOfSeparation(pointToScore);\n            if (probabilityOfSeparation <= 0) {\n                pointInsideBox = true;\n                return;\n            }\n        } else {\n            shadowBox = shadowBox == null ? node.getSiblingBoundingBox(pointToScore)\n                    : shadowBox.getMergedBox(node.getSiblingBoundingBox(pointToScore));\n            probabilityOfSeparation = (shadowBox.getRangeSum() <= 0) ? 
1.0 : getProbabilityOfSeparation(shadowBox);\n        }\n\n        score = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass())\n                + (1 - probabilityOfSeparation) * score;\n    }\n\n    /**\n     * Update the anomaly score with the given leaf node.\n     *\n     * @param leafNode    The leaf node that was reached by traversing the tree\n     * @param depthOfNode The depth of the leaf node\n     */\n    @Override\n    public void acceptLeaf(INodeView leafNode, int depthOfNode) {\n        if (Arrays.equals(leafNode.getLeafPoint(), pointToScore)\n                && (!ignoreLeafEquals || (leafNode.getMass() > ignoreLeafMassThreshold))) {\n            pointInsideBox = true;\n            score = damp(leafNode.getMass(), treeMass) * scoreSeen(depthOfNode, leafNode.getMass());\n\n        } else {\n            score = scoreUnseen(depthOfNode, leafNode.getMass());\n        }\n    }\n\n    /**\n     * A scoring function which is applied when the leaf node visited is equal to\n     * the point being scored.\n     * \n     * @param depth The depth of the node being visited\n     * @param mass  The mass of the node being visited\n     * @return an anomaly score contribution for a given node\n     */\n    protected abstract double scoreSeen(int depth, int mass);\n\n    /**\n     * A scoring function which is applied when the leaf node visited is not equal\n     * to the point being scored. 
This function is also used to compute the\n     * contribution to the anomaly score from non-leaf nodes.\n     * \n     * @param depth The depth of the node being visited.\n     * @param mass  The mass of the node being visited.\n     * @return an anomaly score contribution for a given node.\n     */\n    protected abstract double scoreUnseen(int depth, int mass);\n\n    /**\n     * This function produces a scaling factor which can be used to reduce the\n     * influence of leaf nodes with mass greater than 1.\n     * \n     * @param leafMass The mass of the leaf node visited\n     * @param treeMass The mass of the tree being visited\n     * @return a scaling factor to apply to the result from\n     *         {@link #scoreSeen(int, int)}.\n     */\n    protected abstract double damp(int leafMass, int treeMass);\n\n    /**\n     * Compute the probability that a random cut would separate the point from the\n     * rest of the bounding box. This method is intended to compute the probability\n     * for a non-leaf Node, and will throw an exception if a leaf-node bounding box\n     * is detected.\n     *\n     * @param boundingBox The bounding box that we are computing the probability of\n     *                    separation from.\n     * @return is the probability\n     */\n    protected double getProbabilityOfSeparation(final IBoundingBoxView boundingBox) {\n        double sumOfNewRange = 0d;\n        double sumOfDifferenceInRange = 0d;\n\n        for (int i = 0; i < pointToScore.length; ++i) {\n            double maxVal = boundingBox.getMaxValue(i);\n            double minVal = boundingBox.getMinValue(i);\n            double oldRange = maxVal - minVal;\n\n            if (!coordInsideBox[i]) {\n                if (maxVal < pointToScore[i]) {\n                    maxVal = pointToScore[i];\n                } else if (minVal > pointToScore[i]) {\n                    minVal = pointToScore[i];\n                } else if (!ignoreLeafEquals) {\n                    // 
optimization turned on for ignoreLeafEquals==false\n                    sumOfNewRange += oldRange;\n                    coordInsideBox[i] = true;\n                    continue;\n                }\n\n                double newRange = maxVal - minVal;\n                sumOfNewRange += newRange;\n                sumOfDifferenceInRange += (newRange - oldRange);\n            } else {\n                sumOfNewRange += oldRange;\n            }\n        }\n\n        if (sumOfNewRange <= 0) {\n            // Sum of range across dimensions should only be 0 at leaf nodes as non-leaf\n            // nodes always contain\n            // more than one distinct point\n            throw new IllegalStateException(\"Sum of new range of merged box in scoring function is smaller than 0 \"\n                    + \"for a non-leaf node. The sum of range of new bounding box is: \" + sumOfNewRange);\n        }\n\n        return sumOfDifferenceInRange / sumOfNewRange;\n    }\n\n    public boolean isConverged() {\n        return pointInsideBox;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AnomalyAttributionVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport com.amazon.randomcutforest.CommonUtils;\n\n/**\n * Attribution exposes the attribution of scores produced by ScalarScoreVisitor\n * corresponding to different attributes. It allows a boolean\n * ignoreClosestCandidate; which when true will compute the attribution as it\n * that near neighbor was not present in RCF. This is turned on by default for\n * duplicate points seen by the forest, so that the attribution does not change\n * is a sequence of duplicate points are seen. For non-duplicate points, if the\n * boolean turned on, reduces effects of masking (when anomalous points are\n * included in the forest (which will be true with a few samples or when the\n * samples are not refreshed appropriately). 
It is worth remembering that\n * disallowing anomalous points from being included in the forest\n * explicitly will render the algorithm incapable of adjusting to a new normal\n * -- which is a strength of this algorithm.\n **/\npublic class AnomalyAttributionVisitor extends AbstractAttributionVisitor {\n\n    public AnomalyAttributionVisitor(float[] pointToScore, int treeMass, int ignoreThreshold) {\n        super(pointToScore, treeMass, ignoreThreshold);\n    }\n\n    public AnomalyAttributionVisitor(float[] pointToScore, int treeMass) {\n        super(pointToScore, treeMass);\n    }\n\n    @Override\n    protected double scoreSeen(int depth, int mass) {\n        return CommonUtils.defaultScoreSeenFunction(depth, mass);\n    }\n\n    @Override\n    protected double scoreUnseen(int depth, int mass) {\n        return CommonUtils.defaultScoreUnseenFunction(depth, mass);\n    }\n\n    @Override\n    protected double damp(int leafMass, int treeMass) {\n        return CommonUtils.defaultDampFunction(leafMass, treeMass);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/AnomalyScoreVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport com.amazon.randomcutforest.CommonUtils;\n\n/**\n * This visitor computes a scalar anomaly score for a specified point. The basic\n * score computation is defined by {@link AbstractScalarScoreVisitor}, and this\n * class overrides the scoring functions so that input points that are more\n * likely to separated from in-sample points by a random cut receive a higher\n * anomaly score.\n *\n * While this basic algorithm produces good results when all the points in the\n * sample are distinct, it can produce unexpected results when a significant\n * portion of the points in the sample are duplicates. 
Therefore this class\n * supports different optional features for modifying the score produced when\n * the point being scored is equal to the leaf node in the traversal.\n */\npublic class AnomalyScoreVisitor extends AbstractScalarScoreVisitor {\n\n    /**\n     * Construct a new ScalarScoreVisitor\n     *\n     * @param pointToScore The point whose anomaly score we are computing\n     * @param treeMass     The total mass of the RandomCutTree that is scoring the\n     *                     point\n     */\n    public AnomalyScoreVisitor(float[] pointToScore, int treeMass) {\n        super(pointToScore, treeMass);\n    }\n\n    /**\n     * Construct a new ScalarScoreVisitor\n     *\n     * @param pointToScore            The point whose anomaly score we are computing\n     * @param treeMass                The total mass of the RandomCutTree that is\n     *                                scoring the point\n     * @param ignoreLeafMassThreshold Is the maximum mass of the leaf which can be\n     *                                ignored\n     */\n    public AnomalyScoreVisitor(float[] pointToScore, int treeMass, int ignoreLeafMassThreshold) {\n        super(pointToScore, treeMass, ignoreLeafMassThreshold);\n    }\n\n    @Override\n    protected double scoreSeen(int depth, int mass) {\n        return CommonUtils.defaultScoreSeenFunction(depth, mass);\n    }\n\n    @Override\n    protected double scoreUnseen(int depth, int mass) {\n        return CommonUtils.defaultScoreUnseenFunction(depth, mass);\n    }\n\n    @Override\n    protected double damp(int leafMass, int treeMass) {\n        return CommonUtils.defaultDampFunction(leafMass, treeMass);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/DynamicAttributionVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.returntypes.DiVector;\n\npublic class DynamicAttributionVisitor extends AbstractAttributionVisitor {\n\n    /**\n     * The function used to compute the base score in the case where the point being\n     * scored is equal to the leaf point (provided the ignoreLeafEquals and\n     * ignoreLeafMassThreshold variables indicate that we should use this method).\n     * <p>\n     * Function arguments: leaf depth, leaf mass\n     */\n    private final BiFunction<Double, Double, Double> scoreSeen;\n\n    /**\n     * A damping function used to dilute the impact of a point with a large number\n     * of duplicates on the base score.\n     * <p>\n     * Function arguments: leaf mass, tree mass\n     */\n    private final BiFunction<Double, Double, Double> damp;\n\n    /**\n     * The scoring function to use when the point being scored is not equal to the\n     * leaf point, or when the points are equal but the ignoreLeafEquals or\n     * ignoreLeafMassThreshold variable indicates that we should use the scoreUnseen\n     * method.\n     * <p>\n     * Function arguments: leaf depth, leaf mass\n     */\n    private final BiFunction<Double, Double, Double> scoreUnseen;\n\n    /**\n     *\n     * @param point                   to be 
scored\n     * @param treeMass                mass of the tree\n     * @param ignoreLeafMassThreshold threshold of mass for leaves to be ignored\n     * @param scoreSeen               part of the score when point has been seen\n     * @param scoreUnseen             part of the score for unseen point\n     * @param damp                    dampening function for seen points\n     */\n    public DynamicAttributionVisitor(float[] point, int treeMass, int ignoreLeafMassThreshold,\n            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,\n            BiFunction<Double, Double, Double> damp) {\n        super(point, treeMass, ignoreLeafMassThreshold);\n        this.scoreSeen = scoreSeen;\n        this.scoreUnseen = scoreUnseen;\n        this.damp = damp;\n    }\n\n    @Override\n    protected double scoreSeen(int depth, int leafMass) {\n        return scoreSeen.apply((double) depth, (double) leafMass);\n    }\n\n    @Override\n    protected double scoreUnseen(int depth, int leafMass) {\n        return scoreUnseen.apply((double) depth, (double) leafMass);\n    }\n\n    @Override\n    protected double damp(int leafMass, int treeMass) {\n        return damp.apply((double) leafMass, (double) treeMass);\n    }\n\n    // turning off normalization\n\n    @Override\n    public DiVector getResult() {\n        return new DiVector(directionalAttribution);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/DynamicScoreVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport java.util.function.BiFunction;\n\npublic class DynamicScoreVisitor extends AbstractScalarScoreVisitor {\n\n    /**\n     * The function used to compute the base score in the case where the point being\n     * scored is equal to the leaf point (provided the ignoreLeafEquals and\n     * ignoreLeafMassThreshold variables indicate that we should use this method).\n     * <p>\n     * Function arguments: leaf depth, leaf mass\n     */\n    protected final BiFunction<Double, Double, Double> scoreSeen;\n\n    /**\n     * A damping function used to dilute the impact of a point with a large number\n     * of duplicates on the base score.\n     * <p>\n     * Function arguments: leaf mass, tree mass\n     */\n    protected final BiFunction<Double, Double, Double> damp;\n\n    /**\n     * The scoring function to use when the point being scored is not equal to the\n     * leaf point, or when the points are equal but the ignoreLeafEquals or\n     * ignoreLeafMassThreshold variable indicates that we should use the scoreUnseen\n     * method.\n     * <p>\n     * Function arguments: leaf depth, leaf mass\n     */\n    protected final BiFunction<Double, Double, Double> scoreUnseen;\n\n    /**\n     * Constructor\n     * \n     * @param point                   being scored\n     * @param treeMass        
        mass of the tree\n     * @param ignoreLeafMassThreshold the threshold for ignoring leaf nodes\n     * @param scoreSeen               the part of score function for previously seen\n     *                                values\n     * @param scoreUnseen             part of the score for unseen values\n     * @param damp                    dampening function for seen points\n     */\n    public DynamicScoreVisitor(float[] point, int treeMass, int ignoreLeafMassThreshold,\n            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,\n            BiFunction<Double, Double, Double> damp) {\n        super(point, treeMass, ignoreLeafMassThreshold);\n        this.scoreSeen = scoreSeen;\n        this.scoreUnseen = scoreUnseen;\n        this.damp = damp;\n    }\n\n    @Override\n    protected double scoreSeen(int depth, int leafMass) {\n        return scoreSeen.apply((double) depth, (double) leafMass);\n    }\n\n    @Override\n    protected double scoreUnseen(int depth, int leafMass) {\n        return scoreUnseen.apply((double) depth, (double) leafMass);\n    }\n\n    @Override\n    protected double damp(int leafMass, int treeMass) {\n        return damp.apply((double) leafMass, (double) treeMass);\n    }\n\n    /**\n     * normalization is turned off for dynamic scoring because the function ranges\n     * are unknown\n     */\n\n    @Override\n    public Double getResult() {\n        return score;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/SimulatedTransductiveScalarScoreVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\n\npublic class SimulatedTransductiveScalarScoreVisitor extends TransductiveScalarScoreVisitor {\n\n    private final Function<IBoundingBoxView, double[]> vecSepBuild;\n\n    /**\n     * Construct a new SimulatedTransductiveScalarScoreVisitor\n     *\n     * @param pointToScore The point whose anomaly score we are computing\n     * @param treeMass     The total mass of the RandomCutTree that is scoring the\n     *                     point\n     * @param scoreSeen    is the part of the score function when the point has been\n     *                     seen\n     * @param scoreUnseen  is the part of the score when the point has not been seen\n     * @param damp         corresponds to the dampening of the effect of the seen\n     *                     points\n     * @param vecSepBuild  A function that provides the probabilities of choosing\n     *                     different dimensions given a BoundingBox when the tree\n     *                     was built.\n     * @param vecSepScore  A function that corresponds to importance of dimensions\n     *                     during scoring\n     */\n    public 
SimulatedTransductiveScalarScoreVisitor(float[] pointToScore, int treeMass,\n            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,\n            BiFunction<Double, Double, Double> damp, Function<IBoundingBoxView, double[]> vecSepBuild,\n            Function<IBoundingBoxView, double[]> vecSepScore) {\n        super(pointToScore, treeMass, scoreSeen, scoreUnseen, damp, vecSepScore);\n        this.vecSepBuild = vecSepBuild;\n    }\n\n    /**\n     * Update the anomaly score based on the next step of the tree traversal.\n     *\n     * @param node        The current node in the tree traversal\n     * @param depthOfNode The depth of the current node in the tree\n     */\n    @Override\n    public void accept(INodeView node, int depthOfNode) {\n        double weight = getWeight(node.getCutDimension(), vecSepBuild, node.getBoundingBox());\n\n        if (pointInsideBox) {\n            score *= weight;\n            return;\n        }\n\n        double probabilityOfSeparation = getProbabilityOfSeparation(node.getBoundingBox());\n        if (probabilityOfSeparation == 0) {\n            pointInsideBox = true;\n        }\n\n        score = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass()) + weight * score;\n\n    }\n\n    // The above function differs from TransductiveScalarScoreVisitor only in the\n    // weight\n    // computation and when the weight function is used.\n\n    @Override\n    public boolean isConverged() {\n        return false;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/anomalydetection/TransductiveScalarScoreVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\n\npublic class TransductiveScalarScoreVisitor extends DynamicScoreVisitor {\n\n    /*\n     * the goal of this visitor is to allow tranductive inference; where during\n     * scoring we make adjustments so that it appears (to the best of simulation\n     * ability) that the tree was built using the knowledge of the point being\n     * scored\n     * \n     */\n    protected final Function<IBoundingBoxView, double[]> vecSepScore;\n\n    /**\n     * Construct a new SimulatedTransductiveScalarScoreVisitor\n     *\n     * @param pointToScore The point whose anomaly score we are computing\n     * @param treeMass     The total mass of the RandomCutTree that is scoring the\n     *                     point\n     * @param scoreSeen    is the part of the score function when the point has been\n     *                     seen\n     * @param scoreUnseen  is the part of the score when the point has not been seen\n     * @param damp         corresponds to the dampening of the effect of the seen\n     *                     points\n     * @param vecSep       A function that provides the probabilities of choosing\n     *   
                  different dimensions given a BoundingBox when the tree\n     *                     was built. This must be the same as the probabilies of\n     *                     Transductive inference during scoring. For extenstions\n     *                     where these are different, see\n     *                     SimulatedTransductiveScalarScoreVisitor\n     *\n     *                     Note that scores are not normalized because the function\n     *                     ranges are unknown as is the case with\n     *                     DynamicScoreVisitor\n     */\n\n    public TransductiveScalarScoreVisitor(float[] pointToScore, int treeMass,\n            BiFunction<Double, Double, Double> scoreSeen, BiFunction<Double, Double, Double> scoreUnseen,\n            BiFunction<Double, Double, Double> damp, Function<IBoundingBoxView, double[]> vecSep) {\n        super(pointToScore, treeMass, 0, scoreSeen, scoreUnseen, damp);\n        this.vecSepScore = vecSep;\n        // build function is the same as scoring function\n    }\n\n    /**\n     * Update the anomaly score based on the next step of the tree traversal.\n     *\n     * @param node        The current node in the tree traversal\n     * @param depthOfNode The depth of the current node in the tree\n     */\n    @Override\n    public void accept(INodeView node, int depthOfNode) {\n        if (pointInsideBox) {\n            return;\n        }\n        // note that score was unchanged before the return\n        // this is only reasonable if the scoring was done using the same\n        // probability function used to build the trees.\n\n        double probabilityOfSeparation = getProbabilityOfSeparation(node.getBoundingBox());\n        double weight = getWeight(node.getCutDimension(), vecSepScore, node.getBoundingBox());\n        if (probabilityOfSeparation == 0) {\n            pointInsideBox = true;\n            return;\n        }\n\n        score = probabilityOfSeparation * scoreUnseen(depthOfNode, 
node.getMass()) + weight * score;\n\n    }\n\n    /**\n     * Compute the probability that a random cut would separate the point from the\n     * rest of the bounding box. This method is intended to compute the probability\n     * for a non-leaf Node, and will throw an exception if a leaf-node bounding box\n     * is detected.\n     *\n     * @param boundingBox The bounding box that we are computing the probability of\n     *                    separation from.\n     * @return is the probability\n     */\n    @Override\n    protected double getProbabilityOfSeparation(final IBoundingBoxView boundingBox) {\n        double sumOfDenominator = 0d;\n        double sumOfNumerator = 0d;\n\n        double[] vec = vecSepScore.apply(boundingBox.getMergedBox(pointToScore));\n\n        for (int i = 0; i < pointToScore.length; ++i) {\n            double maxVal = boundingBox.getMaxValue(i);\n            double minVal = boundingBox.getMinValue(i);\n            double oldRange = maxVal - minVal;\n            sumOfDenominator += vec[i];\n            if (!coordInsideBox[i]) {\n                if (maxVal < pointToScore[i]) {\n                    maxVal = pointToScore[i];\n                } else if (minVal > pointToScore[i]) {\n                    minVal = pointToScore[i];\n                }\n\n                double newRange = maxVal - minVal;\n                if (newRange > oldRange) {\n                    sumOfNumerator += vec[i] * (newRange - oldRange) / newRange;\n                } else\n                    coordInsideBox[i] = true;\n            }\n        }\n\n        if (sumOfDenominator <= 0) {\n            // Sum of range across dimensions should only be 0 at leaf nodes as non-leaf\n            // nodes always contain\n            // more than one distinct point\n            throw new IllegalStateException(\"Incorrect State\");\n        }\n        return sumOfNumerator / sumOfDenominator;\n        // for RCFs vec[i] = newRange (for dimension i) and therefore the\n        // 
sumOfNumerator is the sum of the difference (after and before\n        // merging the point to the box) of ranges\n        // sum of denominator is the sum the ranges in each dimension\n    }\n\n    // for this visitor class the assumption is that the trees are built using the\n    // same probabilities as are used in scoring. In the application herein\n    // vecSepBuild\n    // is the same as vecSepScore as in the accept(node) above; however the function\n    // is\n    // written in the more general form so that it can be used for the Simulated\n    // version as well without any changes.\n\n    protected double getWeight(int dim, Function<IBoundingBoxView, double[]> vecSepBuild,\n            final IBoundingBoxView boundingBox) {\n\n        double[] vecSmall = vecSepBuild.apply(boundingBox);\n        // the smaller box was built!\n        IBoundingBoxView largeBox = boundingBox.getMergedBox(pointToScore);\n        double[] vecLarge = vecSepScore.apply(largeBox);\n        // the larger box is only scored!\n        double sumSmall = 0;\n        double sumLarge = 0;\n        for (int i = 0; i < pointToScore.length; i++) {\n            sumSmall += vecSmall[i];\n            sumLarge += vecLarge[i];\n        }\n\n        return (boundingBox.getRange(dim) / largeBox.getRange(dim)) * (sumSmall / sumLarge)\n                * (vecLarge[dim] / vecSmall[dim]);\n        // this can be larger than 1\n        // For RCFs vecLarge[dim] = largeBox.getRange(dim) and\n        // vecSmall[dim] = smallBox.getRange(dim)\n        // sumSmall/sumLarge is the probability of non-separation\n\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/config/Config.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.config;\n\npublic class Config {\n    public static final String BOUNDING_BOX_CACHE_FRACTION = \"bounding_box_cache_fraction\";\n    public static final String TIME_DECAY = \"time_decay\";\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/config/ForestMode.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.config;\n\n/**\n * Options for using RCF, specially with thresholds\n */\npublic enum ForestMode {\n\n    /**\n     * a standard mode that uses shingling and most known applications; it uses the\n     * last K data points where K=1 would correspond to non time series (population)\n     * analysis\n     */\n    STANDARD,\n    /**\n     * time stamp is added automatically to data to correlate within RCF itself;\n     * this is useful for event streaams and for modeling sparse events. Option is\n     * provided to normalize the time gaps.\n     */\n    TIME_AUGMENTED,\n    /**\n     * uses various Fill-In strageies for data with gaps but not really sparse. Must\n     * have shingleSize greater than 1, typically larger shingle size is better, and\n     * so is fewer input dimensions\n     */\n    STREAMING_IMPUTE;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/config/IDynamicConfig.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.config;\n\n/**\n * This interface is used by model classes to configure model parameters by\n * name. This is intended primarily for settings that a user may want to change\n * at runtime.\n */\npublic interface IDynamicConfig {\n\n    <T> void setConfig(String name, T value, Class<T> clazz);\n\n    default void setConfig(String name, short value) {\n        setConfig(name, value, Short.class);\n    }\n\n    default void setConfig(String name, int value) {\n        setConfig(name, value, Integer.class);\n    }\n\n    default void setConfig(String name, long value) {\n        setConfig(name, value, Long.class);\n    }\n\n    default void setConfig(String name, float value) {\n        setConfig(name, value, Float.class);\n    }\n\n    default void setConfig(String name, double value) {\n        setConfig(name, value, Double.class);\n    }\n\n    default void setConfig(String name, boolean value) {\n        setConfig(name, value, Boolean.class);\n    }\n\n    <T> T getConfig(String name, Class<T> clazz);\n\n    default Object getConfig(String name) {\n        return getConfig(name, Object.class);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/config/ImputationMethod.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.config;\n\n/**\n * Options for filling in missing values\n */\npublic enum ImputationMethod {\n\n    /**\n     * use all 0's\n     */\n    ZERO,\n    /**\n     * use a fixed set of specified values (same as input dimension)\n     */\n    FIXED_VALUES,\n    /**\n     * last known value in each input dimension\n     */\n    PREVIOUS,\n    /**\n     * next seen value in each input dimension\n     */\n    NEXT,\n    /**\n     * linear interpolation\n     */\n    LINEAR,\n    /**\n     * use the RCF imputation; but would often require a minimum number of\n     * observations and would use defaults (often LINEAR) till that point\n     */\n    RCF;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/config/Precision.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.config;\n\n/**\n * Options for floating-point precision.\n */\npublic enum Precision {\n    /**\n     * Single-precision (32 bit) floating point numbers.\n     */\n    FLOAT_32,\n    /**\n     * Double-precision (64 bit) floating point numbers.\n     */\n    FLOAT_64;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/config/TransformMethod.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.config;\n\n/**\n * Options for internally transforming data in RCF These are built for\n * convenience. Domain knowledge before feeding data into RCF(any tool) will\n * often have the best benefit! These apply to the basic data and not\n * timestamps, time is (hopefully) always moving forward and is measured shifted\n * (from a running mean), with an option of normalization.\n */\npublic enum TransformMethod {\n\n    /**\n     * the best transformation for data!\n     */\n    NONE,\n    /**\n     * standard column normalization using fixed weights\n     */\n    WEIGHTED,\n    /**\n     * subtract a moving average -- the average would be computed using the same\n     * discount factor as the time decay of the RCF samplers.\n     */\n    SUBTRACT_MA,\n    /**\n     * divide by standard deviation, after subtracting MA\n     */\n    NORMALIZE,\n    /**\n     * difference from previous\n     */\n    DIFFERENCE,\n    /**\n     * divide by standard deviation of difference, after differencing (again\n     * subtract MA)\n     */\n    NORMALIZE_DIFFERENCE;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/AbstractForestTraversalExecutor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\nimport java.util.stream.Collector;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\n\npublic abstract class AbstractForestTraversalExecutor {\n\n    protected final ComponentList<?, ?> components;\n\n    protected AbstractForestTraversalExecutor(ComponentList<?, ?> components) {\n        this.components = components;\n    }\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. A visitor is constructed for each tree using the visitor\n     * factory, and then submitted to a tree. 
The results from all the trees are\n     * combined using the accumulator and then transformed using the finisher before\n     * being returned.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param accumulator    A function that combines the results from individual\n     *                       trees into an aggregate result.\n     * @param finisher       A function called on the aggregate result in order to\n     *                       produce the final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public abstract <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,\n            BinaryOperator<R> accumulator, Function<R, S> finisher);\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. A visitor is constructed for each tree using the visitor\n     * factory, and then submitted to each tree. 
The results from individual trees\n     * are collected using the {@link java.util.stream.Collector} and returned.\n     * Trees are visited in parallel using\n     * {@link java.util.Collection#parallelStream()}.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param collector      A collector used to aggregate individual tree results\n     *                       into a final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public abstract <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,\n            Collector<R, ?, S> collector);\n\n    /**\n     * Visit each of the trees in the forest sequentially and combine the individual\n     * results into an aggregate result. A visitor is constructed for each tree\n     * using the visitor factory, and then submitted to each tree. The results from\n     * all the trees are combined using the {@link ConvergingAccumulator}, and the\n     * method stops visiting trees after convergence is reached. 
The result is\n     * transformed using the finisher before being returned.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param accumulator    An accumulator that combines the results from\n     *                       individual trees into an aggregate result and checks to\n     *                       see if the result can be returned without further\n     *                       processing.\n     * @param finisher       A function called on the aggregate result in order to\n     *                       produce the final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public abstract <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,\n            ConvergingAccumulator<R> accumulator, Function<R, S> finisher);\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. A multi-visitor is constructed for each tree using the\n     * visitor factory, and then submitted to a tree. 
The results from all the trees\n     * are combined using the accumulator and then transformed using the finisher\n     * before being returned.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a multi-visitor.\n     * @param accumulator    A function that combines the results from individual\n     *                       trees into an aggregate result.\n     * @param finisher       A function called on the aggregate result in order to\n     *                       produce the final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public abstract <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            BinaryOperator<R> accumulator, Function<R, S> finisher);\n\n    /**\n     * Visit each of the trees in the forest and combine the individual results into\n     * an aggregate result. A multi-visitor is constructed for each tree using the\n     * visitor factory, and then submitted to a tree. The results from individual\n     * trees are collected using the {@link java.util.stream.Collector} and\n     * returned. 
Trees are visited in parallel using\n     * {@link java.util.Collection#parallelStream()}.\n     *\n     * @param point          The point that defines the traversal path.\n     * @param visitorFactory A factory method which is invoked for each tree to\n     *                       construct a visitor.\n     * @param collector      A collector used to aggregate individual tree results\n     *                       into a final result.\n     * @param <R>            The visitor result type. This is the type that will be\n     *                       returned after traversing each individual tree.\n     * @param <S>            The final type, after any final normalization at the\n     *                       forest level.\n     * @return The aggregated and finalized result after sending a visitor through\n     *         each tree in the forest.\n     */\n    public abstract <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            Collector<R, ?, S> collector);\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/AbstractForestUpdateExecutor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.Collections;\nimport java.util.List;\n\nimport lombok.Getter;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.store.IPointStore;\n\n/**\n * The class transforms input points into the form expected by internal models,\n * and submits transformed points to individual models for updating.\n *\n * @param <PointReference> The point representation used by model data\n *                         structures.\n * @param <Point>          The explicit data type of exchanging points\n */\n@Getter\npublic abstract class AbstractForestUpdateExecutor<PointReference, Point> {\n\n    protected final IStateCoordinator<PointReference, Point> updateCoordinator;\n    protected final ComponentList<PointReference, Point> components;\n    protected boolean currentlySampling = true;\n\n    /**\n     * Create a new AbstractForestUpdateExecutor.\n     * \n     * @param updateCoordinator The update coordinator that will be used to\n     *                          transform points and process deleted points if\n     *                          needed.\n     * @param components        A list of models to update.\n     */\n    protected AbstractForestUpdateExecutor(IStateCoordinator<PointReference, Point> updateCoordinator,\n            ComponentList<PointReference, Point> 
components) {\n        this.updateCoordinator = updateCoordinator;\n        this.components = components;\n    }\n\n    /**\n     * Update the forest with the given point. The point is submitted to each\n     * sampler in the forest. If the sampler accepts the point, the point is\n     * submitted to the update method in the corresponding Random Cut Tree.\n     *\n     * @param point The point used to update the forest.\n     */\n    public void update(Point point) {\n        update(point, false);\n    }\n\n    public void update(Point point, boolean updateShingleOnly) {\n        long internalSequenceNumber = updateCoordinator.getTotalUpdates();\n        IPointStore<?, ?> store = updateCoordinator.getStore();\n        if (store != null && store.isInternalShinglingEnabled()) {\n            internalSequenceNumber -= store.getShingleSize() - 1;\n        }\n        update(point, internalSequenceNumber, updateShingleOnly);\n    }\n\n    public void update(Point point, long sequenceNumber) {\n        update(point, sequenceNumber, false);\n    }\n\n    public void update(Point point, long sequenceNumber, boolean updateShingleOnly) {\n        PointReference updateInput = updateCoordinator.initUpdate(point, sequenceNumber, updateShingleOnly);\n        boolean propagate = (updateInput != null) && currentlySampling;\n        List<UpdateResult<PointReference>> results = (!propagate) ? 
Collections.emptyList()\n                : updateInternal(updateInput, sequenceNumber);\n        updateCoordinator.completeUpdate(results, updateInput);\n    }\n\n    /**\n     * Internal update method which submits the given input value to\n     * {@link IUpdatable#update} for each model managed by this executor.\n     *\n     * @param updateInput  Input value that will be submitted to the update method\n     *                     for each tree.\n     * @param currentIndex the timestamp\n     * @return a list of points that were deleted from the model as part of the\n     *         update.\n     */\n    protected abstract List<UpdateResult<PointReference>> updateInternal(PointReference updateInput, long currentIndex);\n\n    public void setCurrentlySampling(boolean value) {\n        currentlySampling = value;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/AbstractUpdateCoordinator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\n/**\n * A partial implementation of the {@link IStateCoordinator} interface that\n * defines a protected instance variable to track total updates and implements\n * the {@link IStateCoordinator#getTotalUpdates()} method. Classes that extend\n * AbstractStateCoordinator are responsible for incrementing the totalUpdates\n * counter after completing an update successfully.\n *\n * @param <PointReference> An internal point representation.\n * @param <Point>          Data type of potential exchanges of data\n */\npublic abstract class AbstractUpdateCoordinator<PointReference, Point>\n        implements IStateCoordinator<PointReference, Point> {\n    @Getter\n    @Setter\n    protected long totalUpdates;\n\n    public AbstractUpdateCoordinator() {\n        totalUpdates = 0;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/IStateCoordinator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.List;\n\nimport com.amazon.randomcutforest.store.IPointStore;\n\n/**\n * An IStateCoordinator is used in conjunction with a family of IUpdatable\n * instances. The coordinator transforms the input point into the form expected\n * by the updatable models, and processes the list of deleted points if needed.\n * An IStateCoordinator can be used to manage shared state.\n *\n * @param <PointReference> An internal point representation.\n * @param <Point>          Explicit point type\n */\npublic interface IStateCoordinator<PointReference, Point> {\n    /**\n     * Transform the input point into a value that can be submitted to IUpdatable\n     * instances.\n     *\n     * @param point             The input point.\n     * @param sequenceNumber    the sequence number associated with the point\n     * @param updateShingleOnly Only update the shingles (Provide a null reference)\n     *                          or, also update the point store (provide a usable\n     *                          reference)\n     *\n     * @return The point transformed into the representation expected by an\n     *         IUpdatable instance.\n     */\n    PointReference initUpdate(Point point, long sequenceNumber, boolean updateShingleOnly);\n\n    /**\n     * Complete the update. 
This method is called by IStateCoordinator after all\n     * IUpdatable instances have completed their individual updates. This method\n     * receives the list of points that were deleted by IUpdatable instances for\n     * further processing if needed.\n     *\n     * @param updateResults A list of points that were deleted.\n     * @param updateInput   The corresponding output from {@link #initUpdate}, which\n     *                      was passed into the update method for each component\n     */\n    void completeUpdate(List<UpdateResult<PointReference>> updateResults, PointReference updateInput);\n\n    long getTotalUpdates();\n\n    void setTotalUpdates(long totalUpdates);\n\n    IPointStore<PointReference, Point> getStore();\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/ITraversable.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.MultiVisitor;\nimport com.amazon.randomcutforest.Visitor;\nimport com.amazon.randomcutforest.tree.ITree;\n\n/**\n * This interface defines a model that can be traversed by a {@link Visitor}.\n */\npublic interface ITraversable {\n    /**\n     * Traverse the path defined by {@code point} and invoke the visitor. The path\n     * defined by {@code point} is the path from the root node to the leaf node\n     * where {@code point} would be inserted. The visitor is invoked for each node\n     * in the path in reverse order (starting from the leaf node and ending at the\n     * root node). 
The return value is obtained by calling\n     * {@link Visitor#getResult()} on the visitor after it has visited each node in\n     * the path.\n     * \n     * @param point          A point that determines the traversal path.\n     * @param visitorFactory A factory function that can be applied to an\n     *                       {@link ITree} instance to obtain a {@link Visitor}\n     *                       instance.\n     * @param <R>            The return value type of the visitor.\n     * @return the value of {@link Visitor#getResult()} after visiting each node in\n     *         the path.\n     */\n    <R> R traverse(float[] point, IVisitorFactory<R> visitorFactory);\n\n    /**\n     * Traverse the paths defined by {@code point} and the multi-visitor, and invoke\n     * the multi-visitor on each node. The path defined by {@code point} is the path\n     * from the root node to the leaf node where {@code point} would be inserted.\n     * However, at each node along the path we invoke {@link MultiVisitor#trigger},\n     * and if it returns true we create a copy of the visitor and send it down both\n     * branches of the tree. The multi-visitor is invoked for each node in the path\n     * in reverse order (starting from the leaf node and ending at the root node).\n     * When two multi-visitors meet at a node, they are combined by calling\n     * {@link MultiVisitor#combine}. 
The return value is obtained by calling\n     * {@link MultiVisitor#getResult()} on the single remaining visitor after it has\n     * visited each node in each branch the path.\n     * \n     * @param point          A point that determines the traversal path.\n     * @param visitorFactory A factory function that can be applied to an\n     *                       {@link ITree} instance to obtain a {@link MultiVisitor}\n     *                       instance.\n     * @param <R>            The return value type of the multi-visitor.\n     * @return the value of {@link MultiVisitor#getResult()} after traversing all\n     *         paths.\n     */\n    <R> R traverseMulti(float[] point, IMultiVisitorFactory<R> visitorFactory);\n\n    /**\n     * After a new traversable model is initialized, it will not be able to return\n     * meaningful results to queries until it has been updated with (i.e., learned\n     * from) some number of points. The exact number of points may vary for\n     * different models. After this method returns true for the first time, it\n     * should continue to return true unless the user takes an explicit action to\n     * reset the model state.\n     *\n     * @return true if this model is ready to provide a meaningful response to a\n     *         traversal query, otherwise false.\n     */\n    boolean isOutputReady();\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/IUpdatable.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\npublic interface IUpdatable<PointReference> {\n    /**\n     * result of an update on a sampler plus tree\n     * \n     * @param point  to be considered for updating the sampler plus tree\n     * @param seqNum timestamp\n     * @return the (inserted,deleted) pair of handles in the tree for eventual\n     *         bookkeeping\n     */\n    UpdateResult<PointReference> update(PointReference point, long seqNum);\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/ParallelForestTraversalExecutor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.List;\nimport java.util.concurrent.Callable;\nimport java.util.concurrent.ForkJoinPool;\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\nimport java.util.stream.Collector;\nimport java.util.stream.Collectors;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\n\n/**\n * An implementation of forest traversal methods that uses a private thread pool\n * to visit trees in parallel.\n */\npublic class ParallelForestTraversalExecutor extends AbstractForestTraversalExecutor {\n\n    ForkJoinPool forkJoinPool;\n    private final int threadPoolSize;\n\n    public ParallelForestTraversalExecutor(ComponentList<?, ?> treeExecutors, int threadPoolSize) {\n        super(treeExecutors);\n        this.threadPoolSize = threadPoolSize;\n        forkJoinPool = new ForkJoinPool(threadPoolSize);\n    }\n\n    @Override\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, BinaryOperator<R> accumulator,\n            Function<R, S> finisher) {\n\n        return submitAndJoin(() -> components.parallelStream().map(c -> c.traverse(point, visitorFactory))\n             
   .reduce(accumulator).map(finisher))\n                        .orElseThrow(() -> new IllegalStateException(\"accumulator returned an empty result\"));\n    }\n\n    @Override\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, Collector<R, ?, S> collector) {\n\n        return submitAndJoin(\n                () -> components.parallelStream().map(c -> c.traverse(point, visitorFactory)).collect(collector));\n    }\n\n    @Override\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,\n            ConvergingAccumulator<R> accumulator, Function<R, S> finisher) {\n\n        for (int i = 0; i < components.size(); i += threadPoolSize) {\n            final int start = i;\n            final int end = Math.min(start + threadPoolSize, components.size());\n\n            List<R> results = submitAndJoin(() -> components.subList(start, end).parallelStream()\n                    .map(c -> c.traverse(point, visitorFactory)).collect(Collectors.toList()));\n            results.forEach(accumulator::accept);\n\n            if (accumulator.isConverged()) {\n                break;\n            }\n        }\n\n        return finisher.apply(accumulator.getAccumulatedValue());\n    }\n\n    @Override\n    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            BinaryOperator<R> accumulator, Function<R, S> finisher) {\n\n        return submitAndJoin(() -> components.parallelStream().map(c -> c.traverseMulti(point, visitorFactory))\n                .reduce(accumulator).map(finisher))\n                        .orElseThrow(() -> new IllegalStateException(\"accumulator returned an empty result\"));\n    }\n\n    @Override\n    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            Collector<R, ?, S> collector) {\n\n        return submitAndJoin(\n                () -> components.parallelStream().map(c -> c.traverseMulti(point, 
visitorFactory)).collect(collector));\n    }\n\n    <T> T submitAndJoin(Callable<T> callable) {\n        if (forkJoinPool == null) {\n            forkJoinPool = new ForkJoinPool(threadPoolSize);\n        }\n        return forkJoinPool.submit(callable).join();\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/ParallelForestUpdateExecutor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.List;\nimport java.util.concurrent.Callable;\nimport java.util.concurrent.ForkJoinPool;\nimport java.util.stream.Collectors;\n\nimport com.amazon.randomcutforest.ComponentList;\n\n/**\n * An implementation of forest traversal methods that uses a private thread pool\n * to visit trees in parallel.\n * \n * @param <PointReference> references to a point\n * @param <Point>          explicit data type of a point\n */\npublic class ParallelForestUpdateExecutor<PointReference, Point>\n        extends AbstractForestUpdateExecutor<PointReference, Point> {\n\n    ForkJoinPool forkJoinPool;\n    private final int threadPoolSize;\n\n    public ParallelForestUpdateExecutor(IStateCoordinator<PointReference, Point> updateCoordinator,\n            ComponentList<PointReference, Point> components, int threadPoolSize) {\n        super(updateCoordinator, components);\n        this.threadPoolSize = threadPoolSize;\n        forkJoinPool = new ForkJoinPool(threadPoolSize);\n    }\n\n    @Override\n    protected List<UpdateResult<PointReference>> updateInternal(PointReference point, long seqNum) {\n        return submitAndJoin(() -> components.parallelStream().map(t -> t.update(point, seqNum))\n                .filter(UpdateResult::isStateChange).collect(Collectors.toList()));\n    }\n\n    <T> T 
submitAndJoin(Callable<T> callable) {\n        if (forkJoinPool == null) {\n            forkJoinPool = new ForkJoinPool(threadPoolSize);\n        }\n        return forkJoinPool.submit(callable).join();\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/PointStoreCoordinator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\n\nimport java.util.List;\n\nimport com.amazon.randomcutforest.store.IPointStore;\nimport com.amazon.randomcutforest.store.PointStore;\n\n/**\n * pointstore coordinator for compact RCF\n * \n * @param <Point> the datatype of the actual point\n */\n\npublic class PointStoreCoordinator<Point> extends AbstractUpdateCoordinator<Integer, Point> {\n\n    private final IPointStore<Integer, Point> store;\n\n    public PointStoreCoordinator(IPointStore<Integer, Point> store) {\n        checkNotNull(store, \"store must not be null\");\n        this.store = store;\n    }\n\n    @Override\n    public Integer initUpdate(Point point, long sequenceNumber, boolean updateShingleOnly) {\n        int index = store.add(point, sequenceNumber, updateShingleOnly);\n        return (index == PointStore.INFEASIBLE_POINTSTORE_INDEX) ? 
null : index;\n    }\n\n    @Override\n    public void completeUpdate(List<UpdateResult<Integer>> updateResults, Integer updateInput) {\n        if (updateInput != null) { // can be null for initial shingling\n            updateResults.forEach(result -> {\n                result.getAddedPoint().ifPresent(store::incrementRefCount);\n                result.getDeletedPoint().ifPresent(store::decrementRefCount);\n            });\n            store.decrementRefCount(updateInput);\n        }\n        totalUpdates++;\n    }\n\n    public IPointStore<Integer, Point> getStore() {\n        return store;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/SamplerPlusTree.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\n\nimport java.util.Optional;\n\nimport lombok.Getter;\n\nimport com.amazon.randomcutforest.IComponentModel;\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.config.Config;\nimport com.amazon.randomcutforest.sampler.ISampled;\nimport com.amazon.randomcutforest.sampler.IStreamSampler;\nimport com.amazon.randomcutforest.tree.ITree;\n\n/**\n * A SamplerPlusTree corresponds to a combination of sampler and tree where the\n * information is passed via P and the tree can seek explicit point information\n * of type Q\n *\n * @param <P> The internal point representation expected by the component models\n *            in this list.\n * @param <Q> The explicit data type of points being passed\n */\n@Getter\npublic class SamplerPlusTree<P, Q> implements IComponentModel<P, Q> {\n\n    private ITree<P, Q> tree;\n    private IStreamSampler<P> sampler;\n\n    /**\n     * Constructor of a pair of sampler + tree. 
The sampler is the driver's seat\n     * because it accepts/rejects independently of the tree and the tree has to\n     * remain consistent.\n     *\n     * @param sampler the sampler\n     * @param tree    the corresponding tree\n     */\n    public SamplerPlusTree(IStreamSampler<P> sampler, ITree<P, Q> tree) {\n        checkNotNull(sampler, \"sampler must not be null\");\n        checkNotNull(tree, \"tree must not be null\");\n        this.sampler = sampler;\n        this.tree = tree;\n    }\n\n    /**\n     * This is the main function that maintains the coordination between the sampler and\n     * the tree. The sampler proposes acceptance (by setting the weight in\n     * queueEntry) and in that case the evictedPoint is set. That evictedPoint is\n     * removed from the tree and in that case its reference deleteRef of type T is\n     * noted. The point is then added to the tree where the tree may propose a new\n     * reference newRef because the point is already present in the tree. The\n     * sampler entry is modified and added to the sampler. 
The pair of the newRef\n     * and deleteRef are returned for plausible bookkeeping in update executors.\n     *\n     * @param point         point in consideration for updating the sampler plus\n     *                      tree\n     * @param sequenceIndex a time stamp that is used to generate weight in the\n     *                      timed sampling\n     * @return the pair of (newRef,deleteRef) with potential Optional.empty()\n     */\n\n    @Override\n    public UpdateResult<P> update(P point, long sequenceIndex) {\n        P deleteRef = null;\n        if (sampler.acceptPoint(sequenceIndex)) {\n            Optional<ISampled<P>> deletedPoint = sampler.getEvictedPoint();\n            if (deletedPoint.isPresent()) {\n                ISampled<P> p = deletedPoint.get();\n                deleteRef = p.getValue();\n                tree.deletePoint(deleteRef, p.getSequenceIndex());\n            }\n\n            // the tree may choose to return a reference to an existing point\n            // whose value is equal to `point`\n            P addedPoint = tree.addPoint(point, sequenceIndex);\n            sampler.addPoint(addedPoint);\n            return UpdateResult.<P>builder().addedPoint(addedPoint).deletedPoint(deleteRef).build();\n        }\n        return UpdateResult.noop();\n    }\n\n    @Override\n    public <R> R traverse(float[] point, IVisitorFactory<R> visitorFactory) {\n        return tree.traverse(point, visitorFactory);\n    }\n\n    @Override\n    public <R> R traverseMulti(float[] point, IMultiVisitorFactory<R> visitorFactory) {\n        return tree.traverseMulti(point, visitorFactory);\n    }\n\n    @Override\n    public <T> void setConfig(String name, T value, Class<T> clazz) {\n        if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {\n            tree.setConfig(name, value, clazz);\n        } else if (Config.TIME_DECAY.equals(name)) {\n            sampler.setConfig(name, value, clazz);\n        } else {\n            throw new 
IllegalArgumentException(\"Unsupported configuration setting: \" + name);\n        }\n    }\n\n    @Override\n    public <T> T getConfig(String name, Class<T> clazz) {\n        checkNotNull(clazz, \"clazz must not be null\");\n        if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {\n            return tree.getConfig(name, clazz);\n        } else if (Config.TIME_DECAY.equals(name)) {\n            return sampler.getConfig(name, clazz);\n        } else {\n            throw new IllegalArgumentException(\"Unsupported configuration setting: \" + name);\n        }\n    }\n\n    @Override\n    public boolean isOutputReady() {\n        return tree.isOutputReady();\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/SequentialForestTraversalExecutor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\nimport java.util.stream.Collector;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.IComponentModel;\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\n\n/**\n * Traverse the trees in a forest sequentially.\n */\npublic class SequentialForestTraversalExecutor extends AbstractForestTraversalExecutor {\n\n    public SequentialForestTraversalExecutor(ComponentList<?, ?> components) {\n        super(components);\n    }\n\n    @Override\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, BinaryOperator<R> accumulator,\n            Function<R, S> finisher) {\n\n        R unnormalizedResult = components.stream().map(c -> c.traverse(point, visitorFactory)).reduce(accumulator)\n                .orElseThrow(() -> new IllegalStateException(\"accumulator returned an empty result\"));\n\n        return finisher.apply(unnormalizedResult);\n    }\n\n    @Override\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory, Collector<R, ?, S> collector) {\n\n        return components.stream().map(c -> 
c.traverse(point, visitorFactory)).collect(collector);\n    }\n\n    @Override\n    public <R, S> S traverseForest(float[] point, IVisitorFactory<R> visitorFactory,\n            ConvergingAccumulator<R> accumulator, Function<R, S> finisher) {\n\n        for (IComponentModel<?, ?> component : components) {\n            accumulator.accept(component.traverse(point, visitorFactory));\n            if (accumulator.isConverged()) {\n                break;\n            }\n        }\n\n        return finisher.apply(accumulator.getAccumulatedValue());\n    }\n\n    @Override\n    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            BinaryOperator<R> accumulator, Function<R, S> finisher) {\n\n        R unnormalizedResult = components.stream().map(c -> c.traverseMulti(point, visitorFactory)).reduce(accumulator)\n                .orElseThrow(() -> new IllegalStateException(\"accumulator returned an empty result\"));\n\n        return finisher.apply(unnormalizedResult);\n    }\n\n    @Override\n    public <R, S> S traverseForestMulti(float[] point, IMultiVisitorFactory<R> visitorFactory,\n            Collector<R, ?, S> collector) {\n\n        return components.stream().map(c -> c.traverseMulti(point, visitorFactory)).collect(collector);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/SequentialForestUpdateExecutor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.List;\nimport java.util.stream.Collectors;\n\nimport com.amazon.randomcutforest.ComponentList;\n\n/**\n * Traverse the trees in a forest sequentially.\n * \n * @param <PointReference> references to a point\n * @param <Point>          explicit data type of a point\n */\npublic class SequentialForestUpdateExecutor<PointReference, Point>\n        extends AbstractForestUpdateExecutor<PointReference, Point> {\n\n    public SequentialForestUpdateExecutor(IStateCoordinator<PointReference, Point> updateCoordinator,\n            ComponentList<PointReference, Point> components) {\n        super(updateCoordinator, components);\n    }\n\n    @Override\n    protected List<UpdateResult<PointReference>> updateInternal(PointReference point, long seqNum) {\n        return components.stream().map(t -> t.update(point, seqNum)).filter(UpdateResult::isStateChange)\n                .collect(Collectors.toList());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/executor/UpdateResult.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport java.util.Optional;\n\nimport lombok.Builder;\n\n/**\n * When {@link IUpdatable#update} is called, an updatable model may choose to\n * update its state with the submitted point. This class contains the result of\n * such an operation. A list of {@code AddPointResults}s is consumed by\n * {@link IStateCoordinator#completeUpdate} to update global state as needed to\n * reflect the updates to individual component models.\n * \n * @param <PointReference> The point reference type.\n */\n@Builder\npublic class UpdateResult<PointReference> {\n\n    private static final UpdateResult<Object> NOOP = builder().build();\n\n    private final PointReference addedPoint;\n\n    private final PointReference deletedPoint;\n\n    /**\n     * Return an {@code UpdateResult} value a no-op (an operation that did not\n     * change the state of the model). 
For the returned value,\n     * {@code isStateChange()} will be false.\n     * \n     * @param <Q> The point reference type.\n     * @return an {@code UpdateResult} value representing a no-op.\n     */\n    public static <Q> UpdateResult<Q> noop() {\n        return (UpdateResult<Q>) NOOP;\n    }\n\n    /**\n     * An optional containing a reference to the point that was added to the model\n     * as part of the update call, or {@code Optional.empty()} if no point was\n     * added.\n     * \n     * @return an optional containing a reference to the point that was added to the\n     *         model as part of the update call, or {@code Optional.empty()} if no\n     *         point was added.\n     */\n    public Optional<PointReference> getAddedPoint() {\n        return Optional.ofNullable(addedPoint);\n    }\n\n    /**\n     * Once a model is at capacity, a point may be deleted from the model as part of\n     * an update. If a point is deleted during the update operation, then the\n     * deleted point reference will be present in the result of this method.\n     * \n     * @return a reference to the deleted point reference or\n     *         {@code Optional.empty()} if no point was deleted.\n     */\n    public Optional<PointReference> getDeletedPoint() {\n        return Optional.ofNullable(deletedPoint);\n    }\n\n    /**\n     * Return true if this update result represents a change to the updatable model.\n     * A change means that a point was added to the model, and possibly a point was\n     * deleted from the model.\n     * \n     * @return true if this update result represents a change to the updatable\n     *         model.\n     */\n    public boolean isStateChange() {\n        return addedPoint != null || deletedPoint != null;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/imputation/ConditionalSampleSummarizer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.imputation;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.min;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.returntypes.ConditionalTreeSample;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class ConditionalSampleSummarizer {\n\n    /**\n     * this limits the number of valueswe would see per dimension; note that it may\n     * be hard to interpret a larger list\n     */\n    public static int MAX_NUMBER_OF_TYPICAL_PER_DIMENSION = 2;\n\n    /**\n     * the maximum size of the typical points array, irrespective of the number of\n     * missing dimensions\n     */\n    public static int MAX_NUMBER_OF_TYPICAL_ELEMENTS = 5;\n\n    /**\n     * the array of missing dimension indices\n     */\n    protected int[] missingDimensions;\n\n    /**\n     * the query point, where we are inferring the missing values indicated by\n     * missingDimensions[0], missingDimensions[1], ... 
etc.\n     */\n    protected float[] queryPoint;\n\n    /**\n     * a control parameter; =0 corresponds to (near) random samples and =1\n     * corresponds to more central (low anomaly score) samples\n     */\n    protected double centrality;\n\n    /**\n     * a boolean that determines if the summarization should use the missing\n     * dimensions or the full dimensions.\n     */\n    protected boolean project = false;\n\n    protected int numberOfReps = 1;\n\n    protected double shrinkage = 0;\n\n    protected int shingleSize = 1;\n\n    public ConditionalSampleSummarizer(int[] missingDimensions, float[] queryPoint, double centrality, boolean project,\n            int numberOfReps, double shrinkage, int shingleSize) {\n        this.missingDimensions = Arrays.copyOf(missingDimensions, missingDimensions.length);\n        this.queryPoint = Arrays.copyOf(queryPoint, queryPoint.length);\n        this.centrality = centrality;\n        this.project = project;\n        this.numberOfReps = numberOfReps;\n        this.shrinkage = shrinkage;\n        this.shingleSize = shingleSize;\n    }\n\n    public SampleSummary summarize(List<ConditionalTreeSample> alist) {\n        checkArgument(alist.size() > 0, \"incorrect call to summarize\");\n        return summarize(alist, true);\n    }\n\n    public SampleSummary summarize(List<ConditionalTreeSample> alist, boolean addTypical) {\n        /**\n         * first we dedup over the points in the pointStore -- it is likely, and\n         * beneficial that different trees acting as different predictors in an ensemble\n         * predict the same point that has been seen before. This would be specially\n         * true if the time decay is large -- then the whole ensemble starts to behave\n         * as a sliding window.\n         *\n         * note that it is possible that two different *points* predict the same missing\n         * value especially when values are repeated in time. 
however that check of\n         * equality of points would be expensive -- and one mechanism is to use a tree\n         * (much like an RCT) to test for equality. We will try to not perform such a\n         * test.\n         */\n\n        double totalWeight = alist.size();\n        List<ConditionalTreeSample> newList = ConditionalTreeSample.dedup(alist);\n\n        newList.sort((o1, o2) -> Double.compare(o1.distance, o2.distance));\n        int dimensions = queryPoint.length;\n\n        if (!addTypical) {\n            ArrayList<Weighted<float[]>> points = new ArrayList<>();\n            newList.stream().forEach(e -> {\n                if (!project) {\n                    if (shingleSize == 1) {\n                        points.add(new Weighted<>(e.leafPoint, (float) e.weight));\n                    } else {\n                        float[] values = Arrays.copyOfRange(e.leafPoint, dimensions - dimensions / shingleSize,\n                                dimensions);\n                        points.add(new Weighted<>(values, (float) e.weight));\n                    }\n                } else {\n                    float[] values = new float[missingDimensions.length];\n                    for (int i = 0; i < missingDimensions.length; i++) {\n                        values[i] = e.leafPoint[missingDimensions[i]];\n                    }\n                    points.add(new Weighted<>(values, (float) e.weight));\n                }\n            });\n\n            return new SampleSummary(points);\n        }\n\n        /**\n         * for centrality = 0; there will be no filtration for centrality = 1; at least\n         * half the values will be present -- the sum of distance(P33) + distance(P50)\n         * appears to be slightly more reasonable than 2 * distance(P50) the distance 0\n         * elements correspond to exact matches (on the available fields)\n         *\n         * it is an open question is the weight of such points should be higher. 
But if\n         * one wants true dynamic adaptability then such a choice to increase weights of\n         * exact matches would go against the dynamic sampling based use of RCF.\n         **/\n\n        int num = 0;\n        if (centrality > 0) {\n            double threshold = centrality * newList.get(0).distance + 1e-6;\n            double currentWeight = 0;\n            int alwaysInclude = 0;\n            double remainderWeight = totalWeight;\n            while (newList.get(alwaysInclude).distance == 0) {\n                remainderWeight -= newList.get(alwaysInclude).weight;\n                ++alwaysInclude;\n                if (alwaysInclude == newList.size()) {\n                    break;\n                }\n            }\n            for (int j = 1; j < newList.size(); j++) {\n                if ((currentWeight < remainderWeight / 3\n                        && currentWeight + newList.get(j).weight >= remainderWeight / 3)\n                        || (currentWeight < remainderWeight / 2\n                                && currentWeight + newList.get(j).weight >= remainderWeight / 2)) {\n                    threshold = centrality * newList.get(j).distance;\n                }\n                currentWeight += newList.get(j).weight;\n            }\n            // note that the threshold is currently centrality * (some distance in the list)\n            // thus the sequel uses a convex combination; and setting centrality = 0 removes\n            // the entire filtering based on distances\n            threshold += (1 - centrality) * newList.get(newList.size() - 1).distance;\n            while (num < newList.size() && newList.get(num).distance <= threshold) {\n                ++num;\n            }\n        } else {\n            num = newList.size();\n        }\n\n        ArrayList<Weighted<float[]>> typicalPoints = new ArrayList<>();\n        for (int j = 0; j < num; j++) {\n            ConditionalTreeSample e = newList.get(j);\n            float[] values;\n         
   if (project) {\n                values = new float[missingDimensions.length];\n                for (int i = 0; i < missingDimensions.length; i++) {\n                    values[i] = e.leafPoint[missingDimensions[i]];\n                }\n            } else {\n                if (shingleSize == 1) {\n                    values = e.leafPoint;\n                } else {\n                    values = Arrays.copyOfRange(e.leafPoint, dimensions - dimensions / shingleSize, dimensions);\n                }\n            }\n            typicalPoints.add(new Weighted<>(values, (float) e.weight));\n        }\n        int maxAllowed = min(queryPoint.length * MAX_NUMBER_OF_TYPICAL_PER_DIMENSION, MAX_NUMBER_OF_TYPICAL_ELEMENTS);\n        maxAllowed = min(maxAllowed, num);\n\n        SampleSummary projectedSummary = Summarizer.summarize(typicalPoints, maxAllowed, num, false,\n                Summarizer::L2distance, 72, false, numberOfReps, shrinkage);\n\n        return new SampleSummary(typicalPoints, projectedSummary);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/imputation/ImputeVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.imputation;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.MultiVisitor;\nimport com.amazon.randomcutforest.anomalydetection.AnomalyScoreVisitor;\nimport com.amazon.randomcutforest.returntypes.ConditionalTreeSample;\nimport com.amazon.randomcutforest.tree.BoundingBox;\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * A MultiVisitor which imputes missing values in a point. The missing values\n * are first imputed with the corresponding values in the leaf node in the\n * traversal path. 
Then, when this MultiVisitor is merged with another\n * MultiVisitor, we keep the imputed values with a lower rank, where the rank\n * value is the anomaly score for the imputed point.\n */\npublic class ImputeVisitor implements MultiVisitor<ConditionalTreeSample> {\n\n    // default large values for initialization; consider -ve log( 0 )\n    public static double DEFAULT_INIT_VALUE = Double.MAX_VALUE;\n\n    /**\n     * an array that helps indicate the missing entries in the tree space\n     */\n    protected final boolean[] missing;\n\n    /**\n     * the query point in the tree space, where the missing entries (in tree space)\n     * would be overwritten\n     */\n    protected float[] queryPoint;\n\n    /**\n     * the unnormalized anomaly score of a point, should be interpreted as -ve\n     * log(likelihood)\n     */\n    protected double anomalyRank;\n\n    /**\n     * distance of the point in the forest space, this is not tree specific\n     */\n    protected double distance;\n\n    /**\n     * a parameter that controls central estimation ( = 1.0) and fully random sample\n     * over entire range ( = 0.0 )\n     */\n    protected double centrality;\n\n    protected long randomSeed;\n\n    protected double randomRank;\n\n    protected boolean converged;\n\n    protected int pointIndex;\n\n    protected int[] dimensionsUsed;\n\n    protected BoundingBox box;\n\n    /**\n     * Create a new ImputeVisitor.\n     *\n     * @param liftedPoint          The point with missing values we want to impute\n     * @param queryPoint           The projected point in the tree space\n     * @param liftedMissingIndexes the original missing indices\n     * @param missingIndexes       The indexes of the missing values in the tree\n     *                             space\n     */\n    public ImputeVisitor(float[] liftedPoint, float[] queryPoint, int[] liftedMissingIndexes, int[] missingIndexes,\n            double centrality, long randomSeed) {\n        checkArgument(centrality 
>= 0, \" cannoit be negative \");\n        checkArgument(centrality <= 1.0, \" cannot be more than 1.0\");\n        this.queryPoint = Arrays.copyOf(queryPoint, queryPoint.length);\n        this.missing = new boolean[queryPoint.length];\n        this.centrality = centrality;\n        this.randomSeed = randomSeed;\n        this.dimensionsUsed = new int[queryPoint.length];\n\n        if (missingIndexes == null) {\n            missingIndexes = new int[0];\n        }\n\n        for (int i = 0; i < missingIndexes.length; i++) {\n            checkArgument(0 <= missingIndexes[i], \"Missing value indexes cannot be negative\");\n            checkArgument(missingIndexes[i] < queryPoint.length,\n                    \"Missing value indexes must be less than query length\");\n            missing[missingIndexes[i]] = true;\n        }\n\n        anomalyRank = DEFAULT_INIT_VALUE;\n        distance = DEFAULT_INIT_VALUE;\n    }\n\n    public ImputeVisitor(float[] queryPoint, int numberOfMissingIndices, int[] missingIndexes) {\n        this(queryPoint, Arrays.copyOf(queryPoint, queryPoint.length),\n                Arrays.copyOf(missingIndexes, Math.min(numberOfMissingIndices, missingIndexes.length)),\n                Arrays.copyOf(missingIndexes, Math.min(numberOfMissingIndices, missingIndexes.length)), 1.0, 0L);\n    }\n\n    /**\n     * A copy constructor which creates a deep but partial copy of the original\n     * ImputeVisitor.\n     *\n     * @param original\n     */\n    ImputeVisitor(ImputeVisitor original) {\n        int length = original.queryPoint.length;\n        this.queryPoint = Arrays.copyOf(original.queryPoint, length);\n        this.missing = Arrays.copyOf(original.missing, length);\n        this.dimensionsUsed = new int[original.dimensionsUsed.length];\n        this.randomSeed = new Random(original.randomSeed).nextLong();\n        this.centrality = original.centrality;\n        anomalyRank = DEFAULT_INIT_VALUE;\n        distance = DEFAULT_INIT_VALUE;\n    }\n\n    
/**\n     * Update the rank value using the probability that the imputed query point is\n     * separated from this bounding box in a random cut. This step is conceptually\n     * the same as {@link AnomalyScoreVisitor#accept}.\n     *\n     * @param node        the node being visited\n     * @param depthOfNode the depth of the node being visited\n     */\n    public void accept(final INodeView node, final int depthOfNode) {\n\n        double probabilityOfSeparation;\n        if (box == null) {\n            box = (BoundingBox) node.getBoundingBox();\n            probabilityOfSeparation = CommonUtils.getProbabilityOfSeparation(box, queryPoint);\n        } else {\n            probabilityOfSeparation = node.probailityOfSeparation(queryPoint);\n        }\n        converged = (probabilityOfSeparation == 0);\n\n        if (probabilityOfSeparation <= 0) {\n            return;\n        }\n\n        anomalyRank = probabilityOfSeparation * scoreUnseen(depthOfNode, node.getMass())\n                + (1 - probabilityOfSeparation) * anomalyRank;\n    }\n\n    /**\n     * Impute the missing values in the query point with the corresponding values in\n     * the leaf point. 
Set the rank to the score function evaluated at the leaf\n     * node.\n     *\n     * @param leafNode    the leaf node being visited\n     * @param depthOfNode the depth of the leaf node\n     */\n    @Override\n    public void acceptLeaf(final INodeView leafNode, final int depthOfNode) {\n        float[] leafPoint = leafNode.getLeafPoint();\n        pointIndex = leafNode.getLeafPointIndex();\n        double distance = 0;\n        for (int i = 0; i < queryPoint.length; i++) {\n            if (missing[i]) {\n                queryPoint[i] = leafPoint[i];\n            } else {\n                double t = (queryPoint[i] - leafPoint[i]);\n                distance += Math.abs(t);\n            }\n        }\n\n        if (centrality < 1.0) {\n            Random rng = new Random(randomSeed);\n            randomSeed = rng.nextLong();\n            randomRank = rng.nextDouble();\n        }\n\n        this.distance = distance;\n        if (distance <= 0) {\n            converged = true;\n            if (depthOfNode == 0) {\n                anomalyRank = 0;\n            } else {\n                anomalyRank = scoreSeen(depthOfNode, leafNode.getMass());\n            }\n        } else {\n            anomalyRank = scoreUnseen(depthOfNode, leafNode.getMass());\n        }\n    }\n\n    /**\n     * @return the imputed point.\n     */\n    @Override\n    public ConditionalTreeSample getResult() {\n        return new ConditionalTreeSample(pointIndex, box, distance, queryPoint);\n    }\n\n    /**\n     * An ImputeVisitor should split whenever the cut dimension in a node\n     * corresponds to a missing value in the query point.\n     *\n     * @param node A node in the tree traversal\n     * @return true if the cut dimension in the node corresponds to a missing value\n     *         in the query point, false otherwise.\n     */\n    @Override\n    public boolean trigger(final INodeView node) {\n        int index = node.getCutDimension();\n        ++dimensionsUsed[index];\n        return 
missing[index];\n    }\n\n    protected double getAnomalyRank() {\n        return anomalyRank;\n    }\n\n    protected double getDistance() {\n        return distance;\n    }\n\n    /**\n     * @return a copy of this visitor.\n     */\n    @Override\n    public MultiVisitor<ConditionalTreeSample> newPartialCopy() {\n        return new ImputeVisitor(this);\n    }\n\n    double adjustedRank() {\n        return (1 - centrality) * randomRank + centrality * anomalyRank;\n    }\n\n    protected boolean updateCombine(ImputeVisitor other) {\n        return other.adjustedRank() < adjustedRank();\n    }\n\n    /**\n     * If this visitor has a lower rank than the second visitor, do nothing.\n     * Otherwise, overwrite this visitor's imputed values with the values from the\n     * second visitor.\n     *\n     * @param other A second visitor\n     */\n    @Override\n    public void combine(MultiVisitor<ConditionalTreeSample> other) {\n        ImputeVisitor visitor = (ImputeVisitor) other;\n        if (updateCombine(visitor)) {\n            updateFrom(visitor);\n        }\n    }\n\n    protected void updateFrom(ImputeVisitor visitor) {\n        System.arraycopy(visitor.queryPoint, 0, queryPoint, 0, queryPoint.length);\n        pointIndex = visitor.pointIndex;\n        anomalyRank = visitor.anomalyRank;\n        box = visitor.box;\n        converged = visitor.converged;\n        distance = visitor.distance;\n    }\n\n    protected double scoreSeen(int depth, int mass) {\n        return CommonUtils.defaultScoreSeenFunction(depth, mass);\n    }\n\n    protected double scoreUnseen(int depth, int mass) {\n        return CommonUtils.defaultScoreUnseenFunction(depth, mass);\n    }\n\n    @Override\n    public boolean isConverged() {\n        return converged;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/inputtypes/Point.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.inputtypes;\n\nimport java.util.Arrays;\n\n/**\n * a basic class that defines a proto-point\n */\npublic class Point {\n\n    // current values\n    double[] currentInput;\n\n    // input timestamp\n    long inputTimestamp;\n\n    public Point(double[] input, long inputTimestamp) {\n        this.currentInput = copyIfNotnull(input);\n        this.inputTimestamp = inputTimestamp;\n    }\n\n    public double[] getCurrentInput() {\n        return copyIfNotnull(currentInput);\n    }\n\n    public long getInputTimestamp() {\n        return inputTimestamp;\n    }\n\n    protected double[] copyIfNotnull(double[] array) {\n        return array == null ? null : Arrays.copyOf(array, array.length);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/inspect/NearNeighborVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.inspect;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\n\nimport com.amazon.randomcutforest.Visitor;\nimport com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * A visitor that returns the leaf node in a traversal if the distance between\n * the leaf point and the query point is less than a given threshold.\n */\npublic class NearNeighborVisitor implements Visitor<Optional<Neighbor>> {\n\n    private final float[] queryPoint;\n    private final double distanceThreshold;\n    private Neighbor neighbor;\n\n    /**\n     * Create a NearNeighborVisitor for the given query point.\n     *\n     * @param queryPoint        The point whose neighbors we are looking for.\n     * @param distanceThreshold Leaf points whose distance from the query point is\n     *                          less than this value are considered near neighbors.\n     */\n    public NearNeighborVisitor(float[] queryPoint, double distanceThreshold) {\n        this.queryPoint = queryPoint;\n        this.distanceThreshold = distanceThreshold;\n        neighbor = null;\n    }\n\n    /**\n     * Create a NearNeighborVisitor which always returns the leaf point in the\n     * traversal. 
The distance threshold is set to positive infinity.\n     *\n     * @param queryPoint The point whose neighbors we are looking for.\n     */\n    public NearNeighborVisitor(float[] queryPoint) {\n        this(queryPoint, Double.POSITIVE_INFINITY);\n    }\n\n    /**\n     * Near neighbors are identified in the {@link #acceptLeaf} method, hence this\n     * method does nothing.\n     *\n     * @param node        the node being visited\n     * @param depthOfNode the depth of the node being visited\n     */\n    @Override\n    public void accept(INodeView node, int depthOfNode) {\n    }\n\n    /**\n     * Check to see whether the Euclidean distance between the leaf point and the\n     * query point is less than the distance threshold. If it is, then this visitor\n     * will return an {@link java.util.Optional} containing this leaf point\n     * (converted to a {@link Neighbor} object). Otherwise, this visitor will return\n     * an empty Optional.\n     *\n     * @param leafNode    the leaf node being visited\n     * @param depthOfNode the depth of the leaf node\n     */\n    @Override\n    public void acceptLeaf(INodeView leafNode, int depthOfNode) {\n        float[] leafPoint = leafNode.getLiftedLeafPoint();\n        double distanceSquared = 0.0;\n        for (int i = 0; i < leafPoint.length; i++) {\n            double diff = queryPoint[i] - leafPoint[i];\n            distanceSquared += diff * diff;\n        }\n\n        if (Math.sqrt(distanceSquared) < distanceThreshold) {\n            List<Long> sequenceIndexes = new ArrayList<>(leafNode.getSequenceIndexes().keySet());\n\n            neighbor = new Neighbor(leafPoint, Math.sqrt(distanceSquared), sequenceIndexes);\n        }\n    }\n\n    /**\n     * @return an {@link Optional} containing the leaf point (converted to a\n     *         {@link Neighbor} if the Euclidean distance between the leaf point and\n     *         the query point is less than the distance threshold. 
Otherwise return\n     *         an empty Optional.\n     */\n    @Override\n    public Optional<Neighbor> getResult() {\n        return Optional.ofNullable(neighbor);\n    }\n\n    @Override\n    public boolean isConverged() {\n        return true;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/interpolation/SimpleInterpolationVisitor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.interpolation;\n\nimport java.util.Arrays;\n\nimport com.amazon.randomcutforest.Visitor;\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.InterpolationMeasure;\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\n\n/**\n * A Visitor which computes several geometric measures that related a given\n * query point to the points stored in a RandomCutTree.\n **/\npublic class SimpleInterpolationVisitor implements Visitor<InterpolationMeasure> {\n\n    private final float[] pointToScore;\n    private final long sampleSize;\n    private final boolean centerOfMass;\n    public InterpolationMeasure stored;\n    double sumOfNewRange = 0d;\n    double sumOfDifferenceInRange = 0d;\n    double[] directionalDistanceVector;\n    double[] differenceInRangeVector;\n    /**\n     * A flag that states whether the point to score is known to be contained inside\n     * the bounding box of Nodes being accepted. Assumes nodes are accepted in\n     * leaf-to-root order.\n     */\n    boolean pointInsideBox;\n    /**\n     * An array that keeps track of whether each margin of the point being scored is\n     * outside inside the box considered during the recursive call to compute the\n     * score. 
Assumes nodes are accepted in leaf-to-root order.\n     */\n    boolean[] coordInsideBox;\n    private boolean pointEqualsLeaf;\n    private IBoundingBoxView theShadowBox;\n    private double savedMass;\n    private double pointMass;\n\n    /**\n     * Construct a new Visitor\n     *\n     * @param pointToScore The point whose anomaly score we are computing\n     * @param sampleSize   The sub-sample size used by the RandomCutTree that is\n     *                     scoring the point\n     * @param pointMass    indicates the mass/duplicity of the current point\n     * @param centerOfMass indicates if the tree has centerOfMass\n     */\n    public SimpleInterpolationVisitor(float[] pointToScore, int sampleSize, double pointMass, boolean centerOfMass) {\n        this.pointToScore = Arrays.copyOf(pointToScore, pointToScore.length);\n        this.sampleSize = sampleSize;\n        // the samplesize may be useful to scale\n        pointInsideBox = false;\n        this.pointMass = pointMass; // this corresponds to the mass/duplicity of the query\n        stored = new DensityOutput(pointToScore.length, sampleSize);\n        directionalDistanceVector = new double[2 * pointToScore.length];\n        differenceInRangeVector = new double[2 * pointToScore.length];\n        pointEqualsLeaf = false;\n        this.centerOfMass = centerOfMass;\n        // will be initialized to an array of false values\n        coordInsideBox = new boolean[pointToScore.length];\n    }\n\n    /**\n     * @return The score computed up until this point.\n     */\n    @Override\n    public InterpolationMeasure getResult() {\n        return stored;\n    }\n\n    @Override\n    public void accept(INodeView node, int depthOfNode) {\n        if (pointInsideBox) {\n            return;\n        }\n        IBoundingBoxView largeBox;\n        IBoundingBoxView smallBox;\n\n        if (pointEqualsLeaf) {\n            largeBox = node.getBoundingBox();\n            theShadowBox = theShadowBox == null ? 
node.getSiblingBoundingBox(pointToScore)\n                    : theShadowBox.getMergedBox(node.getSiblingBoundingBox(pointToScore));\n            smallBox = theShadowBox;\n        } else {\n            smallBox = node.getBoundingBox();\n            largeBox = smallBox.getMergedBox(pointToScore);\n        }\n\n        updateForCompute(smallBox, largeBox);\n\n        double probOfCut = sumOfDifferenceInRange / sumOfNewRange;\n        if (probOfCut <= 0) {\n            pointInsideBox = true;\n        } else {\n            double fieldVal = fieldExt(node, centerOfMass, savedMass, pointToScore);\n            double influenceVal = influenceExt(node, centerOfMass, savedMass, pointToScore);\n            // if center of mass has been enabled, then those can be used in a similar\n            // situation\n            // otherwise the center of mass is the 0 vector\n            for (int i = 0; i < pointToScore.length; i++) {\n                double prob = differenceInRangeVector[2 * i] / sumOfNewRange;\n                stored.probMass.high[i] = prob * influenceVal + (1 - probOfCut) * stored.probMass.high[i];\n                stored.measure.high[i] = prob * fieldVal + (1 - probOfCut) * stored.measure.high[i];\n                stored.distances.high[i] = prob * directionalDistanceVector[2 * i] * influenceVal\n                        + (1 - probOfCut) * stored.distances.high[i];\n\n            }\n            for (int i = 0; i < pointToScore.length; i++) {\n                double prob = differenceInRangeVector[2 * i + 1] / sumOfNewRange;\n                stored.probMass.low[i] = prob * influenceVal + (1 - probOfCut) * stored.probMass.low[i];\n                stored.measure.low[i] = prob * fieldVal + (1 - probOfCut) * stored.measure.low[i];\n                stored.distances.low[i] = prob * directionalDistanceVector[2 * i + 1] * influenceVal\n                        + (1 - probOfCut) * stored.distances.low[i];\n\n            }\n\n        }\n    }\n\n    @Override\n    public void 
acceptLeaf(INodeView leafNode, int depthOfNode) {\n        updateForCompute(leafNode.getBoundingBox(), leafNode.getBoundingBox().getMergedBox(pointToScore));\n\n        if (sumOfDifferenceInRange <= 0) { // values must be equal\n            savedMass = pointMass + leafNode.getMass();\n            pointEqualsLeaf = true;\n            for (int i = 0; i < pointToScore.length; i++) {\n                stored.measure.high[i] = stored.measure.low[i] = 0.5 * selfField(leafNode, savedMass)\n                        / pointToScore.length;\n                stored.probMass.high[i] = stored.probMass.low[i] = 0.5 * selfInfluence(leafNode, savedMass)\n                        / pointToScore.length;\n            }\n            Arrays.fill(coordInsideBox, false);\n        } else {\n            savedMass = pointMass;\n            double fieldVal = fieldPoint(leafNode, savedMass, pointToScore);\n            double influenceVal = influencePoint(leafNode, savedMass, pointToScore);\n            for (int i = 0; i < pointToScore.length; i++) {\n                double prob = differenceInRangeVector[2 * i] / sumOfNewRange;\n                stored.probMass.high[i] = prob * influenceVal;\n                stored.measure.high[i] = prob * fieldVal;\n                stored.distances.high[i] = prob * directionalDistanceVector[2 * i] * influenceVal;\n            }\n            for (int i = 0; i < pointToScore.length; i++) {\n                double prob = differenceInRangeVector[2 * i + 1] / sumOfNewRange;\n                stored.probMass.low[i] = prob * influenceVal;\n                stored.measure.low[i] = prob * fieldVal;\n                stored.distances.low[i] = prob * directionalDistanceVector[2 * i + 1] * influenceVal;\n            }\n        }\n    }\n\n    /**\n     * Update instance variables based on the difference between the large box and\n     * small box. 
The values set by this method are used in {@link #accept} and\n     * {@link #acceptLeaf} to update the stored density.\n     *\n     * @param smallBox\n     * @param largeBox\n     */\n    void updateForCompute(IBoundingBoxView smallBox, IBoundingBoxView largeBox) {\n\n        sumOfNewRange = 0d;\n        sumOfDifferenceInRange = 0d;\n        Arrays.fill(directionalDistanceVector, 0);\n        Arrays.fill(differenceInRangeVector, 0);\n\n        for (int i = 0; i < pointToScore.length; ++i) {\n            sumOfNewRange += largeBox.getRange(i);\n            if (coordInsideBox[i]) {\n                continue;\n            }\n\n            double maxGap = Math.max(largeBox.getMaxValue(i) - smallBox.getMaxValue(i), 0.0);\n            double minGap = Math.max(smallBox.getMinValue(i) - largeBox.getMinValue(i), 0.0);\n\n            if (maxGap + minGap > 0.0) {\n                sumOfDifferenceInRange += (minGap + maxGap);\n                differenceInRangeVector[2 * i] = maxGap;\n                differenceInRangeVector[2 * i + 1] = minGap;\n                if (maxGap > 0) {\n                    directionalDistanceVector[2 * i] = (maxGap + smallBox.getRange(i));\n                } else {\n                    directionalDistanceVector[2 * i + 1] = (minGap + smallBox.getRange(i));\n                }\n            } else {\n                coordInsideBox[i] = true;\n            }\n        }\n    }\n\n    /**\n     * The functions below can be changed for arbitrary interpolations.\n     *\n     * @param node/leafNode corresponds to the node in the tree influencing the\n     *                      current point\n     * @param centerOfMass  feature flag describing if the center of mass is enabled\n     *                      in tree in general this can be used for arbitrary\n     *                      extensions of the node class with additional\n     *                      information.\n     * @param thisMass      duplicity of query\n     * @param thislocation  location of 
query\n     * @return is the value or a 0/1 function -- the functions can be thresholded\n     *         based of geometric coordinates of the query and the node. Many\n     *         different Kernels can be expressed in this decomposed manner.\n     */\n\n    double fieldExt(INodeView node, boolean centerOfMass, double thisMass, float[] thislocation) {\n        return (node.getMass() + thisMass);\n    }\n\n    double influenceExt(INodeView node, boolean centerOfMass, double thisMass, float[] thislocation) {\n        return 1.0;\n    }\n\n    double fieldPoint(INodeView node, double thisMass, float[] thislocation) {\n        return (node.getMass() + thisMass);\n    }\n\n    double influencePoint(INodeView node, double thisMass, float[] thislocation) {\n        return 1.0;\n    }\n\n    double selfField(INodeView leafNode, double mass) {\n        return mass;\n    }\n\n    double selfInfluence(INodeView leafnode, double mass) {\n        return 1.0;\n    }\n\n    @Override\n    public boolean isConverged() {\n        return pointInsideBox;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/IPreprocessor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\n\npublic interface IPreprocessor {\n\n    boolean isOutputReady();\n\n    int getShingleSize();\n\n    int getInputLength();\n\n    float[] getLastShingledPoint();\n\n    double[] getShift();\n\n    double[] getScale();\n\n    double[] getSmoothedDeviations();\n\n    int getInternalTimeStamp();\n\n    int getValuesSeen();\n\n    ImputationMethod getImputationMethod();\n\n    double dataQuality();\n\n    float[] getScaledShingledInput(double[] point, long timestamp, int[] missing, RandomCutForest forest);\n\n    SampleSummary invertInPlaceRecentSummaryBlock(SampleSummary summary);\n\n    void update(double[] point, float[] rcfPoint, long timestamp, int[] missing, RandomCutForest forest);\n\n    double[] getExpectedValue(int relativeBlockIndex, double[] reference, float[] point, float[] newPoint);\n\n    double[] getShingledInput(int index);\n\n    double[] getShingledInput();\n\n    double[] getDefaultFill();\n\n    void setDefaultFill(double[] fill);\n\n    long getTimeStamp(int index);\n\n    double 
getTransformDecay();\n\n    int numberOfImputes(long timestamp);\n\n    TimedRangeVector invertForecastRange(RangeVector ranges, long lastTimeStamp, double[] delta, boolean useExpected,\n            long expectedTimeStamp);\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/ImputePreprocessor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;\nimport static com.amazon.randomcutforest.config.ImputationMethod.LINEAR;\nimport static com.amazon.randomcutforest.config.ImputationMethod.NEXT;\nimport static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.config.TransformMethod.DIFFERENCE;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE_DIFFERENCE;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class ImputePreprocessor extends InitialSegmentPreprocessor {\n\n    public static ImputationMethod DEFAULT_INITIAL = LINEAR;\n    public static ImputationMethod DEFAULT_DYNAMIC = PREVIOUS;\n\n    /**\n     * the builder initializes the numberOfImputed, which is not used in the other\n     * classes\n     * \n     * @param builder a builder for Preprocessor\n     
*/\n    public ImputePreprocessor(Builder<?> builder) {\n        super(builder);\n        numberOfImputed = shingleSize;\n    }\n\n    public float[] getScaledShingledInput(double[] inputPoint, long timestamp, int[] missing, RandomCutForest forest) {\n        if (valuesSeen < startNormalization) {\n            return null;\n        }\n        checkArgument(timestamp > previousTimeStamps[shingleSize - 1], \"incorrect ordering of time\");\n\n        // generate next tuple without changing the forest, these get modified in the\n        // transform\n        // a primary culprit is differencing, a secondary culprit is the numberOfImputed\n        long[] savedTimestamps = Arrays.copyOf(previousTimeStamps, previousTimeStamps.length);\n        double[] savedShingledInput = Arrays.copyOf(lastShingledInput, lastShingledInput.length);\n        float[] savedShingle = Arrays.copyOf(lastShingledPoint, lastShingledPoint.length);\n        int savedNumberOfImputed = numberOfImputed;\n        int lastActualInternal = internalTimeStamp;\n\n        float[] point = generateShingle(inputPoint, timestamp, missing, getTimeFactor(timeStampDeviations[1]), false,\n                forest);\n\n        // restore state\n        internalTimeStamp = lastActualInternal;\n        numberOfImputed = savedNumberOfImputed;\n        previousTimeStamps = Arrays.copyOf(savedTimestamps, savedTimestamps.length);\n        lastShingledInput = Arrays.copyOf(savedShingledInput, savedShingledInput.length);\n        lastShingledPoint = Arrays.copyOf(savedShingle, savedShingle.length);\n\n        return point;\n    }\n\n    /**\n     * decides if the forest should be updated, this is needed for imputation on the\n     * fly. The main goal of this function is to avoid runaway sequences where a\n     * single input changes the forest too much. 
But in some cases that behavior can\n     * be warranted and then this function should be changed\n     *\n     * @return if the forest should be updated\n     */\n    protected boolean updateAllowed() {\n        double fraction = numberOfImputed * 1.0 / (shingleSize);\n        if (fraction > 1) {\n            fraction = 1;\n        }\n        if (numberOfImputed >= shingleSize - 1 && previousTimeStamps[0] != previousTimeStamps[1]\n                && (transformMethod == DIFFERENCE || transformMethod == NORMALIZE_DIFFERENCE)) {\n            // this shingle is disconnected from the previously seen values\n            // these transformations will have little meaning\n            // positions 0 and 1 corresponds to the oldest in the shingle -- if we admit\n            // that case\n            // then we would admit a shingle where impact of the most recent observation is\n            // shingleSize - 1\n            // and the oldest one is 1. It seemed conservative to not allow that --\n            // primarily to stop a\n            // \"runaway\" effect where a single value (and its imputations affect\n            // everything).\n            // A gap at positions 1 and 2 would correspond to a shingleSize - 2 and 2 (or\n            // two different points).\n            return false;\n        }\n\n        dataQuality[0].update(1 - fraction);\n        return (fraction < useImputedFraction && internalTimeStamp >= shingleSize);\n    }\n\n    @Override\n    protected void updateTimestamps(long timestamp) {\n        /*\n         * For imputations done on timestamps other than the current one (specified by\n         * the timestamp parameter), the timestamp of the imputed tuple matches that of\n         * the input tuple, and we increment numberOfImputed. 
For imputations done at\n         * the current timestamp (if all input values are missing), the timestamp of the\n         * imputed tuple is the current timestamp, and we increment numberOfImputed.\n         *\n         * To check if imputed values are still present in the shingle, we use the\n         * condition (previousTimeStamps[0] == previousTimeStamps[1]). This works\n         * because previousTimeStamps has a size equal to the shingle size and is filled\n         * with the current timestamp.\n         *\n         * For example, if the last 10 values were imputed and the shingle size is 8,\n         * the condition will most likely return false until all 10 imputed values are\n         * removed from the shingle.\n         *\n         * However, there are scenarios where we might miss decrementing\n         * numberOfImputed:\n         *\n         * 1. Not all values in the shingle are imputed. 2. We accumulated\n         * numberOfImputed when the current timestamp had missing values.\n         *\n         * As a result, this could cause the data quality measure to decrease\n         * continuously since we are always counting missing values that should\n         * eventually be reset to zero. To address the issue, we add code in method\n         * updateForest to decrement numberOfImputed when we move to a new timestamp,\n         * provided there is no imputation. This ensures the imputation fraction does\n         * not increase as long as the imputation is continuing. This also ensures that\n         * the forest update decision, which relies on the imputation fraction,\n         * functions correctly. The forest is updated only when the imputation fraction\n         * is below the threshold of 0.5.\n         *\n         * Also, why can't we combine the decrement code between updateTimestamps and\n         * updateForest together? 
This would cause Consistency.ImputeTest to fail when\n         * testing with and without imputation, as the RCF scores would not change. The\n         * method updateTimestamps is used in other places (e.g., updateState and\n         * dischargeInitial), not only in updateForest.\n         */\n        if (previousTimeStamps[0] == previousTimeStamps[1]) {\n            numberOfImputed = numberOfImputed - 1;\n        }\n        super.updateTimestamps(timestamp);\n    }\n\n    /**\n     * the following function mutates the forest, the lastShingledPoint,\n     * lastShingledInput as well as previousTimeStamps, and adds the shingled input\n     * to the forest (provided it is allowed by the number of imputes and the\n     * transformation function)\n     * \n     * @param input          the input point (can be imputed)\n     * @param timestamp      the input timestamp (will be the most recent timestamp\n     *                       for imputes)\n     * @param forest         the resident RCF\n     * @param isFullyImputed is the current input fully imputed based on timestamps\n     */\n    void updateForest(boolean changeForest, double[] input, long timestamp, RandomCutForest forest,\n            boolean isFullyImputed) {\n        float[] scaledInput = transformer.transformValues(internalTimeStamp, input, getShingledInput(shingleSize - 1),\n                null, clipFactor);\n\n        updateShingle(input, scaledInput);\n        updateTimestamps(timestamp);\n        if (isFullyImputed) {\n            // The numImputed is now capped at the shingle size to ensure that the impute\n            // fraction,\n            // calculated as numberOfImputed * 1.0 / shingleSize, does not exceed 1.\n            numberOfImputed = Math.min(numberOfImputed + 1, shingleSize);\n        } else if (numberOfImputed > 0) {\n            // Decrement numberOfImputed when the new value is not imputed\n            numberOfImputed = numberOfImputed - 1;\n        }\n        if (changeForest) {\n    
        if (forest.isInternalShinglingEnabled()) {\n                // update allowed = not updateShingleOnly\n                forest.update(scaledInput, !updateAllowed());\n            } else if (updateAllowed()) {\n                forest.update(lastShingledPoint);\n            }\n        }\n    }\n\n    @Override\n    public void update(double[] point, float[] rcfPoint, long timestamp, int[] missing, RandomCutForest forest) {\n        if (valuesSeen < startNormalization) {\n            storeInitial(point, timestamp, missing); // will change valuesSeen\n            if (valuesSeen == startNormalization) {\n                dischargeInitial(forest);\n            }\n            return;\n        }\n        generateShingle(point, timestamp, missing, getTimeFactor(timeStampDeviations[1]), true, forest);\n        // The confidence formula depends on numImputed (the number of recent\n        // imputations seen)\n        // and seenValues (all values seen). To ensure confidence decreases when\n        // numImputed increases,\n        // we need to count only non-imputed values as seenValues.\n        if (missing == null || missing.length != point.length) {\n            ++valuesSeen;\n        }\n    }\n\n    protected double getTimeFactor(Deviation deviation) {\n        double timeFactor = deviation.getMean();\n        double dev = deviation.getDeviation();\n        if (dev > 0 && dev < timeFactor / 2) {\n            // a correction\n            timeFactor -= dev * dev / (2 * timeFactor);\n        }\n        return timeFactor;\n    }\n\n    /**\n     * a block which is executed once. 
It first computes the multipliers for\n     * normalization and then processes each of the stored inputs\n     */\n    protected void dischargeInitial(RandomCutForest forest) {\n        Deviation tempTimeDeviation = new Deviation();\n        for (int i = 0; i < initialTimeStamps.length - 1; i++) {\n            tempTimeDeviation.update(initialTimeStamps[i + 1] - initialTimeStamps[i]);\n        }\n        double timeFactor = getTimeFactor(tempTimeDeviation);\n\n        prepareInitialInput();\n        Deviation[] deviations = getInitialDeviations();\n        Arrays.fill(previousTimeStamps, initialTimeStamps[0]);\n        numberOfImputed = shingleSize;\n        for (int i = 0; i < valuesSeen; i++) {\n            // initial imputation; not using the global dependency\n            long lastInputTimeStamp = previousTimeStamps[shingleSize - 1];\n            if (internalTimeStamp > 0) {\n                double[] previous = new double[inputLength];\n                System.arraycopy(lastShingledInput, lastShingledInput.length - inputLength, previous, 0, inputLength);\n                int numberToImpute = determineGap(initialTimeStamps[i] - lastInputTimeStamp, timeFactor) - 1;\n                if (numberToImpute > 0) {\n                    double step = 1.0 / (numberToImpute + 1);\n                    // the last impute corresponds to the current observed value\n                    for (int j = 0; j < numberToImpute; j++) {\n                        double[] result = basicImpute(step * (j + 1), previous, initialValues[i], DEFAULT_INITIAL);\n                        float[] scaledInput = transformer.transformValues(internalTimeStamp, result,\n                                getShingledInput(shingleSize - 1), deviations, clipFactor);\n                        updateShingle(result, scaledInput);\n                        updateTimestamps(initialTimeStamps[i]);\n                        numberOfImputed = numberOfImputed + 1;\n                        if 
(forest.isInternalShinglingEnabled()) {\n                            // updateAllowed = not updateShingleOnly\n                            forest.update(scaledInput, !updateAllowed());\n                        } else {\n                            if (updateAllowed()) {\n                                forest.update(lastShingledPoint);\n                            }\n                        }\n                    }\n                }\n            }\n            float[] scaledInput = transformer.transformValues(internalTimeStamp, initialValues[i],\n                    getShingledInput(shingleSize - 1), deviations, clipFactor);\n            // note that initial values are all interpolated by 0,fixed, or linear\n            // there are no missing values to handle\n            updateState(initialValues[i], scaledInput, initialTimeStamps[i], lastInputTimeStamp, null);\n            if (forest.isInternalShinglingEnabled()) {\n                // updateAllowed = not updateShingleOnly\n                forest.update(scaledInput, !updateAllowed());\n            } else {\n                if (updateAllowed()) {\n                    forest.update(lastShingledPoint);\n                }\n            }\n        }\n        initialTimeStamps = null;\n        initialValues = null;\n    }\n\n    /**\n     * determines the gap between the last known timestamp and the current timestamp\n     * \n     * @param timestampGap current gap\n     * @param averageGap   the average gap (often determined by\n     *                     timeStampDeviation.getMean()\n     * @return the number of positions till timestamp\n     */\n    protected int determineGap(long timestampGap, double averageGap) {\n        if (internalTimeStamp <= 1) {\n            return 1;\n        } else {\n            double gap = timestampGap / averageGap;\n            return (gap >= 1.5) ? 
(int) Math.ceil(gap) : 1;\n        }\n    }\n\n    public int numberOfImputes(long timestamp) {\n        long lastInputTimeStamp = previousTimeStamps[shingleSize - 1];\n        return determineGap(timestamp - lastInputTimeStamp, getTimeFactor(timeStampDeviations[1])) - 1;\n    }\n\n    /**\n     * a single function that constructs the next shingle, with the option of\n     * committing them to the forest However the shingle needs to be generated\n     * before we process a point; and can only be committed once the point has been\n     * scored. Having the same deterministic transformation can be useful. Note for\n     * this imputation timestamp cannot be missing\n     *\n     * @param averageGap   the gap in timestamps\n     * @param changeForest boolean determining if we commit to the forest or not\n     * @param forest       the resident RCF\n     * @return the next shingle\n     */\n    protected float[] generateShingle(double[] inputTuple, long timestamp, int[] missingValues, double averageGap,\n            boolean changeForest, RandomCutForest forest) {\n        long lastInputTimeStamp = previousTimeStamps[shingleSize - 1];\n        double[] input = Arrays.copyOf(inputTuple, inputLength);\n        double[] previous = getShingledInput(shingleSize - 1);\n        double[] savedInput = Arrays.copyOf(previous, inputLength);\n        int numberToImpute = determineGap(timestamp - lastInputTimeStamp, averageGap) - 1;\n\n        if (imputationMethod != RCF || !forest.isOutputReady()) {\n            ImputationMethod method = (imputationMethod == RCF) ? 
DEFAULT_DYNAMIC : imputationMethod;\n            // for STREAMING_IMPUTE the timestamp cannot be missing\n            // hence missingValues[] can be 0 to inputLength - 1\n            // for next and Linear there are no current values\n            // we are forced to use fixedvalues or previous\n            if (missingValues != null) {\n                for (int missingValue : missingValues) {\n                    input[missingValue] = (defaultFill == null) ? previous[missingValue] : defaultFill[missingValue];\n                }\n            }\n\n            if (numberToImpute > 0) {\n                double step = 1.0 / (numberToImpute + 1);\n                // the last impute corresponds to the current observed value\n                for (int i = 0; i < numberToImpute; i++) {\n                    // only the last tuple is partial\n                    double[] result = basicImpute(step * (i + 1), previous, input, method);\n                    updateForest(changeForest, result, timestamp, forest, true);\n                }\n            }\n        } else {\n            // the following is a mechanism to prevent a large number of updates using RCF\n            // supposing the data is aggregated at 10min interval and the gap in values\n            // correspond to a month = 30 * 24 * 6 imputations -- that would be not only\n            // be slow, but also it would be unclear if analysis at shingleSize = 10 is\n            // appropriate\n            // for imputing 4000+ values. 
RCF is an example of reinforcement/continuous\n            // learning\n            // this would be very ripe for hallucination\n            // in general, the intent of impute is to correct occasional drops of data\n            if (numberToImpute < 3 * shingleSize || !fastForward) {\n                for (int i = 0; i < numberToImpute; i++) {\n                    double[] result = imputeRCF(forest, null, null);\n                    updateForest(changeForest, result, timestamp, forest, true);\n                }\n            } else {\n                // we will skip a lot of values\n                double[] shift = getShift(); // uses the transformation to get typical values\n                // resets number of imputed\n                numberOfImputed = 0;\n                for (int i = 0; i < shingleSize - 1; i++) {\n                    updateForest(changeForest, shift, timestamp, forest, false);\n                }\n            }\n            // finally the current input may be partial\n            if (missingValues != null && missingValues.length > 0) {\n                input = imputeRCF(forest, input, missingValues);\n            }\n        }\n\n        // last parameter isFullyImputed = if we miss everything in inputTuple?\n        // This would ensure dataQuality is decreasing if we impute whenever\n        updateForest(changeForest, input, timestamp, forest,\n                missingValues != null ? 
missingValues.length == inputTuple.length : false);\n        if (changeForest) {\n            updateTimeStampDeviations(timestamp, lastInputTimeStamp);\n            transformer.updateDeviation(input, savedInput, missingValues);\n        }\n        return Arrays.copyOf(lastShingledPoint, lastShingledPoint.length);\n    }\n\n    /**\n     * a basic function that performs a single step imputation in the input space\n     * the function has to be deterministic since it is run twice, first at scoring\n     * and then at committing to the RCF\n     * \n     * @param stepFraction the interpolation fraction\n     * @param previous     the previous input point\n     * @param input        the current input point\n     * @param method       the imputation method of choice\n     * @return the imputed/interpolated result\n     */\n    protected double[] basicImpute(double stepFraction, double[] previous, double[] input, ImputationMethod method) {\n        double[] result = new double[inputLength];\n        if (method == FIXED_VALUES) {\n            System.arraycopy(defaultFill, 0, result, 0, inputLength);\n        } else if (method == LINEAR) {\n            for (int z = 0; z < inputLength; z++) {\n                result[z] = previous[z] + stepFraction * (input[z] - previous[z]);\n            }\n        } else if (method == PREVIOUS) {\n            System.arraycopy(previous, 0, result, 0, inputLength);\n        } else if (method == NEXT) {\n            System.arraycopy(input, 0, result, 0, inputLength);\n        }\n        return result;\n    }\n\n    /**\n     * Uses RCF to impute the missing values in the current input or impute the\n     * entire set of values for that time step (based on partial input being null)\n     * \n     * @param forest        the RCF\n     * @param partialInput  the information available about the most recent point\n     * @param missingValues the array indicating missing values for the partial\n     *                      input\n     * @return the 
potential completion of the partial tuple or the predicted\n     *         current value\n     */\n    protected double[] imputeRCF(RandomCutForest forest, double[] partialInput, int[] missingValues) {\n        float[] temp = Arrays.copyOf(lastShingledPoint, lastShingledPoint.length);\n        shiftLeft(temp, inputLength);\n        int startPosition = inputLength * (shingleSize - 1);\n        int[] missingIndices;\n        if (partialInput == null) {\n            missingIndices = new int[inputLength];\n            for (int i = 0; i < inputLength; i++) {\n                missingIndices[i] = startPosition + i;\n            }\n        } else {\n            missingIndices = new int[missingValues.length];\n            for (int i = 0; i < missingValues.length; i++) {\n                missingIndices[i] = startPosition + missingValues[i];\n            }\n            float[] scaledInput = transformer.transformValues(internalTimeStamp, partialInput,\n                    getShingledInput(shingleSize - 1), null, clipFactor);\n            copyAtEnd(temp, scaledInput);\n        }\n        float[] newPoint = forest.imputeMissingValues(temp, missingIndices.length, missingIndices);\n        return toDoubleArray(getExpectedBlock(newPoint, 0));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/InitialSegmentPreprocessor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor;\n\nimport static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;\nimport static com.amazon.randomcutforest.config.ImputationMethod.ZERO;\nimport static com.amazon.randomcutforest.preprocessor.transform.WeightedTransformer.NUMBER_OF_STATS;\nimport static java.lang.Math.round;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class InitialSegmentPreprocessor extends Preprocessor {\n\n    public InitialSegmentPreprocessor(Builder<?> builder) {\n        super(builder);\n        initialValues = new double[startNormalization][];\n        initialTimeStamps = new long[startNormalization];\n    }\n\n    /**\n     * stores initial data for normalization. 
It is possible to perform the\n     * imputation inline while storing (for some options) but it seems cleaner to\n     * perform en masse imputation (and more complicated algorithms can be used)\n     *\n     * @param inputPoint    input data\n     * @param timestamp     timestamp\n     * @param missingValues missing values\n     */\n    protected void storeInitial(double[] inputPoint, long timestamp, int[] missingValues) {\n        // note that timestamps cannot be missing for updates\n        initialTimeStamps[valuesSeen] = timestamp;\n        int length = inputLength + ((missingValues == null) ? 0 : missingValues.length);\n        double[] temp = new double[length];\n        System.arraycopy(inputPoint, 0, temp, 0, inputLength);\n        if (missingValues != null) {\n            for (int i = 0; i < length - inputLength; i++) {\n                temp[inputLength + i] = missingValues[i];\n            }\n        }\n        initialValues[valuesSeen] = temp;\n        valuesSeen++;\n    }\n\n    /**\n     * prepare initial values which can have missing entries in individual tuples.\n     * We use a simple interpolation strategy. At some level, lack of data simply\n     * cannot be solved easily without data. 
This is run as one of the initial steps\n     * in dischargeInitial() If all the entries corresponding to some variables are\n     * missing -- there is no good starting point; we assume the value is\n     * defaultFill()\n     */\n    double prepareInitialInput() {\n        int totalMissing = 0;\n        // note that timestamp cannot be missing for updates\n        boolean[][] missing = new boolean[initialValues.length][inputLength];\n        for (int i = 0; i < initialValues.length; i++) {\n            Arrays.fill(missing[i], false);\n            int length = initialValues[i].length - inputLength;\n            for (int j = 0; j < length; j++) {\n                // duplicates are fine; but should not be encouraged\n                ++totalMissing;\n                missing[i][(int) round(initialValues[i][inputLength + j])] = true;\n            }\n        }\n\n        if (imputationMethod == ZERO || imputationMethod == FIXED_VALUES) {\n            for (int i = 0; i < initialValues.length - 1; i++) {\n                for (int j = 0; j < inputLength; j++) {\n                    initialValues[i][j] = (!missing[i][j]) ? 
initialValues[i][j] : defaultFill[j];\n                }\n            }\n        } else { // no simple alternative other than linear interpolation\n                 // at least for the initial segment -- because the trees are\n                 // not ready\n            boolean[] startingValuesSet = new boolean[inputLength];\n            for (int j = 0; j < inputLength; j++) {\n                // what if the first is missing?\n                int next = 0;\n                startingValuesSet[j] = false;\n                while (next < initialValues.length && missing[next][j]) {\n                    ++next;\n                }\n                startingValuesSet[j] = (next < initialValues.length);\n                if (startingValuesSet[j]) {\n                    initialValues[0][j] = initialValues[next][j];\n                    missing[0][j] = false;\n                    // note if the first value is present then i==0\n                    int start = 0;\n                    while (start < initialValues.length - 1) {\n                        int end = start + 1;\n                        while (end < initialValues.length && missing[end][j]) {\n                            ++end;\n                        }\n                        if (end < initialValues.length && end > start + 1) {\n                            for (int y = start + 1; y < end; y++) { // linear interpolation\n                                double factor = (1.0 * initialTimeStamps[start] - initialTimeStamps[y])\n                                        / (initialTimeStamps[start] - initialTimeStamps[end]);\n                                initialValues[y][j] = factor * initialValues[start][j]\n                                        + (1 - factor) * initialValues[end][j];\n                            }\n                        }\n                        start = end;\n                    }\n                } else {\n                    // set 0; note there is no value in the entire column.\n                    
if (defaultFill != null) {\n                        // can be set for other options as well\n                        for (int y = 0; y < initialValues.length; y++) {\n                            initialValues[y][j] = defaultFill[j];\n                        }\n                    } else {\n                        for (int y = 0; y < initialValues.length; y++) {\n                            initialValues[y][j] = 0;\n                        }\n                    }\n                }\n            }\n        }\n\n        // truncate to input length, since the missing values were stored as well\n        for (int i = 0; i < initialValues.length; i++) {\n            initialValues[i] = Arrays.copyOf(initialValues[i], inputLength);\n        }\n        return 1.0 - (1.0 * totalMissing) / initialValues.length;\n    }\n\n    @Override\n    public void update(double[] point, float[] rcfPoint, long timestamp, int[] missing, RandomCutForest forest) {\n        if (valuesSeen < startNormalization) {\n            storeInitial(point, timestamp, missing);\n            // will change valuesSeen\n            if (valuesSeen == startNormalization) {\n                dischargeInitial(forest);\n            }\n            return;\n        }\n        super.update(point, rcfPoint, timestamp, missing, forest);\n    }\n\n    // computes the normalization statistics\n    protected Deviation[] getInitialDeviations() {\n        Deviation[] tempList = new Deviation[NUMBER_OF_STATS * inputLength];\n        for (int j = 0; j < NUMBER_OF_STATS * inputLength; j++) {\n            tempList[j] = new Deviation(transformDecay);\n        }\n        for (int i = 0; i < initialValues.length; i++) {\n            for (int j = 0; j < inputLength; j++) {\n                tempList[j].update(initialValues[i][j]);\n                double value = (i == 0) ? 
0 : initialValues[i][j] - initialValues[i - 1][j];\n                tempList[j + inputLength].update(value);\n            }\n        }\n        for (int i = 0; i < initialValues.length; i++) {\n            for (int j = 0; j < inputLength; j++) {\n                tempList[j + 2 * inputLength].update(tempList[j].getDeviation());\n                tempList[j + 3 * inputLength].update(tempList[j + inputLength].getMean());\n                tempList[j + 4 * inputLength].update(tempList[j + inputLength].getDeviation());\n            }\n        }\n        return tempList;\n    }\n\n    /**\n     * a block which executes once; it first computes the multipliers for\n     * normalization and then processes each of the stored inputs\n     */\n\n    protected void dischargeInitial(RandomCutForest forest) {\n        Deviation tempTimeDeviation = new Deviation();\n        for (int i = 0; i < initialTimeStamps.length - 1; i++) {\n            tempTimeDeviation.update(initialTimeStamps[i + 1] - initialTimeStamps[i]);\n        }\n        double timeFactor = 1.0 + tempTimeDeviation.getDeviation();\n\n        double quality = prepareInitialInput();\n        Deviation[] deviations = getInitialDeviations();\n        Arrays.fill(previousTimeStamps, initialTimeStamps[0]);\n\n        for (int i = 0; i < valuesSeen; i++) {\n            float[] scaledInput = getScaledInput(initialValues[i], initialTimeStamps[i], deviations, timeFactor);\n            // missing values are null\n            updateState(initialValues[i], scaledInput, initialTimeStamps[i], previousTimeStamps[shingleSize - 1], null);\n            dataQuality[0].update(quality);\n            if (forest != null) {\n                if (forest.isInternalShinglingEnabled()) {\n                    forest.update(scaledInput);\n                } else {\n                    forest.update(lastShingledPoint);\n                }\n            }\n        }\n        initialTimeStamps = null;\n        initialValues = null;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/Preprocessor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SHINGLE_SIZE;\nimport static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;\nimport static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.config.ImputationMethod.ZERO;\nimport static com.amazon.randomcutforest.preprocessor.transform.WeightedTransformer.NUMBER_OF_STATS;\nimport static java.lang.Math.exp;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\nimport java.util.Optional;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.preprocessor.transform.DifferenceTransformer;\nimport com.amazon.randomcutforest.preprocessor.transform.ITransformer;\nimport com.amazon.randomcutforest.preprocessor.transform.NormalizedDifferenceTransformer;\nimport 
com.amazon.randomcutforest.preprocessor.transform.NormalizedTransformer;\nimport com.amazon.randomcutforest.preprocessor.transform.SubtractMATransformer;\nimport com.amazon.randomcutforest.preprocessor.transform.WeightedTransformer;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class Preprocessor implements IPreprocessor {\n\n    public static double NORMALIZATION_SCALING_FACTOR = 2.0;\n\n    // in case of normalization, uses this constant in denominator to ensure\n    // smoothness near 0\n    public static double DEFAULT_NORMALIZATION_PRECISION = 1e-3;\n\n    // the number of points to buffer before starting to normalize/gather statistics\n    public static int DEFAULT_START_NORMALIZATION = 10;\n\n    // the number at which to stop normalization -- it may not be easy to imagine why\n    // this is required\n    // but this is comforting to those interested in \"stopping\" a model from\n    // learning continuously\n    public static int DEFAULT_STOP_NORMALIZATION = Integer.MAX_VALUE;\n\n    // in case of normalization the deviations beyond 10 Sigma are likely measure 0\n    // events\n    public static int DEFAULT_CLIP_NORMALIZATION = 100;\n\n    // normalization is not turned on by default\n    public static boolean DEFAULT_NORMALIZATION = false;\n\n    // differencing is not turned on by default\n    // for some smooth predictable data differencing is helpful, but have unintended\n    // consequences\n    public static boolean DEFAULT_DIFFERENCING = false;\n\n    // the fraction of data points that can be imputed in a shingle before the\n    // shingle is admitted in a forest\n    public static double DEFAULT_USE_IMPUTED_FRACTION = 0.5;\n\n    // minimum number of observations before using a model to predict any expected\n    // behavior -- 
if we can score, we should predict\n    public static int MINIMUM_OBSERVATIONS_FOR_EXPECTED = 100;\n\n    public static int DEFAULT_DATA_QUALITY_STATES = 1;\n\n    // the input corresponds to timestamp data and this statistic helps align input\n    protected Deviation[] timeStampDeviations;\n\n    // normalize time difference;\n    protected boolean normalizeTime;\n\n    protected double weightTime;\n\n    protected double transformDecay;\n\n    // recording the last seen timestamp\n    protected long[] previousTimeStamps;\n\n    // this parameter is used as a clock if imputing missing values in the input\n    // this is different from valuesSeen in STREAMING_IMPUTE\n    protected int internalTimeStamp = 0;\n\n    // initial values used for normalization\n    protected double[][] initialValues;\n    protected long[] initialTimeStamps;\n\n    // initial values after which to start normalization\n    protected int startNormalization;\n\n    // sequence number to stop normalization at\n    protected Integer stopNormalization;\n\n    // a number indicating the actual values seen (not imputed)\n    protected int valuesSeen = 0;\n\n    // to use a set of default values for imputation\n    protected double[] defaultFill;\n\n    // fraction of data that should be actual input before they are added to RCF\n    protected double useImputedFraction = DEFAULT_USE_IMPUTED_FRACTION;\n\n    // number of imputed values in stored shingle\n    protected int numberOfImputed;\n\n    // particular strategy for impute\n    protected ImputationMethod imputationMethod = RCF;\n\n    // used in normalization\n    protected double clipFactor = DEFAULT_CLIP_NORMALIZATION;\n\n    // last shingled values (without normalization/change or augmentation by time)\n    protected double[] lastShingledInput;\n\n    // last point\n    protected float[] lastShingledPoint;\n\n    // method used to transform data in the preprocessor\n    protected TransformMethod transformMethod;\n\n    // shingle size in 
the forest\n    protected int shingleSize;\n\n    // actual dimension of the forest\n    protected int dimension;\n\n    // length of input to be seen, may depend on internal/external shingling\n    protected int inputLength;\n\n    // the mode of the forest used in this preprocessing\n    protected ForestMode mode;\n\n    // measures the data quality in imputed modes\n    protected Deviation[] dataQuality;\n\n    protected ITransformer transformer;\n\n    // to be used for speeding up STREAMING_IMPUTE over large gaps\n    protected boolean fastForward = false;\n\n    public Preprocessor(Builder<?> builder) {\n        checkArgument(builder.transformMethod != null, \"transform required\");\n        checkArgument(builder.forestMode != null, \" forest mode is required\");\n        checkArgument(builder.inputLength > 0, \"incorrect input length\");\n        checkArgument(builder.shingleSize > 0, \"incorrect shingle size\");\n        checkArgument(builder.dimensions > 0, \"incorrect dimensions\");\n        checkArgument(builder.shingleSize == 1 || builder.dimensions % builder.shingleSize == 0,\n                \" shingle size should divide the dimensions\");\n        checkArgument(builder.forestMode != ForestMode.STREAMING_IMPUTE || builder.shingleSize > 1,\n                \"cannot impute a time series with shingle size 1\");\n        checkArgument(builder.forestMode == ForestMode.TIME_AUGMENTED || builder.inputLength == builder.dimensions\n                || builder.inputLength * builder.shingleSize == builder.dimensions, \"incorrect input size\");\n        checkArgument(\n                builder.forestMode != ForestMode.TIME_AUGMENTED\n                        || (builder.inputLength + 1) * builder.shingleSize == builder.dimensions,\n                \"incorrect input size\");\n        checkArgument(builder.startNormalization <= builder.stopNormalization, \"incorrect normalization parameters\");\n        checkArgument(builder.startNormalization > 0 || 
!builder.normalizeTime, \" start of normalization cannot be 0\");\n        checkArgument(builder.startNormalization > 0 || !(builder.transformMethod == TransformMethod.NORMALIZE),\n                \" start of normalization cannot be 0 for normalize\");\n        checkArgument(\n                builder.startNormalization > 0 || !(builder.transformMethod == TransformMethod.NORMALIZE_DIFFERENCE),\n                \" start of normalization cannot be 0 for normalized difference\");\n        checkArgument(builder.weights == null || builder.weights.length >= builder.inputLength, \" incorrect weights\");\n        if (builder.initialShingledInput != null) {\n            // if (builder.inputLength == builder.dimensions) {\n            // checkArgument(builder.initialShingledInput.length == builder.inputLength,\n            // \"incorrect length shingled input\");\n            // } else\n            {\n                checkArgument(builder.initialShingledInput.length == builder.inputLength * builder.shingleSize,\n                        \"incorrect length shingled input\");\n            }\n        }\n        checkArgument(builder.initialPoint == null || builder.initialPoint.length == builder.dimensions,\n                \"incorrect length shingled transformed point\");\n        inputLength = builder.inputLength;\n        dimension = builder.dimensions;\n        shingleSize = builder.shingleSize;\n        mode = builder.forestMode;\n        lastShingledPoint = (builder.initialPoint == null) ? 
new float[dimension] : copyIfNotnull(builder.initialPoint);\n        this.transformMethod = builder.transformMethod;\n        this.startNormalization = builder.startNormalization;\n        this.stopNormalization = builder.stopNormalization;\n        this.normalizeTime = builder.normalizeTime;\n        double[] weights = new double[inputLength];\n        Arrays.fill(weights, 1.0);\n        if (builder.weights != null) {\n            if (builder.weights.length == inputLength) {\n                System.arraycopy(builder.weights, 0, weights, 0, inputLength);\n                weightTime = builder.weightTime;\n            } else {\n                System.arraycopy(builder.weights, 0, weights, 0, inputLength);\n                weightTime = builder.weights[inputLength];\n            }\n        } else {\n            weightTime = builder.weightTime;\n        }\n        previousTimeStamps = new long[shingleSize];\n        if (inputLength == dimension) {\n            lastShingledInput = (builder.initialShingledInput == null) ? new double[dimension]\n                    : Arrays.copyOf(builder.initialShingledInput, dimension);\n        } else {\n            lastShingledInput = (builder.initialShingledInput == null) ? 
new double[shingleSize * inputLength]\n                    : Arrays.copyOf(builder.initialShingledInput, shingleSize * inputLength);\n        }\n        transformDecay = builder.transformDecay;\n        dataQuality = builder.dataQuality.orElse(new Deviation[] { new Deviation(transformDecay) });\n\n        Deviation[] deviationList = new Deviation[NUMBER_OF_STATS * inputLength];\n        manageDeviations(deviationList, builder.deviations, transformDecay);\n        timeStampDeviations = new Deviation[NUMBER_OF_STATS];\n        manageDeviations(timeStampDeviations, builder.timeDeviations, transformDecay);\n\n        if (transformMethod == TransformMethod.NONE) {\n            for (int i = 0; i < inputLength; i++) {\n                checkArgument(weights[i] == 1.0, \"incorrect weights\");\n            }\n            transformer = new WeightedTransformer(weights, deviationList);\n        } else if (transformMethod == TransformMethod.WEIGHTED) {\n            transformer = new WeightedTransformer(weights, deviationList);\n        } else if (transformMethod == TransformMethod.DIFFERENCE) {\n            transformer = new DifferenceTransformer(weights, deviationList);\n        } else if (transformMethod == TransformMethod.SUBTRACT_MA) {\n            transformer = new SubtractMATransformer(weights, deviationList);\n        } else if (transformMethod == TransformMethod.NORMALIZE) {\n            transformer = new NormalizedTransformer(weights, deviationList);\n        } else {\n            transformer = new NormalizedDifferenceTransformer(weights, deviationList);\n        }\n        imputationMethod = builder.imputationMethod;\n        checkArgument(builder.fillValues == null || builder.fillValues.length == inputLength,\n                \" the number of values should match the shingled input\");\n        // if defaultFill is non-null then there is explicit request to use those\n        // values (unless set to ZERO, which is a specific default, at even higher\n        // 
precedence)\n        // defaults have higher precedence over next, linear because the\n        // next values are not present when impute is invoked\n        //\n        // algorithmically RCF seems to perform smoothest since it fits the data\n        // next best is previous and that has higher precedence\n        // the default is used when no initial value is present\n        if (imputationMethod == ZERO) {\n            this.defaultFill = new double[inputLength]; // set to 0\n        } else if (imputationMethod == FIXED_VALUES) {\n            checkArgument(builder.fillValues != null, \"fill values cannot be null\");\n            this.defaultFill = Arrays.copyOf(builder.fillValues, builder.fillValues.length);\n        } else {\n            this.defaultFill = copyIfNotnull(builder.fillValues);\n        }\n        if (mode == ForestMode.STREAMING_IMPUTE) {\n            // imputationMethod = builder.imputationMethod;\n            normalizeTime = true;\n            this.useImputedFraction = builder.useImputedFraction.orElse(0.5);\n            this.fastForward = builder.fastForward;\n        }\n    }\n\n    // the following fills the first argument as copies of the original\n    // but if the original is null or otherwise then new deviations are created; the\n    // last third\n    // are filled with 0.1 * transformDecay and are reserved for smoothing\n    void manageDeviations(Deviation[] deviationList, Optional<Deviation[]> original, double timeDecay) {\n        checkArgument(deviationList.length % NUMBER_OF_STATS == 0, \" has to be a multiple of five\");\n        int usedDeviations = 0;\n        if (original.isPresent()) {\n            Deviation[] list = original.get();\n            usedDeviations = min(list.length, deviationList.length);\n            // note the lengths can be different based on a different version of the model\n            // we will convert the model; and rely on RCF's ability to adjust to new data\n            for (int i = 0; i < 
usedDeviations; i++) {\n                deviationList[i] = list[i].copy();\n            }\n        }\n        for (int i = usedDeviations; i < deviationList.length - 2 * deviationList.length / 5; i++) {\n            deviationList[i] = new Deviation(timeDecay);\n        }\n        usedDeviations = max(usedDeviations, deviationList.length - 2 * deviationList.length / 5);\n        for (int i = usedDeviations; i < deviationList.length; i++) {\n            deviationList[i] = new Deviation(0.1 * timeDecay);\n        }\n    }\n\n    /**\n     * decides if normalization is required, and then is used to store and discharge\n     * an initial segment\n     *\n     * @return a boolean indicating th need to store initial values\n     */\n    public static boolean requireInitialSegment(boolean normalizeTime, TransformMethod transformMethod, ForestMode mode,\n            ImputationMethod imputationMethod) {\n        return normalizeTime || imputationMethod != ZERO && imputationMethod != FIXED_VALUES\n                || transformMethod == TransformMethod.NORMALIZE\n                || transformMethod == TransformMethod.NORMALIZE_DIFFERENCE\n                || transformMethod == TransformMethod.SUBTRACT_MA || mode != ForestMode.STANDARD;\n    }\n\n    public float[] getScaledInput(double[] point, long timestamp) {\n        if (valuesSeen < startNormalization\n                && requireInitialSegment(normalizeTime, transformMethod, mode, imputationMethod)) {\n            return null;\n        }\n        return getScaledInput(point, timestamp, null, getTimeShift());\n    }\n\n    public float[] getScaledInput(float[] point, long timestamp) {\n        return getScaledInput(toDoubleArray(point), timestamp, null, getTimeShift());\n    }\n\n    public float[] getScaledShingledInput(double[] inputPoint, long timestamp, int[] missing, RandomCutForest forest) {\n        boolean requireForest = (imputationMethod == RCF || mode != ForestMode.STANDARD);\n        checkArgument(!requireForest || 
forest != null, \"need a forest\");\n\n        if (!requireForest) {\n            double[] newInput = Arrays.copyOf(inputPoint, inputLength);\n            double[] values = (defaultFill != null) ? defaultFill : getShingledInput(shingleSize - 1);\n            if (missing != null) {\n                for (int j : missing) {\n                    newInput[j] = values[j];\n                }\n            }\n            float[] scaledInput = getScaledInput(newInput, timestamp);\n            if (scaledInput == null) {\n                return null;\n            }\n            float[] point = Arrays.copyOf(lastShingledPoint, dimension);\n            shiftLeft(point, inputLength);\n            System.arraycopy(scaledInput, 0, point, dimension - inputLength, inputLength);\n            return point;\n        } else {\n            float[] scaledInput = getScaledInput(inputPoint, timestamp);\n            float[] point = null;\n\n            if (scaledInput != null) {\n                if (forest.isInternalShinglingEnabled()) {\n                    point = forest.transformToShingledPoint(scaledInput);\n                } else {\n                    int dimension = forest.getDimensions();\n                    if (scaledInput.length == dimension) {\n                        point = scaledInput;\n                    } else {\n                        point = new float[dimension];\n                        System.arraycopy(getLastShingledPoint(), scaledInput.length, point, 0,\n                                dimension - scaledInput.length);\n                        System.arraycopy(scaledInput, 0, point, dimension - scaledInput.length, scaledInput.length);\n                    }\n                }\n                if (missing != null) {\n                    int[] newMissing = Arrays.copyOf(missing, missing.length);\n                    for (int i = 0; i < missing.length; i++) {\n                        newMissing[i] = missing[i] + dimension - scaledInput.length;\n                    }\n     
               point = forest.imputeMissingValues(point, newMissing.length, newMissing);\n                }\n            }\n            return point;\n        }\n    }\n\n    public double[] getScale() {\n        if (mode != ForestMode.TIME_AUGMENTED) {\n            return transformer.getScale();\n        } else {\n            double[] scale = new double[inputLength + 1];\n            System.arraycopy(transformer.getScale(), 0, scale, 0, inputLength);\n            scale[inputLength] = (weightTime == 0) ? 0 : 1.0 / weightTime;\n            if (normalizeTime) {\n                scale[inputLength] *= NORMALIZATION_SCALING_FACTOR\n                        * (getTimeGapDifference() + DEFAULT_NORMALIZATION_PRECISION);\n            }\n            return scale;\n        }\n    }\n\n    @Override\n    public boolean isOutputReady() {\n        return internalTimeStamp > 0;\n    }\n\n    public double[] getShift() {\n        double[] previous = (inputLength == lastShingledInput.length) ? lastShingledInput\n                : getShingledInput(shingleSize - 1);\n        if (mode != ForestMode.TIME_AUGMENTED) {\n            return transformer.getShift(previous);\n        } else {\n            double[] shift = new double[inputLength + 1];\n            System.arraycopy(transformer.getShift(previous), 0, shift, 0, inputLength);\n            // time is always differenced\n            shift[inputLength] = ((normalizeTime) ? 
getTimeShift() : 0) + previousTimeStamps[shingleSize - 1];\n            return shift;\n        }\n    }\n\n    public double[] getSmoothedDeviations() {\n        if (mode != ForestMode.TIME_AUGMENTED) {\n            double[] deviations = new double[2 * inputLength];\n            System.arraycopy(transformer.getSmoothedDeviations(), 0, deviations, 0, inputLength);\n            System.arraycopy(transformer.getSmoothedDifferenceDeviations(), 0, deviations, inputLength, inputLength);\n            return deviations;\n        } else {\n            double[] deviations = new double[2 * inputLength + 2];\n            System.arraycopy(transformer.getSmoothedDeviations(), 0, deviations, 0, inputLength);\n            System.arraycopy(transformer.getSmoothedDifferenceDeviations(), 0, deviations, inputLength + 1,\n                    inputLength);\n            // time is differenced (for now) or unchanged\n            deviations[inputLength + 1] = timeStampDeviations[4].getMean();\n            deviations[2 * inputLength + 1] = timeStampDeviations[4].getMean();\n            return deviations;\n        }\n    }\n\n    public void update(double[] point, float[] rcfPoint, long timestamp, int[] missing, RandomCutForest forest) {\n\n        updateState(point, rcfPoint, timestamp, previousTimeStamps[shingleSize - 1], missing);\n        ++valuesSeen;\n        double miss = (missing == null) ? 0 : missing.length;\n        dataQuality[0].update(1 - 1.0 * miss / inputLength);\n        if (forest != null) {\n            if (forest.isInternalShinglingEnabled()) {\n                int length = inputLength + ((mode == ForestMode.TIME_AUGMENTED) ? 
1 : 0);\n                float[] scaledInput = new float[length];\n                System.arraycopy(rcfPoint, rcfPoint.length - length, scaledInput, 0, length);\n                forest.update(scaledInput);\n            } else {\n                forest.update(rcfPoint);\n            }\n        }\n    }\n\n    public double dataQuality() {\n        return dataQuality[0].getMean();\n    }\n\n    public int numberOfImputes(long timestamp) {\n        return 0;\n    }\n\n    /**\n     * maps the time back. The returned value is an approximation for\n     * relativePosition less than 0 which corresponds to an anomaly in the past.\n     * Since the state of the statistic is now changed based on more recent values\n     *\n     * @param gap              estimated value\n     * @param relativePosition how far back in the shingle\n     * @return transform of the time value to original input space\n     */\n    public long inverseMapTime(double gap, int relativePosition) {\n        // note this corresponds to differencing being always on\n        checkArgument(shingleSize + relativePosition >= 0, \" error\");\n        return inverseMapTimeValue(gap, previousTimeStamps[shingleSize - 1 + relativePosition]);\n    }\n\n    // same as inverseMapTime, using explicit value also useful in forecast\n    protected long inverseMapTimeValue(double gap, long timestamp) {\n        double factor = (weightTime == 0) ? 
0 : 1.0 / weightTime;\n        if (factor == 0) {\n            return 0;\n        }\n        if (normalizeTime) {\n            return (long) Math\n                    .round(timestamp + getTimeShift() + NORMALIZATION_SCALING_FACTOR * gap * getTimeScale() * factor);\n        } else {\n            return (long) Math.round(gap * factor + timestamp);\n        }\n    }\n\n    /**\n     * returns the input values corresponding to a position in the shingle; this is\n     * needed in the corrector steps; and avoids the need for replicating this\n     * information downstream\n     * \n     * @param index position in the shingle\n     * @return the input values for those positions in the shingle\n     */\n    public double[] getShingledInput(int index) {\n        int base = lastShingledInput.length / shingleSize;\n        double[] values = new double[base];\n        System.arraycopy(lastShingledInput, index * base, values, 0, base);\n        return values;\n    }\n\n    @Override\n    public double[] getShingledInput() {\n        return copyIfNotnull(lastShingledInput);\n    }\n\n    /**\n     * produces the expected value given location of the anomaly -- being aware that\n     * the nearest anomaly may be behind us in time.\n     * \n     * @param relativeBlockIndex the relative index of the anomaly\n     * @param reference          the reference input (so that we do not generate\n     *                           arbitrary rounding errors of transformations which\n     *                           can be indistinguishable from true expected values)\n     * @param point              the point (in the RCF shingled space)\n     * @param newPoint           the expected point (in the RCF shingled space) --\n     *                           where only the most egregiously offending entries\n     *                           corresponding to the shingleSize - 1 +\n     *                           relativeBlockIndex are changed.\n     * @return the set of values (in the input space) 
that would have produced\n     *         newPoint\n     */\n    public double[] getExpectedValue(int relativeBlockIndex, double[] reference, float[] point, float[] newPoint) {\n        checkArgument(newPoint.length == dimension, \"incorrect invocation\");\n        double[] values = toDoubleArray(getExpectedBlock(newPoint, relativeBlockIndex));\n        if (reference != null) {\n            int startPosition = (shingleSize - 1 + relativeBlockIndex) * dimension / shingleSize;\n            int length = lastShingledInput.length / shingleSize;\n            for (int i = 0; i < length; i++) {\n                double currentValue = (reference.length == dimension) ? reference[startPosition + i] : reference[i];\n                values[i] = (point[startPosition + i] == newPoint[startPosition + i]) ? currentValue : values[i];\n            }\n        }\n        if (mode == ForestMode.TIME_AUGMENTED) {\n            int endPosition = (shingleSize - 1 + relativeBlockIndex + 1) * dimension / shingleSize;\n            double timeGap = (newPoint[endPosition - 1] - point[endPosition - 1]);\n            long expectedTimestamp = (timeGap == 0) ? 
getTimeStamp(shingleSize - 1 + relativeBlockIndex)\n                    : inverseMapTime(timeGap, relativeBlockIndex);\n            values[dimension / shingleSize - 1] = expectedTimestamp;\n        }\n        return values;\n    }\n\n    protected float[] getExpectedBlock(float[] newPoint, int relativeBlockIndex) {\n        int startPosition = newPoint.length - (1 - relativeBlockIndex) * dimension / shingleSize;\n        checkArgument(startPosition >= 0, \"incorrect inversion\");\n        float[] values = new float[dimension / shingleSize];\n        System.arraycopy(newPoint, startPosition, values, 0, dimension / shingleSize);\n        invertInPlace(values, getShingledInput(shingleSize - 1 + relativeBlockIndex), relativeBlockIndex);\n        if (mode == ForestMode.TIME_AUGMENTED) {\n            // this will be lossy\n            values[dimension / shingleSize - 1] = (float) inverseMapTime(values[dimension / shingleSize - 1],\n                    relativeBlockIndex);\n        }\n        return values;\n    }\n\n    /**\n     * inverts the values to the input space from the RCF space\n     *\n     */\n    protected void invertInPlace(float[] values, double[] previous, int relativeBlockIndex) {\n        checkArgument(values.length == dimension / shingleSize, \"incorrect invocation\");\n        transformer.invertInPlace(values, previous);\n        if (mode == ForestMode.TIME_AUGMENTED) {\n            // this will be lossy\n            values[values.length - 1] = (float) inverseMapTime(values[values.length - 1], relativeBlockIndex);\n        }\n    }\n\n    public SampleSummary invertInPlaceRecentSummaryBlock(SampleSummary summary) {\n        if (summary == null) {\n            return null;\n        }\n        double[] scale = getScale();\n        double[] previous = getShingledInput(shingleSize - 1);\n        invertInPlace(summary.mean, previous, 0);\n        invertInPlace(summary.median, previous, 0);\n        invertInPlace(summary.upper, previous, 0);\n        
invertInPlace(summary.lower, previous, 0);\n        for (int i = 0; i < summary.summaryPoints.length; i++) {\n            checkArgument(summary.measure[i].length == scale.length, \"only applies to blocks\");\n            invertInPlace(summary.summaryPoints[i], previous, 0);\n            for (int j = 0; j < scale.length; j++) {\n                summary.measure[i][j] *= (float) scale[j];\n            }\n        }\n        return summary;\n    }\n\n    public TimedRangeVector invertForecastRange(RangeVector ranges, long lastTimeStamp, double[] delta,\n            boolean useExpected, long expectedTimeStamp) {\n        int baseDimension = inputLength + (mode == ForestMode.TIME_AUGMENTED ? 1 : 0);\n        checkArgument(ranges.values.length % baseDimension == 0, \" incorrect length of ranges\");\n        int horizon = ranges.values.length / baseDimension;\n\n        double[] correction = copyIfNotnull(delta);\n        int gap = (int) (internalTimeStamp - lastTimeStamp);\n        if (correction != null) {\n            double decay = max(getTransformDecay(), 1.0 / (3 * shingleSize));\n            double factor = exp(-gap * decay);\n            for (int i = 0; i < correction.length; i++) {\n                correction[i] *= factor;\n            }\n        } else {\n            correction = new double[baseDimension];\n        }\n        long localTimeStamp = previousTimeStamps[shingleSize - 1];\n\n        TimedRangeVector timedRangeVector;\n        if (mode != ForestMode.TIME_AUGMENTED) {\n            timedRangeVector = new TimedRangeVector(ranges, horizon);\n            // Note that STREAMING_IMPUTE we are already using the time values\n            // to fill in values -- moreover such missing values can be large in number\n            // predicting next timestamps in the future in such a scenario would correspond\n            // to a joint prediction and TIME_AUGMENTED mode may be more suitable.\n            // therefore for STREAMING_IMPUTE the timestamps values are not 
predicted\n            if (mode != ForestMode.STREAMING_IMPUTE) {\n                double timeGap = getTimeDrift();\n                double timeBound = 1.3 * getTimeGapDifference();\n\n                for (int i = 0; i < horizon; i++) {\n                    timedRangeVector.timeStamps[i] = inverseMapTimeValue(timeGap, localTimeStamp);\n                    timedRangeVector.upperTimeStamps[i] = max(timedRangeVector.timeStamps[i],\n                            inverseMapTimeValue(timeGap + timeBound, localTimeStamp));\n                    timedRangeVector.lowerTimeStamps[i] = min(timedRangeVector.timeStamps[i],\n                            inverseMapTimeValue(max(0, timeGap - timeBound), localTimeStamp));\n                    localTimeStamp = timedRangeVector.timeStamps[i];\n                }\n            }\n        } else {\n            if (useExpected && gap == 1) {\n                localTimeStamp = expectedTimeStamp;\n            }\n            timedRangeVector = new TimedRangeVector(inputLength * horizon, horizon);\n            for (int i = 0; i < horizon; i++) {\n                for (int j = 0; j < inputLength; j++) {\n                    timedRangeVector.rangeVector.values[i * inputLength + j] = ranges.values[i * baseDimension + j];\n                    timedRangeVector.rangeVector.upper[i * inputLength + j] = ranges.upper[i * baseDimension + j];\n                    timedRangeVector.rangeVector.lower[i * inputLength + j] = ranges.lower[i * baseDimension + j];\n                }\n                timedRangeVector.timeStamps[i] = inverseMapTimeValue(\n                        max(ranges.values[i * baseDimension + inputLength], 0), localTimeStamp);\n                timedRangeVector.upperTimeStamps[i] = max(timedRangeVector.timeStamps[i],\n                        inverseMapTimeValue(max(ranges.upper[i * baseDimension + inputLength], 0), localTimeStamp));\n                timedRangeVector.lowerTimeStamps[i] = min(timedRangeVector.timeStamps[i],\n                        
inverseMapTimeValue(max(ranges.lower[i * baseDimension + inputLength], 0), localTimeStamp));\n                localTimeStamp = timedRangeVector.upperTimeStamps[i];\n            }\n        }\n        // the following is the post-anomaly transformation, can be impacted by\n        // anomalies\n        transformer.invertForecastRange(timedRangeVector.rangeVector, inputLength, getShingledInput(shingleSize - 1),\n                correction);\n        return timedRangeVector;\n    }\n\n    /**\n     * given an input produces a scaled transform to be used in the forest\n     *\n     * @param input             the actual input seen\n     * @param timestamp         timestamp of said input\n     * @param defaults          default statistics, potentially used in\n     *                          initialization\n     * @param defaultTimeFactor default time statistic\n     * @return a scaled/transformed input which can be used in the forest\n     */\n    protected float[] getScaledInput(double[] input, long timestamp, Deviation[] defaults, double defaultTimeFactor) {\n        double[] previous = (input.length == lastShingledInput.length) ? 
lastShingledInput\n                : getShingledInput(shingleSize - 1);\n        float[] scaledInput = transformer.transformValues(internalTimeStamp, input, previous, defaults, clipFactor);\n        if (mode == ForestMode.TIME_AUGMENTED) {\n            scaledInput = augmentTime(scaledInput, timestamp, defaultTimeFactor);\n        }\n        return scaledInput;\n    }\n\n    /**\n     * updates the various shingles\n     * \n     * @param inputPoint  the input point\n     * @param scaledPoint the scaled/transformed point which is used in the RCF\n     */\n\n    protected void updateShingle(double[] inputPoint, float[] scaledPoint) {\n        if (inputPoint.length == lastShingledInput.length) {\n            lastShingledInput = Arrays.copyOf(inputPoint, inputPoint.length);\n        } else {\n            shiftLeft(lastShingledInput, inputPoint.length);\n            copyAtEnd(lastShingledInput, inputPoint);\n        }\n        if (scaledPoint.length == lastShingledPoint.length) {\n            lastShingledPoint = Arrays.copyOf(scaledPoint, scaledPoint.length);\n        } else {\n            shiftLeft(lastShingledPoint, scaledPoint.length);\n            copyAtEnd(lastShingledPoint, scaledPoint);\n        }\n    }\n\n    /**\n     * updates timestamps\n     * \n     * @param timestamp the timestamp of the current input\n     */\n    protected void updateTimestamps(long timestamp) {\n        for (int i = 0; i < shingleSize - 1; i++) {\n            previousTimeStamps[i] = previousTimeStamps[i + 1];\n        }\n        previousTimeStamps[shingleSize - 1] = timestamp;\n        ++internalTimeStamp;\n    }\n\n    protected void updateTimeStampDeviations(long timestamp, long previous) {\n\n        timeStampDeviations[0].update(timestamp);\n        timeStampDeviations[1].update(timestamp - previous);\n        // smoothing - not used currently\n        timeStampDeviations[2].update(timeStampDeviations[0].getDeviation());\n        
timeStampDeviations[3].update(timeStampDeviations[1].getMean());\n        timeStampDeviations[4].update(timeStampDeviations[1].getDeviation());\n    }\n\n    double getTimeScale() {\n        return 1.0 + getTimeGapDifference();\n    }\n\n    double getTimeGapDifference() {\n        return Math.abs(timeStampDeviations[4].getMean());\n    }\n\n    double getTimeShift() {\n        return timeStampDeviations[1].getMean();\n    }\n\n    double getTimeDrift() {\n        return timeStampDeviations[3].getMean();\n    }\n\n    /**\n     * updates the state of the preprocessor\n     * \n     * @param inputPoint    the actual input\n     * @param scaledInput   the transformed input\n     * @param timestamp     the timestamp of the input\n     * @param previous      the previous timestamp\n     * @param missingValues missing values (if any) in range 0..(inputLength-1)\n     */\n    protected void updateState(double[] inputPoint, float[] scaledInput, long timestamp, long previous,\n            int[] missingValues) {\n        // timestamp cannot be missing for an update\n        updateTimeStampDeviations(timestamp, previous);\n        updateTimestamps(timestamp);\n        double[] previousInput = (inputLength == lastShingledInput.length) ? 
lastShingledInput\n                : getShingledInput(shingleSize - 1);\n        transformer.updateDeviation(inputPoint, previousInput, missingValues);\n        updateShingle(inputPoint, scaledInput);\n    }\n\n    /**\n     * copies at the end for a shingle\n     * \n     * @param array shingled array\n     * @param small new small array\n     */\n    public static void copyAtEnd(double[] array, double[] small) {\n        checkArgument(array.length >= small.length, \" incorrect operation \");\n        System.arraycopy(small, 0, array, array.length - small.length, small.length);\n    }\n\n    public static void copyAtEnd(float[] array, float[] small) {\n        checkArgument(array.length >= small.length, \" incorrect operation \");\n        System.arraycopy(small, 0, array, array.length - small.length, small.length);\n    }\n\n    // a utility function\n    protected static double[] copyIfNotnull(double[] array) {\n        return array == null ? null : Arrays.copyOf(array, array.length);\n    }\n\n    protected static float[] copyIfNotnull(float[] array) {\n        return array == null ? null : Arrays.copyOf(array, array.length);\n    }\n\n    // left shifting used for the shingles\n    public static void shiftLeft(double[] array, int baseDimension) {\n        for (int i = 0; i < array.length - baseDimension; i++) {\n            array[i] = array[i + baseDimension];\n        }\n    }\n\n    public static void shiftLeft(float[] array, int baseDimension) {\n        for (int i = 0; i < array.length - baseDimension; i++) {\n            array[i] = array[i + baseDimension];\n        }\n    }\n\n    /**\n     * maps a value shifted to the current mean or to a relative space for time\n     *\n     * @return the normalized value\n     */\n    protected double normalize(double value, double factor) {\n        double currentFactor = (factor != 0) ? 
factor : getTimeScale();\n        if (value - getTimeShift() >= NORMALIZATION_SCALING_FACTOR * clipFactor\n                * (currentFactor + DEFAULT_NORMALIZATION_PRECISION)) {\n            return clipFactor;\n        }\n        if (value - getTimeShift() <= -NORMALIZATION_SCALING_FACTOR * clipFactor\n                * (currentFactor + DEFAULT_NORMALIZATION_PRECISION)) {\n            return -clipFactor;\n        } else {\n            // deviation cannot be 0\n            return (value - getTimeShift())\n                    / (NORMALIZATION_SCALING_FACTOR * (currentFactor + DEFAULT_NORMALIZATION_PRECISION));\n        }\n    }\n\n    /**\n     * augments (potentially normalized) input with time (which is always\n     * differenced)\n     *\n     * @param normalized (potentially normalized) input point\n     * @param timestamp  timestamp of current point\n     * @param timeFactor a factor used in normalizing time\n     * @return a tuple with one extra field\n     */\n    protected float[] augmentTime(float[] normalized, long timestamp, double timeFactor) {\n        float[] scaledInput = new float[normalized.length + 1];\n        System.arraycopy(normalized, 0, scaledInput, 0, normalized.length);\n        if (valuesSeen <= 1) {\n            scaledInput[normalized.length] = 0;\n        } else {\n            double timeShift = timestamp - previousTimeStamps[shingleSize - 1];\n            scaledInput[normalized.length] = (float) (weightTime\n                    * ((normalizeTime) ? normalize(timeShift, timeFactor) : timeShift));\n        }\n        return scaledInput;\n    }\n\n    // mapper\n    public long[] getInitialTimeStamps() {\n        return (initialTimeStamps == null) ? null : Arrays.copyOf(initialTimeStamps, initialTimeStamps.length);\n    }\n\n    // mapper\n    public void setInitialTimeStamps(long[] values) {\n        initialTimeStamps = (values == null) ? 
null : Arrays.copyOf(values, values.length);\n    }\n\n    // mapper\n    public double[][] getInitialValues() {\n        if (initialValues == null) {\n            return null;\n        } else {\n            double[][] result = new double[initialValues.length][];\n            for (int i = 0; i < initialValues.length; i++) {\n                result[i] = copyIfNotnull(initialValues[i]);\n            }\n            return result;\n        }\n    }\n\n    // mapper\n    public void setInitialValues(double[][] values) {\n        if (values == null) {\n            initialValues = null;\n        } else {\n            initialValues = new double[values.length][];\n            for (int i = 0; i < values.length; i++) {\n                initialValues[i] = copyIfNotnull(values[i]);\n            }\n        }\n    }\n\n    // mapper\n    public double[] getLastShingledInput() {\n        return copyIfNotnull(lastShingledInput);\n    }\n\n    // mapper\n    public void setLastShingledInput(double[] point) {\n        lastShingledInput = copyIfNotnull(point);\n    }\n\n    // mapper\n    public void setPreviousTimeStamps(long[] values) {\n        checkArgument(values.length == shingleSize, \" incorrect length \");\n        previousTimeStamps = Arrays.copyOf(values, values.length);\n        numberOfImputed = 0;\n        for (int i = 0; i < previousTimeStamps.length - 1; i++) {\n            if (previousTimeStamps[i] == previousTimeStamps[i + 1]) {\n                ++numberOfImputed;\n            }\n        }\n    }\n\n    // mapper\n    public Deviation[] getTimeStampDeviations() {\n        return timeStampDeviations;\n    }\n\n    // mapper\n    public long[] getPreviousTimeStamps() {\n        return Arrays.copyOf(previousTimeStamps, previousTimeStamps.length);\n    }\n\n    public Deviation[] getDeviationList() {\n        return transformer.getDeviations();\n    }\n\n    public double getTransformDecay() {\n        return transformDecay;\n    }\n\n    /**\n     * used in mapper; 
augments weightTime to the weights array to produce a single\n     * array of length inputLength + 1\n     */\n    public double[] getWeights() {\n        double[] basic = transformer.getWeights();\n        double[] answer = new double[inputLength + 1];\n        System.arraycopy(basic, 0, answer, 0, inputLength);\n        answer[inputLength] = weightTime;\n        return answer;\n    }\n\n    // mapper\n    public double[] getDefaultFill() {\n        return copyIfNotnull(defaultFill);\n    }\n\n    // mapper\n    public void setDefaultFill(double[] values) {\n        checkArgument(values.length == inputLength, \"incorrect length defaults\");\n        defaultFill = copyIfNotnull(values);\n    }\n\n    // mapper\n    public long getTimeStamp(int index) {\n        return previousTimeStamps[index];\n    }\n\n    /**\n     * @return a new builder.\n     */\n    public static Builder<?> builder() {\n        return new Builder<>();\n    }\n\n    public static class Builder<T extends Builder<T>> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        protected int dimensions;\n        protected int startNormalization = DEFAULT_START_NORMALIZATION;\n        protected Integer stopNormalization = DEFAULT_STOP_NORMALIZATION;\n        protected double transformDecay;\n        protected Optional<Long> randomSeed = Optional.empty();\n        protected int shingleSize = DEFAULT_SHINGLE_SIZE;\n        protected double anomalyRate = 0.01;\n        protected TransformMethod transformMethod = TransformMethod.NONE;\n        protected ImputationMethod imputationMethod = PREVIOUS;\n        protected ForestMode forestMode = ForestMode.STANDARD;\n        protected int inputLength;\n        protected boolean normalizeTime = false;\n        protected double[] fillValues = null;\n        protected double[] weights = null;\n        protected double[] initialShingledInput = null;\n        protected float[] 
initialPoint = null;\n        protected double weightTime = 1.0;\n        protected Optional<Double> useImputedFraction = Optional.empty();\n        protected Optional<Deviation[]> deviations = Optional.empty();\n        protected Optional<Deviation[]> timeDeviations = Optional.empty();\n        protected Optional<Deviation[]> dataQuality = Optional.empty();\n        protected boolean fastForward = false;\n\n        public Preprocessor build() {\n            if (forestMode == ForestMode.STREAMING_IMPUTE) {\n                return new ImputePreprocessor(this);\n            } else if (requireInitialSegment(normalizeTime, transformMethod, forestMode, imputationMethod)) {\n                return new InitialSegmentPreprocessor(this);\n            }\n            return new Preprocessor(this);\n        }\n\n        public T dimensions(int dimensions) {\n            this.dimensions = dimensions;\n            return (T) this;\n        }\n\n        public T inputLength(int inputLength) {\n            this.inputLength = inputLength;\n            return (T) this;\n        }\n\n        public T startNormalization(int startNormalization) {\n            this.startNormalization = startNormalization;\n            return (T) this;\n        }\n\n        public T stopNormalization(Integer stopNormalization) {\n            this.stopNormalization = stopNormalization;\n            return (T) this;\n        }\n\n        public T shingleSize(int shingleSize) {\n            this.shingleSize = shingleSize;\n            return (T) this;\n        }\n\n        public T transformDecay(double transformDecay) {\n            this.transformDecay = transformDecay;\n            return (T) this;\n        }\n\n        public T useImputedFraction(double fraction) {\n            this.useImputedFraction = Optional.of(fraction);\n            return (T) this;\n        }\n\n        public T randomSeed(long randomSeed) {\n            this.randomSeed = Optional.of(randomSeed);\n            return (T) this;\n    
    }\n\n        public T imputationMethod(ImputationMethod imputationMethod) {\n            this.imputationMethod = imputationMethod;\n            return (T) this;\n        }\n\n        public T fillValues(double[] values) {\n            // values can be null\n            this.fillValues = (values == null) ? null : Arrays.copyOf(values, values.length);\n            return (T) this;\n        }\n\n        public T weights(double[] values) {\n            // values can be null\n            this.weights = (values == null) ? null : Arrays.copyOf(values, values.length);\n            return (T) this;\n        }\n\n        public T weightTime(double value) {\n            this.weightTime = value;\n            return (T) this;\n        }\n\n        public T normalizeTime(boolean normalizeTime) {\n            this.normalizeTime = normalizeTime;\n            return (T) this;\n        }\n\n        public T transformMethod(TransformMethod method) {\n            this.transformMethod = method;\n            return (T) this;\n        }\n\n        public T forestMode(ForestMode forestMode) {\n            this.forestMode = forestMode;\n            return (T) this;\n        }\n\n        // mapper\n        public T deviations(Deviation[] deviations) {\n            this.deviations = Optional.ofNullable(deviations);\n            return (T) this;\n        }\n\n        // mapper\n        public T dataQuality(Deviation[] dataQuality) {\n            this.dataQuality = Optional.ofNullable(dataQuality);\n            return (T) this;\n        }\n\n        // mapper\n        public T timeDeviations(Deviation[] timeDeviations) {\n            this.timeDeviations = Optional.ofNullable(timeDeviations);\n            return (T) this;\n        }\n\n        public T initialShingledInput(double[] initialShingledInput) {\n            this.initialShingledInput = copyIfNotnull(initialShingledInput);\n            return (T) this;\n        }\n\n        public T initialPoint(float[] initialPoint) {\n            
this.initialPoint = copyIfNotnull(initialPoint);\n            return (T) this;\n        }\n\n        public T fastForward(boolean fastForward) {\n            this.fastForward = fastForward;\n            return (T) this;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/transform/DifferenceTransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class DifferenceTransformer extends WeightedTransformer {\n\n    public DifferenceTransformer(double[] weights, Deviation[] deviation) {\n        super(weights, deviation);\n    }\n\n    @Override\n    public void invertInPlace(float[] values, double[] previousInput) {\n        checkArgument(values.length >= previousInput.length, \"have to be at least previous\");\n        super.invertInPlace(values, previousInput);\n        for (int i = 0; i < previousInput.length; i++) {\n            double output = values[i] + previousInput[i];\n            values[i] = (float) output;\n        }\n    }\n\n    /**\n     * inverts a forecast (and upper and lower limits) provided by RangeVector range\n     * the values are scaled by the factor used in the transformation for each\n     * iteration; and the resulting value is added back as an inverse of the\n     * differencing operation.\n     * \n     * @param ranges        provides p50 values with upper and lower estimates\n     * @param baseDimension the number of 
variables being forecast (often 1)\n     * @param previousInput the last input of length baseDimension\n     */\n    @Override\n    public void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput,\n            double[] correction) {\n        int inputLength = weights.length;\n        int horizon = ranges.values.length / baseDimension;\n        double[] last = Arrays.copyOf(previousInput, previousInput.length);\n        checkArgument(correction.length >= inputLength, \" incorrect length \");\n        for (int i = 0; i < horizon; i++) {\n            for (int j = 0; j < inputLength; j++) {\n                float weight = (weights[j] == 0) ? 0f : 1.0f / (float) weights[j];\n                ranges.scale(i * baseDimension + j, weight);\n                ranges.shift(i * baseDimension + j, (float) (getShift(j, deviations) + last[j]));\n                last[j] = ranges.values[j];\n            }\n        }\n    }\n\n    @Override\n    public float[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput,\n            Deviation[] initials, double clipFactor) {\n\n        double[] input = new double[inputPoint.length];\n        for (int i = 0; i < input.length; i++) {\n            input[i] = (internalTimeStamp == 0) ? 0 : (inputPoint[i] - previousInput[i]);\n        }\n        return super.transformValues(internalTimeStamp, input, null, initials, clipFactor);\n    }\n\n    @Override\n    public double[] getShift(double[] previous) {\n        double[] answer = new double[weights.length];\n        for (int i = 0; i < weights.length; i++) {\n            answer[i] = getShift(i, deviations) + previous[i];\n        }\n        return answer;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/transform/ITransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n/**\n * ThresholdedRCF allows transformers that transform the data in a streaming\n * manner; invoke RCF on the transformed data; and invert the results to the\n * original input space. A typical examples are differencing,\n * (streaming/stochastic) normalization, etc.\n *\n * This interface class spells out the operations required from such\n * transformers. 
Some operations below are specific to the existing\n * implementation and required by the mappers to produce state classes.\n */\n\npublic interface ITransformer {\n\n    // required by the mapper; this corresponds to providing each input\n    // column/attribute a weight\n    // different from 1.0 -- changing these weights can alter the RCF predictions\n    // significantly\n    // these weights should be informed by the domain and the intent of the overall\n    // computation\n\n    double[] getWeights();\n\n    // reverse of the above, used in mappers\n\n    void setWeights(double[] weights);\n\n    // used in mappers stores basic discounted averages and discounted (single step)\n    // differenced average\n\n    Deviation[] getDeviations();\n\n    // If the RCF expects values described by values[] corresponding to the\n    // correspondingInput[]\n    // then what should be alternative input that would have been transformed into\n    // values[]; the transformation is done in place\n\n    void invertInPlace(float[] values, double[] previousInput);\n\n    // similar to invert() but applies to a forecast provided by RangeVector over an\n    // input length (number of variables in a multivariate analysis) baseDimension\n    // and\n    // previousInput[] corresponds to the last observed values of those input.\n    // correction is the effect of last anomaly\n\n    void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput, double[] correction);\n\n    // update the internal data structures based on the current (multivariate) input\n    // inputPoint\n    // previousInput[] is the corresponding values of the last observed values\n    // missing values are in 0..(inputLength-1)\n\n    void updateDeviation(double[] inputPoint, double[] previousInput, int[] missingValues);\n\n    // transforms inputPoint[] to RCF space, non-null values of initials[] are\n    // used in normalization\n    // and are specific to this implementation, 
internalStamp corresponds to the\n    // sequence number of the\n    // input and clipFactor is a parameter that clips any normalization\n\n    float[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput, Deviation[] initials,\n            double clipFactor);\n\n    // used for converting RCF representations to actuals, used in\n    // predictor-corrector\n    double[] getShift(double[] previous);\n\n    // used for converting RCF representations to actuals, used in\n    // predictor-corrector\n    double[] getScale();\n\n    // used for computing errors in RCFcaster before the model is calibrated\n    double[] getSmoothedDeviations();\n\n    // used for determining noise\n    double[] getSmoothedDifferenceDeviations();\n}"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/transform/NormalizedDifferenceTransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class NormalizedDifferenceTransformer extends NormalizedTransformer {\n\n    public NormalizedDifferenceTransformer(double[] weights, Deviation[] deviation) {\n        super(weights, deviation);\n    }\n\n    @Override\n    public void invertInPlace(float[] values, double[] previousInput) {\n        checkArgument(values.length >= previousInput.length, \"have to be at least as much as input\");\n        super.invertInPlace(values, previousInput);\n        for (int i = 0; i < previousInput.length; i++) {\n            double output = values[i] + previousInput[i];\n            values[i] = (float) output;\n        }\n    }\n\n    /**\n     * inverts a forecast (and upper and lower limits) provided by RangeVector range\n     * the values are scaled by the factor used in the transformation note that the\n     * expected difference maintained in deviation[j + inputLength] is added for\n     * each attribute j, once for each iteration; and the resulting value is added\n     * back as an inverse of the 
differencing operation.\n     * \n     * @param ranges        provides p50 values with upper and lower estimates\n     * @param baseDimension the number of variables being forecast (often 1)\n     * @param previousInput the last input of length baseDimension\n     */\n    @Override\n    public void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput,\n            double[] correction) {\n\n        int inputLength = weights.length;\n        int horizon = ranges.values.length / baseDimension;\n        double[] last = Arrays.copyOf(previousInput, previousInput.length);\n        checkArgument(correction.length >= inputLength, \" incorrect length \");\n        for (int i = 0; i < horizon; i++) {\n            for (int j = 0; j < inputLength; j++) {\n                double weight = (weights[j] == 0) ? 0 : getScale(j, deviations) / weights[j];\n                ranges.scale(i * baseDimension + j, (float) weight);\n                double shift = last[j] + getShift(j, deviations);\n                ranges.shift(i * baseDimension + j, (float) shift);\n                last[j] = ranges.values[i * baseDimension + j];\n            }\n        }\n    }\n\n    /**\n     * a transformation that differences and then normalizes the results of\n     * multivariate values\n     * \n     * @param internalTimeStamp timestamp corresponding to this operation; used to\n     *                          ensure smoothness at 0\n     * @param inputPoint        the actual input\n     * @param previousInput     the previous input\n     * @param initials          an array containing normalization statistics, used\n     *                          only for the initial segment; otherwise it is null\n     * @param clipFactor        the factor used in clipping the normalized values\n     * @return the transformed values to be shingled and used in RCF\n     */\n    @Override\n    public float[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput,\n  
          Deviation[] initials, double clipFactor) {\n        double[] input = new double[inputPoint.length];\n        for (int i = 0; i < input.length; i++) {\n            input[i] = (internalTimeStamp == 0) ? 0 : inputPoint[i] - previousInput[i];\n        }\n        return super.transformValues(internalTimeStamp, input, null, initials, clipFactor);\n    }\n\n    @Override\n    public double[] getShift(double[] previous) {\n        double[] answer = new double[weights.length];\n        for (int i = 0; i < weights.length; i++) {\n            answer[i] = getShift(i, deviations) + previous[i];\n        }\n        return answer;\n    }\n\n    @Override\n    protected double getShift(int i, Deviation[] devs) {\n        return devs[i + weights.length].getMean();\n    }\n\n    @Override\n    protected double getScale(int i, Deviation[] devs) {\n        return (devs[i + weights.length].getDeviation() + 1.0);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/transform/NormalizedTransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class NormalizedTransformer extends WeightedTransformer {\n\n    public NormalizedTransformer(double[] weights, Deviation[] deviation) {\n        super(weights, deviation);\n    }\n\n    protected double clipValue(double clipfactor) {\n        return clipfactor;\n    }\n\n    protected double getScale(int i, Deviation[] devs) {\n        return (Math.abs(devs[i + 2 * weights.length].getMean()) + 1.0);\n    }\n\n    protected double getShift(int i, Deviation[] devs) {\n        return devs[i].getMean();\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/transform/SubtractMATransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class SubtractMATransformer extends WeightedTransformer {\n\n    public SubtractMATransformer(double[] weights, Deviation[] deviations) {\n        super(weights, deviations);\n    }\n\n    @Override\n    protected double getShift(int i, Deviation[] devs) {\n        return devs[i].getMean();\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/preprocessor/transform/WeightedTransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n/**\n * A weighted transformer maintains several data structures ( currently 3X) that\n * measure discounted averages and the corresponding standard deviations. for\n * input length X. The element i corresponds to discounted average of the\n * variable i, element (X+i) corresponds to the discounted average of the single\n * step differences of the same variable i, and element (2X+i) corresponds to\n * difference of variable i and the dicounted average, to capture second order\n * differences These quantities together can help answer a number of estimation\n * questions of a time series, and in particular help solve for simple linear\n * drifts. 
Even though the discounted averages are not obviously required --\n * they are useful in forecasts.\n *\n */\n@Getter\n@Setter\npublic class WeightedTransformer implements ITransformer {\n\n    public static int NUMBER_OF_STATS = 5;\n\n    double[] weights;\n\n    Deviation[] deviations;\n\n    public WeightedTransformer(double[] weights, Deviation[] deviations) {\n        checkArgument(NUMBER_OF_STATS * weights.length == deviations.length, \"incorrect lengths\");\n        this.weights = Arrays.copyOf(weights, weights.length);\n        this.deviations = new Deviation[deviations.length];\n        for (int i = 0; i < deviations.length; i++) {\n            checkArgument(deviations[i] != null, \"cannot be null\");\n            this.deviations[i] = deviations[i].copy();\n        }\n    }\n\n    /**\n     * the inversion does not require previousInput; note that weight == 0, would\n     * produce 0 values in the inversion\n     *\n     * @param values        what the RCF would like to observe\n     * @param previousInput what was the real (or previously imputed) observation\n     */\n    @Override\n    public void invertInPlace(float[] values, double[] previousInput) {\n        double output = 0;\n        int length = min(weights.length, values.length);\n        for (int i = 0; i < length; i++) {\n            output = (weights[i] == 0) ? 
0 : values[i] * getScale(i, deviations) / weights[i];\n            output += getShift(i, deviations);\n            values[i] = (float) output;\n        }\n    }\n\n    /**\n     * inverts a forecast (and upper and lower limits) provided by RangeVector range\n     * note that the expected difference maintained in deviation[j + inputLength] is\n     * added for each attribute j\n     * \n     * @param ranges        provides p50 values with upper and lower estimates\n     * @param baseDimension the number of variables being forecast (often 1)\n     * @param previousInput the last input of length baseDimension\n     * @param correction    correction due to last anomaly updates the RangeVector\n     *                      to the inverse transform and applies correction\n     */\n    public void invertForecastRange(RangeVector ranges, int baseDimension, double[] previousInput,\n            double[] correction) {\n        int horizon = ranges.values.length / baseDimension;\n        int inputLength = weights.length;\n        checkArgument(correction.length >= inputLength, \" incorrect length \");\n        for (int i = 0; i < horizon; i++) {\n            for (int j = 0; j < inputLength; j++) {\n                double weight = (weights[j] == 0) ? 
0 : getScale(j, deviations) / weights[j];\n                ranges.scale(i * baseDimension + j, (float) weight);\n                ranges.shift(i * baseDimension + j, (float) (getShift(j, deviations) + i * getDrift(j, deviations)));\n            }\n        }\n    }\n\n    /**\n     * updates the 5*inputPoint.length statistics; the statistic i corresponds to\n     * discounted average of variable i and statistic i + inputPoint.length\n     * corresponds to the discounted average single step difference\n     * \n     * @param inputPoint    the input\n     * @param previousInput the previous input\n     * @param missingValues any missing values (in range 0..(inputLength-1))\n     */\n    public void updateDeviation(double[] inputPoint, double[] previousInput, int[] missingValues) {\n        checkArgument(inputPoint.length * NUMBER_OF_STATS == deviations.length, \"incorrect lengths\");\n        checkArgument(inputPoint.length == previousInput.length, \" lengths must match\");\n        boolean[] missing = new boolean[inputPoint.length];\n        Arrays.fill(missing, false);\n        if (missingValues != null) {\n            for (int index : missingValues) {\n                missing[index] = true;\n            }\n        }\n        for (int i = 0; i < inputPoint.length; i++) {\n            if (!missing[i]) {\n                deviations[i].update(inputPoint[i]);\n                deviations[i + 2 * inputPoint.length].update(deviations[i].getDeviation());\n            }\n            // the differenced quantities have to be updated\n            if (deviations[i + inputPoint.length].getCount() == 0) {\n                deviations[i + inputPoint.length].update(0);\n            } else {\n                deviations[i + inputPoint.length].update(inputPoint[i] - previousInput[i]);\n            }\n            deviations[i + 3 * inputPoint.length].update(deviations[i + inputPoint.length].getMean());\n            deviations[i + 4 * inputPoint.length].update(deviations[i + 
inputPoint.length].getDeviation());\n        }\n    }\n\n    /**\n     * a normalization function\n     *\n     * @param value      argument to be normalized\n     * @param shift      the shift in the value\n     * @param scale      the scaling factor\n     * @param clipFactor the output value is bound is in [-clipFactor,clipFactor]\n     * @return the normalized value\n     */\n    protected double normalize(double value, double shift, double scale, double clipFactor) {\n        checkArgument(scale > 0, \" should be non-negative\");\n        double t = (value - shift) / (scale);\n        if (t >= clipFactor) {\n            return clipFactor;\n        }\n        if (t < -clipFactor) {\n            return -clipFactor;\n        }\n        return t;\n    }\n\n    /**\n     * a transformation that normalizes the multivariate values\n     *\n     * @param internalTimeStamp timestamp corresponding to this operation; used to\n     *                          ensure smoothness at 0\n     * @param inputPoint        the actual input\n     * @param previousInput     the previous input\n     * @param initials          an array containing normalization statistics, used\n     *                          only for the initial segment; otherwise it is null\n     * @param clipFactor        the factor used in clipping the normalized values\n     * @return the transformed values to be shingled and used in RCF\n     */\n    @Override\n    public float[] transformValues(int internalTimeStamp, double[] inputPoint, double[] previousInput,\n            Deviation[] initials, double clipFactor) {\n        float[] output = new float[inputPoint.length];\n        for (int i = 0; i < inputPoint.length; i++) {\n            Deviation[] devs = (initials == null) ? 
deviations : initials;\n            output[i] = (float) (weights[i]\n                    * normalize(inputPoint[i], getShift(i, devs), getScale(i, devs), clipValue(clipFactor)));\n        }\n        return output;\n    }\n\n    protected double clipValue(double clipfactor) {\n        return Double.MAX_VALUE;\n    }\n\n    public Deviation[] getDeviations() {\n        Deviation[] answer = new Deviation[deviations.length];\n        for (int i = 0; i < deviations.length; i++) {\n            answer[i] = deviations[i].copy();\n        }\n        return answer;\n    }\n\n    public double[] getWeights() {\n        return Arrays.copyOf(weights, weights.length);\n    }\n\n    public void setWeights(double[] weights) {\n        checkArgument(weights.length == this.weights.length, \" incorrect length\");\n        this.weights = Arrays.copyOf(weights, weights.length);\n    }\n\n    protected double getScale(int i, Deviation[] devs) {\n        return (1.0);\n    }\n\n    protected double getShift(int i, Deviation[] devs) {\n        return 0;\n    }\n\n    protected double getDrift(int i, Deviation[] devs) {\n        return devs[i + 3 * weights.length].getMean();\n    }\n\n    @Override\n    public double[] getScale() {\n        double[] answer = new double[weights.length];\n        for (int i = 0; i < weights.length; i++) {\n            answer[i] = (weights[i] == 0) ? 
0 : getScale(i, deviations) / weights[i];\n        }\n        return answer;\n    }\n\n    @Override\n    public double[] getShift(double[] previous) {\n        double[] answer = new double[weights.length];\n        for (int i = 0; i < weights.length; i++) {\n            answer[i] = getShift(i, deviations);\n        }\n        return answer;\n    }\n\n    public double[] getSmoothedDeviations() {\n        double[] answer = new double[weights.length];\n        for (int i = 0; i < weights.length; i++) {\n            answer[i] = Math.abs(deviations[i + 2 * weights.length].getMean());\n        }\n        return answer;\n    }\n\n    public double[] getSmoothedDifferenceDeviations() {\n        double[] answer = new double[weights.length];\n        for (int i = 0; i < weights.length; i++) {\n            answer[i] = Math.abs(deviations[i + 4 * weights.length].getMean());\n        }\n        return answer;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/ConditionalTreeSample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport java.util.ArrayList;\nimport java.util.Comparator;\nimport java.util.List;\nimport java.util.stream.Collector;\n\nimport com.amazon.randomcutforest.tree.BoundingBox;\n\npublic class ConditionalTreeSample {\n\n    /**\n     * the index of the point in the PointStore which is used to construct the\n     * sample for a query\n     */\n    public int pointStoreIndex;\n\n    /**\n     * the bounding box in the tree of the node which is the parent of the point\n     * used to construct the sample Note that the bounding box is in the projective\n     * space defined by the tree\n     */\n    protected BoundingBox parentOfLeafBox;\n\n    /**\n     * L1 distance of the sampled point (in the projective space of the tree) L1\n     * distancce is chosen since the entire notion of RCF is oriented towards L1\n     * sampling\n     */\n\n    public double distance;\n\n    /**\n     * the point in the tree corresponding to the sample\n     */\n\n    public float[] leafPoint;\n\n    /**\n     * weight of the point ; useful for deduplication -- this can also be resued if\n     * trees are assigned weights\n     */\n    public double weight;\n\n    public ConditionalTreeSample(int pointStoreIndex, BoundingBox box, double distance, float[] leafPoint) {\n        this.pointStoreIndex = pointStoreIndex;\n       
 this.parentOfLeafBox = box;\n        this.distance = distance;\n        this.leafPoint = leafPoint;\n        this.weight = 1.0;\n    }\n\n    public static Collector<ConditionalTreeSample, ArrayList<ConditionalTreeSample>, ArrayList<ConditionalTreeSample>> collector = Collector\n            .of(ArrayList::new, ArrayList::add, (left, right) -> {\n                left.addAll(right);\n                return left;\n            }, list -> list);\n    // the collector specifically does not try to sort/dedup since we could (and\n    // would) be running the\n    // collector in a parallel mode\n\n    public static List<ConditionalTreeSample> dedup(List<ConditionalTreeSample> list) {\n        list.sort(Comparator.comparingInt(o -> o.pointStoreIndex));\n        List<ConditionalTreeSample> newList = new ArrayList<>();\n        newList.add(list.get(0));\n        for (int j = 1; j < list.size(); j++) {\n            if (list.get(j).pointStoreIndex == newList.get(newList.size() - 1).pointStoreIndex) {\n                newList.get(newList.size() - 1).weight += list.get(j).weight;\n            } else {\n                newList.add(list.get(j));\n            }\n        }\n        return newList;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/ConvergingAccumulator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\n/**\n * An accumulator which can be used to short-circuit the number of trees visited\n * if the responses from the trees seen so far appear to be converging to a\n * value. for an example\n *\n * @param <R> The result type being accumulated.\n * @see com.amazon.randomcutforest.RandomCutForest\n */\npublic interface ConvergingAccumulator<R> {\n    /**\n     * Add a new result value to this accumulator.\n     *\n     * @param value A single result value which should be accumulated together with\n     *              other results.\n     */\n    void accept(R value);\n\n    /**\n     * @return 'true' if the accumulator has converged and we can stop accepting new\n     *         values, 'false' otherwise.\n     */\n    boolean isConverged();\n\n    /**\n     * @return the number of values that have been accepted by this accumulator.\n     */\n    int getValuesAccepted();\n\n    /**\n     * @return the accumulated.\n     */\n    R getAccumulatedValue();\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/DensityOutput.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\n/**\n * DensityOutput extends InterpolationMeasure with methods for computing density\n * estimates.\n */\npublic class DensityOutput extends InterpolationMeasure {\n\n    /**\n     * Default scaling factor (q) to use in the getDensity method.\n     */\n    public static final double DEFAULT_SUM_OF_POINTS_SCALING_FACTOR = 0.001;\n\n    /**\n     * Create a new DensityOutput object with the given number of spatial\n     * dimensions. Note that the number of half-dimensions will be 2 * dimensions.\n     *\n     * @param dimensions The number of spatial dimensions.\n     * @param sampleSize The samplesize of each tree in forest, which may be used\n     *                   for normalization.\n     */\n    public DensityOutput(int dimensions, int sampleSize) {\n        super(dimensions, sampleSize);\n    }\n\n    /**\n     * A copy constructor that creates a deep copy of the base DensityOutput.\n     *\n     * @param base An InterpolationMeasure instance that we want to copy.\n     */\n    public DensityOutput(InterpolationMeasure base) {\n        super(base);\n    }\n\n    /**\n     * Compute a scalar density estimate. 
The scaling factor q is multiplied by the\n     * sum of points measure and added to the denominator in the density expression\n     * to prevent divide-by-0 errors.\n     *\n     * @param q                 A scaling factor applied to the sum of points in the\n     *                          measure.\n     * @param manifoldDimension The number of dimensions of the submanifold on which\n     *                          we are estimating a density.\n     * @return a scalar density estimate.\n     */\n    public double getDensity(double q, int manifoldDimension) {\n        double sumOfPts = measure.getHighLowSum() / sampleSize;\n\n        if (sumOfPts <= 0.0) {\n            return 0.0;\n        }\n\n        double sumOfFactors = 0;\n\n        for (int i = 0; i < dimensions; i++) {\n            double t = probMass.getHighLowSum(i) > 0 ? distances.getHighLowSum(i) / probMass.getHighLowSum(i) : 0;\n            if (t > 0) {\n                t = Math.exp(Math.log(t) * manifoldDimension) * probMass.getHighLowSum(i);\n            }\n            sumOfFactors += t;\n        }\n\n        return sumOfPts / (q * sumOfPts + sumOfFactors);\n    }\n\n    /**\n     * Compute a scalar density estimate. This method uses the default scaling\n     * factor and the full number of dimensions.\n     *\n     * @return a scalar density estimate.\n     */\n    public double getDensity() {\n        return getDensity(DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions);\n    }\n\n    /**\n     * Compute a directional density estimate. 
The scaling factor q is multiplied by\n     * the sum of points measure and added to the denominator in the density\n     * expression to prevent divide-by-0 errors.\n     *\n     * @param q                 A scaling factor applied to the sum of points in the\n     *                          measure.\n     * @param manifoldDimension The number of dimensions of the submanifold on which\n     *                          we are estimating a density.\n     * @return a directional density estimate.\n     */\n    public DiVector getDirectionalDensity(double q, int manifoldDimension) {\n        double density = getDensity(q, manifoldDimension);\n        double sumOfPts = measure.getHighLowSum(); // normalization not performed since this would be used in a ratio\n        DiVector factors = new DiVector(super.getDimensions());\n\n        if (sumOfPts > 0) {\n            factors = measure.scale(density / sumOfPts);\n        }\n\n        return factors;\n    }\n\n    /**\n     * Compute a directional density estimate. This method uses the default scaling\n     * factor and the full number of dimensions.\n     *\n     * @return a scalar density estimate.\n     */\n    public DiVector getDirectionalDensity() {\n        return getDirectionalDensity(DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/DiVector.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\n\nimport java.util.Arrays;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.anomalydetection.AnomalyAttributionVisitor;\n\n/**\n * A DiVector is used when we want to track a quantity in both the positive and\n * negative directions for each dimension in a manifold. For example, when using\n * a {@link AnomalyAttributionVisitor} to compute the attribution of the anomaly\n * score to dimension of the input point, we want to know if the anomaly score\n * attributed to the ith coordinate of the input point is due to that coordinate\n * being unusually high or unusually low.\n */\npublic class DiVector {\n\n    /**\n     * An array of values corresponding to the positive direction in each dimension.\n     */\n    public final double[] high;\n    /**\n     * An array of values corresponding to the negative direction in each dimension.\n     */\n    public final double[] low;\n    private final int dimensions;\n\n    /**\n     * Construct a new DiVector with the given number of spatial dimensions. 
In the\n     * result, {@link #high} and {@link #low} will each contain this many variates.\n     *\n     * @param dimensions The number of dimensions of data to store.\n     */\n    public DiVector(int dimensions) {\n        checkArgument(dimensions > 0, \"dimensions must be greater than 0\");\n        this.dimensions = dimensions;\n        high = new double[dimensions];\n        low = new double[dimensions];\n    }\n\n    /**\n     * Construct a new DiVector with the given number of spatial dimensions. In the\n     * result, {@link #high} and {@link #low} will each contain this many variates.\n     *\n     * @param high the high vector\n     * @param low  the low vector.\n     */\n    public DiVector(double[] high, double[] low) {\n        checkArgument(high.length == low.length, \"dimensions must be equal\");\n        this.dimensions = high.length;\n        this.high = Arrays.copyOf(high, high.length);\n        this.low = Arrays.copyOf(low, low.length);\n    }\n\n    /**\n     * Create a deep copy of the base DiVector.\n     *\n     * @param base The DiVector to copy.\n     */\n    public DiVector(DiVector base) {\n        this.dimensions = base.dimensions;\n        high = Arrays.copyOf(base.high, dimensions);\n        low = Arrays.copyOf(base.low, dimensions);\n    }\n\n    /**\n     * Add the values of {@link #high} and {@link #low} from the right vector to the\n     * left vector and return the left vector. This method is used to accumulate\n     * DiVector results.\n     *\n     * @param left  The DiVector we are modifying. After calling this method, the\n     *              low and high values in the DiVector will contain a sum of the\n     *              previous values and the corresponding values from the right\n     *              vector.\n     * @param right A DiVector that we want to add to the left vector. 
This DiVector\n     *              is not modified by the method.\n     * @return the modified left vector.\n     */\n    public static DiVector addToLeft(DiVector left, DiVector right) {\n        checkNotNull(left, \"left must not be null\");\n        checkNotNull(right, \"right must not be null\");\n        checkArgument(left.dimensions == right.dimensions, \"dimensions must be the same\");\n\n        for (int i = 0; i < left.dimensions; i++) {\n            left.high[i] += right.high[i];\n            left.low[i] += right.low[i];\n        }\n\n        return left;\n    }\n\n    /**\n     * @return the number of spatial dimensions of this DiVector.\n     */\n    public int getDimensions() {\n        return dimensions;\n    }\n\n    /**\n     * Return a new DiVector where each value in high and low is equal to z times\n     * the corresponding value in this DiVector.\n     *\n     * @param z The scaling factor.\n     * @return a new DiVector where each value in high and low is equal to z times\n     *         the corresponding value in this DiVector.\n     */\n    public DiVector scale(double z) {\n        DiVector result = new DiVector(dimensions);\n        for (int i = 0; i < dimensions; i++) {\n            result.high[i] = high[i] * z;\n            result.low[i] = low[i] * z;\n        }\n        return result;\n    }\n\n    /**\n     * If the L1 norm of this DiVector is positive, scale the values in high and low\n     * so that the new L1 norm is equal to the target value. 
If the current L1 norm\n     * is 0, do nothing.\n     *\n     * @param targetNorm The target L1 norm value.\n     */\n    public void renormalize(double targetNorm) {\n        double norm = getHighLowSum();\n        if (norm > 0) {\n            double scaleFactor = targetNorm / norm;\n            for (int i = 0; i < dimensions; i++) {\n                high[i] = high[i] * scaleFactor;\n                low[i] = low[i] * scaleFactor;\n            }\n        }\n    }\n\n    /**\n     * Apply the given function to each component of DiVector. That is, each entry\n     * of both the high and low arrays is transformed using this function.\n     *\n     * @param function A function to apply to every entry of the high and low arrays\n     *                 in this DiVector.\n     */\n    public void componentwiseTransform(Function<Double, Double> function) {\n        for (int i = 0; i < dimensions; i++) {\n            high[i] = function.apply(high[i]);\n            low[i] = function.apply(low[i]);\n        }\n    }\n\n    /**\n     * Return the sum of high and low in the ith coordinate.\n     *\n     * @param i A coordinate index\n     * @return the sum of high and low in the ith coordinate.\n     */\n    public double getHighLowSum(int i) {\n        return high[i] + low[i];\n    }\n\n    /**\n     * @return the sum of all values in the high and low arrays.\n     */\n    public double getHighLowSum() {\n        double score = 0.0;\n        for (int i = 0; i < dimensions; i++) {\n            score += high[i] + low[i];\n        }\n        return score;\n    }\n\n    public DiVector lift(Function<double[], double[]> projection) {\n        return new DiVector(projection.apply(high), projection.apply(low));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/InterpolationMeasure.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static java.lang.Math.round;\n\nimport java.util.function.Function;\nimport java.util.stream.Collector;\n\n/**\n * An InterpolationMeasure is used by\n * {@link com.amazon.randomcutforest.interpolation.SimpleInterpolationVisitor}\n * to store certain geometric quantities during a tree traversal.\n */\npublic class InterpolationMeasure {\n\n    public final DiVector measure;\n    public final DiVector distances;\n    public final DiVector probMass;\n    protected final int dimensions;\n    protected int sampleSize;\n\n    /**\n     * Create a new InterpolationMeasure object with the given number of spatial\n     * dimensions. 
Note that the number of half-dimensions will be 2 * dimensions.\n     *\n     * @param dimensions The number of spatial dimensions.\n     * @param sampleSize The samplesize of each tree in forest, which may be used\n     *                   for normalization.\n     */\n    public InterpolationMeasure(int dimensions, int sampleSize) {\n        checkArgument(dimensions > 0, \"dimensions must be greater than 0\");\n        this.sampleSize = sampleSize;\n        this.dimensions = dimensions;\n        measure = new DiVector(dimensions);\n        distances = new DiVector(dimensions);\n        probMass = new DiVector(dimensions);\n    }\n\n    /**\n     * A copy constructor that creates a deep copy of the base InterpolationMeasure.\n     *\n     * @param base An InterpolationMeasure instance that we want to copy.\n     */\n    public InterpolationMeasure(InterpolationMeasure base) {\n        this.sampleSize = base.sampleSize;\n        this.dimensions = base.dimensions;\n        measure = new DiVector(base.measure);\n        distances = new DiVector(base.distances);\n        probMass = new DiVector(base.probMass);\n    }\n\n    protected InterpolationMeasure(int sampleSize, DiVector measure, DiVector distances, DiVector probMass) {\n\n        checkArgument(measure.getDimensions() == distances.getDimensions(),\n                \"measure.getDimensions() should be equal to distances.getDimensions()\");\n        checkArgument(measure.getDimensions() == probMass.getDimensions(),\n                \"measure.getDimensions() should be equal to probMass.getDimensions()\");\n\n        this.sampleSize = sampleSize;\n        this.dimensions = measure.getDimensions();\n        this.measure = measure;\n        this.distances = distances;\n        this.probMass = probMass;\n    }\n\n    /**\n     * Add the values of {@link #measure}, {@link #distances}, and {@link #probMass}\n     * from the right InterpolationMeasure to the left InterpolationMeasure and\n     * return the left 
InterpolationMeasure. This method is used to accumulate\n     * InterpolationMeasure results.\n     *\n     * @param left  The InterpolationMeasure we are modifying. After calling this\n     *              method, fields in this InterpolationMeasure will contain a sum\n     *              of the previous values and the corresponding values from the\n     *              right InterpolationMeasure.\n     * @param right An InterpolationMeasure that we want to add to the left vector.\n     *              This InterpolationMeasure is not modified by the method.\n     * @return the modified left vector.\n     */\n    public static InterpolationMeasure addToLeft(InterpolationMeasure left, InterpolationMeasure right) {\n        checkNotNull(left, \"left must not be null\");\n        checkNotNull(right, \"right must not be null\");\n        checkArgument(left.dimensions == right.dimensions, \"dimensions must be the same\");\n        left.sampleSize += right.sampleSize;\n        DiVector.addToLeft(left.distances, right.distances);\n        DiVector.addToLeft(left.measure, right.measure);\n        DiVector.addToLeft(left.probMass, right.probMass);\n\n        return left;\n    }\n\n    /**\n     * Return a {@link Collector} which can be used to the collect many\n     * InterpolationMeasure results into a single, final result.\n     *\n     * @param dimensions    The number of spatial dimensions in the\n     *                      InterpolationMeasures being collected.\n     * @param sampleSize    The sample size of the Random Cut Trees that were\n     *                      measured.\n     * @param numberOfTrees The number of trees whose measures we are collecting\n     *                      into a final result. 
This value is used for scaling.\n     * @return an interpolation measure containing the aggregated, scaled result.\n     */\n    public static Collector<InterpolationMeasure, InterpolationMeasure, InterpolationMeasure> collector(int dimensions,\n            int sampleSize, int numberOfTrees) {\n        return Collector.of(() -> new InterpolationMeasure(dimensions, sampleSize), InterpolationMeasure::addToLeft,\n                InterpolationMeasure::addToLeft, result -> result.scale(1.0 / numberOfTrees));\n    }\n\n    /**\n     * @return the number of spatial dimensions in this InterpolationMeasure.\n     */\n    public int getDimensions() {\n        return dimensions;\n    }\n\n    /**\n     * @return the sample size of the Random Cut Tree that we are measuring.\n     */\n    public int getSampleSize() {\n        return sampleSize;\n    }\n\n    /**\n     * Return a new InterpolationMeasure with all values scaled by the given factor.\n     *\n     * @param z The scale factor.\n     * @return a new InterpolationMeasure with all values scaled by the given\n     *         factor.\n     */\n    public InterpolationMeasure scale(double z) {\n        return new InterpolationMeasure((int) round(sampleSize * z), measure.scale(z), distances.scale(z),\n                probMass.scale(z));\n    }\n\n    public InterpolationMeasure lift(Function<double[], double[]> projection) {\n        return new InterpolationMeasure(sampleSize, measure.lift(projection), distances.lift(projection),\n                probMass.lift(projection));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/Neighbor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Optional;\nimport java.util.Set;\nimport java.util.function.BiConsumer;\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\nimport java.util.function.Supplier;\nimport java.util.stream.Collector;\n\n/**\n * A Neighbor represents a point together with a distance, where the distance is\n * with respect to some query point. That is, we think of this point as being a\n * neighbor of the query point. If the feature is enabled in the forest, a\n * Neighbor will also contain a set of sequence indexes containing the times\n * this point was added to the forest.\n */\npublic class Neighbor {\n\n    /**\n     * The neighbor point.\n     */\n    public final float[] point;\n\n    /**\n     * The distance between the neighbor point and the query point it was created\n     * from.\n     */\n    public final double distance;\n\n    /**\n     * A list of sequence indexes corresponding to the times when this neighbor\n     * point was added to the forest. 
If sequence indexes are not enabled for the\n     * forest, then this list will be empty.\n     */\n    public final List<Long> sequenceIndexes;\n\n    public int count;\n\n    /**\n     * Create a new Neighbor.\n     *\n     * @param point           The neighbor point.\n     * @param distance        The distance between the neighbor point and the query\n     *                        point was created from.\n     * @param sequenceIndexes A list of sequence indexes corresponding to the times\n     *                        when this neighbor point was added to the forest.\n     * @param count           The number of copies\n     */\n    public Neighbor(float[] point, double distance, List<Long> sequenceIndexes, int count) {\n        this.point = point;\n        this.distance = distance;\n        this.sequenceIndexes = sequenceIndexes;\n        this.count = count;\n    }\n\n    public Neighbor(float[] point, double distance, List<Long> sequenceIndexes) {\n        this(point, distance, sequenceIndexes, 1);\n    }\n\n    /**\n     * Get Neighbor collector which merges duplicate Neighbors and sorts them in\n     * ascending order of distance\n     *\n     * @return Neighbor collector\n     */\n    public static Collector<Optional<Neighbor>, Map<Integer, Neighbor>, List<Neighbor>> collector() {\n        return new CollectorImpl();\n    }\n\n    /**\n     * Merge sequence indexes of other Neighbor to itself\n     *\n     * @param other other Neighbor whose sequenceIndexes need to be merged\n     */\n    private void mergeSequenceIndexes(Neighbor other) {\n        this.sequenceIndexes.addAll(other.sequenceIndexes);\n        this.count += other.count;\n    }\n\n    /**\n     * Get hash code for the Point associated with object\n     *\n     * @return hash code for the Point\n     */\n    private int getHashCodeForPoint() {\n        return Arrays.hashCode(point);\n    }\n\n    private static class CollectorImpl\n            implements Collector<Optional<Neighbor>, 
Map<Integer, Neighbor>, List<Neighbor>> {\n\n        @Override\n        public Supplier<Map<Integer, Neighbor>> supplier() {\n            return HashMap::new;\n        }\n\n        @Override\n        public BiConsumer<Map<Integer, Neighbor>, Optional<Neighbor>> accumulator() {\n            return (neighborsMap, neighborOptional) -> {\n                if (neighborOptional.isPresent()) {\n                    mergeNeighborIfNeededAndPut(neighborsMap, neighborOptional.get());\n                }\n            };\n        }\n\n        @Override\n        public BinaryOperator<Map<Integer, Neighbor>> combiner() {\n            return (left, right) -> {\n                right.forEach((k, v) -> mergeNeighborIfNeededAndPut(left, v));\n                return left;\n            };\n        }\n\n        @Override\n        public Function<Map<Integer, Neighbor>, List<Neighbor>> finisher() {\n            return map -> {\n                List<Neighbor> combinedResult = new ArrayList<>();\n                map.forEach((k, v) -> {\n                    v.sequenceIndexes.sort(Long::compareTo);\n                    combinedResult.add(v);\n                });\n                Comparator<Neighbor> comparator = Comparator.comparingDouble(n -> n.distance);\n                combinedResult.sort(comparator);\n                return combinedResult;\n            };\n        }\n\n        @Override\n        public Set<Characteristics> characteristics() {\n            return Collections.emptySet();\n        }\n\n        private void mergeNeighborIfNeededAndPut(Map<Integer, Neighbor> neighborsMap, Neighbor currentNeighbor) {\n            Neighbor existingNeighbor = neighborsMap.get(currentNeighbor.getHashCodeForPoint());\n            if (existingNeighbor != null) {\n                existingNeighbor.mergeSequenceIndexes(currentNeighbor);\n            } else {\n                neighborsMap.put(currentNeighbor.getHashCodeForPoint(), currentNeighbor);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDiVectorAccumulator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\n/**\n * A converging accumulator using a one-sided standard deviation tests. The\n * accumulator tests the sum of entries (i.e., the \"high-low sum\") in the\n * submitted DiVectors for convergence and returns the sum of all submitted\n * DiVectors.\n */\npublic class OneSidedConvergingDiVectorAccumulator extends OneSidedStDevAccumulator<DiVector> {\n\n    /**\n     * Create a new converging accumulator that uses a one-sided standard deviation\n     * test.\n     *\n     * @param dimensions        The number of dimensions in the DiVectors being\n     *                          accumulated.\n     * @param highIsCritical    Set to 'true' if we care more about high values of\n     *                          the converging scalar than low values. Set to\n     *                          'false' if the opposite is true.\n     * @param precision         The number of witnesses required before declaring\n     *                          convergence will be at least 1.0 / precision.\n     * @param minValuesAccepted The user-specified minimum number of values visited\n     *                          before returning a result. 
Note that\n     *                          {@link #isConverged()} may return true before\n     *                          accepting this number of results if the\n     * @param maxValuesAccepted The maximum number of values that will be accepted\n     *                          by this accumulator.\n     */\n    public OneSidedConvergingDiVectorAccumulator(int dimensions, boolean highIsCritical, double precision,\n            int minValuesAccepted, int maxValuesAccepted) {\n        super(highIsCritical, precision, minValuesAccepted, maxValuesAccepted);\n        accumulatedValue = new DiVector(dimensions);\n    }\n\n    /**\n     * Compute the \"high-low sum\" for the given DiVector.\n     *\n     * @param result A new result DiVector computed by a Random Cut Tree.\n     * @return the \"high-low sum\" for the given DiVector.\n     */\n    @Override\n    protected double getConvergingValue(DiVector result) {\n        return result.getHighLowSum();\n    }\n\n    /**\n     * Add the DiVector to the aggregate DiVector in this accumulator.\n     *\n     * @param result The new result to add to the accumulated value.\n     */\n    @Override\n    protected void accumulateValue(DiVector result) {\n        DiVector.addToLeft(accumulatedValue, result);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDoubleAccumulator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\n/**\n * A converging accumulator using a one-sided standard deviation tests. The\n * accumulator tests the submitted values for convergence and returns the sum of\n * all submitted values.\n */\npublic class OneSidedConvergingDoubleAccumulator extends OneSidedStDevAccumulator<Double> {\n\n    /**\n     * Create a new converging accumulator that uses a one-sided standard deviation\n     * test.\n     *\n     * @param highIsCritical    Set to 'true' if we care more about high values of\n     *                          the converging scalar than low values. Set to\n     *                          'false' if the opposite is true.\n     * @param precision         The number of witnesses required before declaring\n     *                          convergence will be at least 1.0 / precision.\n     * @param minValuesAccepted The user-specified minimum number of values visited\n     *                          before returning a result. 
Note that\n     *                          {@link #isConverged()} may return true before\n     *                          accepting this number of results if the\n     * @param maxValuesAccepted The maximum number of values that will be accepted\n     *                          by this accumulator.\n     */\n    public OneSidedConvergingDoubleAccumulator(boolean highIsCritical, double precision, int minValuesAccepted,\n            int maxValuesAccepted) {\n        super(highIsCritical, precision, minValuesAccepted, maxValuesAccepted);\n        accumulatedValue = 0.0;\n    }\n\n    /**\n     * We are testing for convergence directly on the submitted double values, hence\n     * we just return the argument as-is.\n     *\n     * @param result A new result value computed by a Random Cut Tree.\n     * @return the result value.\n     */\n    @Override\n    protected double getConvergingValue(Double result) {\n        return result;\n    }\n\n    /**\n     * Add the result to the sum of result values.\n     *\n     * @param result The new result to add to the accumulated value.\n     */\n    @Override\n    protected void accumulateValue(Double result) {\n        accumulatedValue += result;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/OneSidedStDevAccumulator.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static java.lang.Math.max;\n\n/**\n * This accumulator checks to see if a result is converging by testing the\n * sample mean and standard deviation of a scalar value computed from the\n * result. As the name implies, the accumulator performs a one-sided check,\n * comparing the new value the current sample mean and updating its converged\n * status only if the difference is positive (or negative, if highIsCritical is\n * set to false. This accumulator is intended to be used with values where we\n * care more about outliers in one direction. 
For example, if our statistic is\n * anomaly score, we are normally more concerned with high anomaly scores than\n * low ones.\n *\n * @param <R> The type of the value being accumulated.\n */\npublic abstract class OneSidedStDevAccumulator<R> implements ConvergingAccumulator<R> {\n\n    /**\n     * When testing for convergence, we use ALPHA times the sample standard\n     * deviation to define our interval.\n     */\n    private static final double ALPHA = 0.5;\n    /**\n     * The minimum number of values that have to be accepted by this accumulator\n     * before we start testing for convergence.\n     */\n    private final int minValuesAccepted;\n    /**\n     * The number of witnesses needed to declare convergence.\n     */\n    private final int convergenceThreshold;\n    /**\n     * Set to 'true' if we care more about high values of the converging scalar than\n     * low values. Set to 'false' if the opposite is true.\n     */\n    private final boolean highIsCritical;\n    /**\n     * This value is +1 if highIsCritical is 'true', and -1 if highIsCritical is\n     * 'false'. It is used in the convergence test.\n     */\n    private final int sign;\n    /**\n     * The value accumulated until now.\n     */\n    protected R accumulatedValue;\n    /**\n     * The number of values accepted by this accumulator until now.\n     */\n    private int valuesAccepted;\n    /**\n     * The number of values that are 'witnesses' to convergence until now. See\n     * {@link #accept}.\n     */\n    private int witnesses;\n    /**\n     * The current sum of the converging scalar value. Used to compute the sample\n     * mean.\n     */\n    private double sumConvergeVal;\n    /**\n     * The current sum of squares of the converging scalar value. 
Used to compute\n     * the sample standard deviation.\n     */\n    private double sumSqConvergeVal;\n\n    /**\n     * Create a new converging accumulator that uses a one-sided standard deviation\n     * test.\n     *\n     * @param highIsCritical    Set to 'true' if we care more about high values of\n     *                          the converging scalar than low values. Set to\n     *                          'false' if the opposite is true.\n     * @param precision         The number of witnesses required before declaring\n     *                          convergence will be at least 1.0 / precision.\n     * @param minValuesAccepted The user-specified minimum number of values visited\n     *                          before returning a result. Note that\n     *                          {@link #isConverged()} may return true before\n     *                          accepting this number of results if the\n     * @param maxValuesAccepted The maximum number of values that will be accepted\n     *                          by this accumulator.\n     */\n    public OneSidedStDevAccumulator(boolean highIsCritical, double precision, int minValuesAccepted,\n            int maxValuesAccepted) {\n\n        this.highIsCritical = highIsCritical;\n        this.convergenceThreshold = precision < 1.0 / maxValuesAccepted ? maxValuesAccepted : (int) (1.0 / precision);\n        this.minValuesAccepted = Math.min(minValuesAccepted, maxValuesAccepted);\n        valuesAccepted = 0;\n        witnesses = 0;\n        sumConvergeVal = 0.0;\n        sumSqConvergeVal = 0.0;\n        sign = highIsCritical ? 
1 : -1;\n        accumulatedValue = null;\n    }\n\n    /**\n     * Given a new result value, add it to the accumulated value and update\n     * convergence statistics.\n     *\n     * @param result The new value being accumulated.\n     */\n    @Override\n    public void accept(R result) {\n        accumulateValue(result);\n        double value = getConvergingValue(result);\n        sumConvergeVal += value;\n        sumSqConvergeVal += value * value;\n        valuesAccepted++;\n\n        if (valuesAccepted >= minValuesAccepted) {\n            // note that using the last seen value in the deviation dampens its effect\n\n            // floating point comparisons!\n            if (sign * (value - getMean()) + 1e-6 > ALPHA * getDeviation()) {\n                witnesses++;\n            }\n        }\n    }\n\n    /**\n     * @return the number of values accepted until now.\n     */\n    @Override\n    public int getValuesAccepted() {\n        return valuesAccepted;\n    }\n\n    /**\n     * @return 'true' if the accumulated value has converged, 'false' otherwise.\n     */\n    @Override\n    public boolean isConverged() {\n        return witnesses >= convergenceThreshold;\n    }\n\n    /**\n     * @return the accumulated value.\n     */\n    @Override\n    public R getAccumulatedValue() {\n        return accumulatedValue;\n    }\n\n    /**\n     * Given a new result value, compute its converging scalar value.\n     *\n     * @param result A new result value computed by a Random Cut Tree.\n     * @return the scalar value used to measure convergence for this result type.\n     */\n    protected abstract double getConvergingValue(R result);\n\n    /**\n     * Add the new result to the accumulated value.\n     *\n     * @param result The new result to add to the accumulated value.\n     */\n    protected abstract void accumulateValue(R result);\n\n    /**\n     * Return the number of witnesses\n     */\n    public int getWitnesses() {\n        return witnesses;\n    }\n\n   
 /**\n     * @return the mean of the values\n     */\n    public double getMean() {\n        return (valuesAccepted == 0) ? 0 : sumConvergeVal / valuesAccepted;\n    }\n\n    /**\n     * it is possible that valuesAccepted is not large hence applying Bessel\n     * correction\n     * \n     * @return the standard deviation of the accepted values\n     */\n    public double getDeviation() {\n        if (valuesAccepted <= 1) {\n            return 0;\n        }\n\n        double mean = sumConvergeVal / valuesAccepted;\n        double stdev = max(0, sumSqConvergeVal / valuesAccepted - mean * mean);\n\n        stdev = Math.sqrt(valuesAccepted * stdev / (valuesAccepted - 1));\n        return stdev;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/RangeVector.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\n\n/**\n * A RangeVector is used when we want to track a quantity and its upper and\n * lower bounds\n */\npublic class RangeVector {\n\n    public final float[] values;\n\n    /**\n     * An array of values corresponding to the upper ranges in each dimension.\n     */\n    public final float[] upper;\n    /**\n     * An array of values corresponding to the lower ranges in each dimension\n     */\n    public final float[] lower;\n\n    public RangeVector(int dimensions) {\n        checkArgument(dimensions > 0, \"dimensions must be greater than 0\");\n        values = new float[dimensions];\n        upper = new float[dimensions];\n        lower = new float[dimensions];\n    }\n\n    /**\n     * Construct a new RangeVector with the given number of spatial dimensions.\n     * \n     * @param values the values being estimated in a range\n     * @param upper  the higher values of the ranges\n     * @param lower  the lower values in the ranges\n     */\n    public RangeVector(float[] values, float[] upper, float[] lower) {\n        checkArgument(values.length > 0, \" dimensions must be > 0\");\n        checkArgument(values.length == 
upper.length && upper.length == lower.length, \"dimensions must be equal\");\n        for (int i = 0; i < values.length; i++) {\n            checkArgument(upper[i] >= values[i] && values[i] >= lower[i], \"incorrect semantics\");\n        }\n        this.values = Arrays.copyOf(values, values.length);\n        this.upper = Arrays.copyOf(upper, upper.length);\n        this.lower = Arrays.copyOf(lower, lower.length);\n    }\n\n    public RangeVector(float[] values) {\n        checkArgument(values.length > 0, \"dimensions must be > 0 \");\n        this.values = Arrays.copyOf(values, values.length);\n        this.upper = Arrays.copyOf(values, values.length);\n        this.lower = Arrays.copyOf(values, values.length);\n    }\n\n    /**\n     * Create a deep copy of the base RangeVector.\n     *\n     * @param base The RangeVector to copy.\n     */\n    public RangeVector(RangeVector base) {\n        int dimensions = base.values.length;\n        this.values = Arrays.copyOf(base.values, dimensions);\n        this.upper = Arrays.copyOf(base.upper, dimensions);\n        this.lower = Arrays.copyOf(base.lower, dimensions);\n    }\n\n    public void shift(int i, float shift) {\n        checkArgument(i >= 0 && i < values.length, \"incorrect index\");\n        values[i] += shift;\n        // managing precision\n        upper[i] = max(values[i], upper[i] + shift);\n        lower[i] = min(values[i], lower[i] + shift);\n    }\n\n    public void scale(int i, float weight) {\n        checkArgument(i >= 0 && i < values.length, \"incorrect index\");\n        checkArgument(weight >= 0, \" negative weight not permitted\");\n        values[i] = values[i] * weight;\n        // managing precision\n        upper[i] = max(upper[i] * weight, values[i]);\n        lower[i] = min(lower[i] * weight, values[i]);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/SampleSummary.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.util.Weighted.prefixPick;\nimport static java.lang.Math.max;\nimport static java.util.stream.Collectors.toCollection;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class SampleSummary {\n\n    public static double DEFAULT_PERCENTILE = 0.9;\n\n    /**\n     * a collection of summarized points (reminiscent of typical sets from the\n     * perspective of information theory, Cover and Thomas, Chapter 3) which should\n     * be the mean/median of a spatially continuous distribution with central\n     * tendency. 
If the input is a collection of samples that correspond to an union\n     * of two such well separated distributions, for example as in the example data\n     * of RCF paper then the output should be the two corresponding central points.\n     */\n    public float[][] summaryPoints;\n\n    /**\n     * a measure of comparison among the typical points;\n     */\n    public float[] relativeWeight;\n\n    // some measure of the extent of the points\n    public float[][] measure;\n\n    /**\n     * number of samples, often the number of summary\n     */\n    public double weightOfSamples;\n    /**\n     * the global mean\n     */\n    public float[] mean;\n\n    public float[] median;\n\n    /**\n     * This is the global deviation, without any filtering on the TreeSamples\n     */\n    public float[] deviation;\n\n    /**\n     * an upper percentile corresponding to the points, computed agnostic to\n     * dimension\n     */\n    public float[] upper;\n\n    /**\n     * a lower percentile corresponding to the points\n     */\n    public float[] lower;\n\n    public SampleSummary(int dimensions) {\n        this.weightOfSamples = 0;\n        this.summaryPoints = new float[1][];\n        this.summaryPoints[0] = new float[dimensions];\n        this.relativeWeight = new float[1];\n        this.measure = new float[1][];\n        this.measure[0] = new float[dimensions];\n        this.median = new float[dimensions];\n        this.mean = new float[dimensions];\n        this.deviation = new float[dimensions];\n        this.upper = new float[dimensions];\n        this.lower = new float[dimensions];\n    }\n\n    // for older tests\n    public SampleSummary(float[] point) {\n        this(toDoubleArray(point), 1.0f);\n    }\n\n    public SampleSummary(double[] point, float weight) {\n        this(point.length);\n        this.weightOfSamples = weight;\n        this.summaryPoints[0] = toFloatArray(point);\n        this.relativeWeight[0] = weight;\n        this.measure[0] = new 
float[point.length];\n        System.arraycopy(this.summaryPoints[0], 0, this.median, 0, point.length);\n        System.arraycopy(this.summaryPoints[0], 0, this.mean, 0, point.length);\n        System.arraycopy(this.summaryPoints[0], 0, this.upper, 0, point.length);\n        System.arraycopy(this.summaryPoints[0], 0, this.lower, 0, point.length);\n    }\n\n    void addTypical(float[][] summaryPoints, float[] relativeWeight, float[][] measure) {\n        checkArgument(summaryPoints.length == relativeWeight.length, \"incorrect lengths of fields\");\n        checkArgument(summaryPoints.length == measure.length, \"incorrect lengths of fields\");\n        if (summaryPoints.length > 0) {\n            int dimension = summaryPoints[0].length;\n            this.summaryPoints = new float[summaryPoints.length][];\n            this.measure = new float[summaryPoints.length][];\n            for (int i = 0; i < summaryPoints.length; i++) {\n                checkArgument(dimension == summaryPoints[i].length, \" incorrect length points\");\n                checkArgument(dimension == measure[i].length, \" incorrect length points\");\n                this.summaryPoints[i] = Arrays.copyOf(summaryPoints[i], dimension);\n                this.measure[i] = Arrays.copyOf(measure[i], dimension);\n            }\n            this.relativeWeight = Arrays.copyOf(relativeWeight, relativeWeight.length);\n        }\n    }\n\n    public SampleSummary(List<Weighted<float[]>> points, SampleSummary clusters) {\n        this(points, DEFAULT_PERCENTILE);\n        this.addTypical(clusters.summaryPoints, clusters.relativeWeight, clusters.measure);\n    }\n\n    public SampleSummary(List<Weighted<float[]>> points, float[][] summaryPoints, float[] relativeWeight,\n            float[] measure, double percentile) {\n        this(points, percentile);\n        float[][] transformedMeasure = new float[measure.length][];\n        double factor = 0;\n        for (int j = 0; j < this.upper.length; j++) {\n          
  factor += max(0, this.upper[j] - this.lower[j]);\n        }\n        for (int i = 0; i < measure.length; i++) {\n            transformedMeasure[i] = new float[this.upper.length];\n            for (int j = 0; j < this.upper.length; j++) {\n                transformedMeasure[i][j] = (factor == 0) ? 0\n                        : (float) (max(0, this.upper[j] - this.lower[j]) * measure[i] / factor);\n            }\n        }\n        this.addTypical(summaryPoints, relativeWeight, transformedMeasure);\n    }\n\n    public SampleSummary(List<Weighted<float[]>> points, float[][] summaryPoints, float[] relativeWeight,\n            float[] measure) {\n        this(points, summaryPoints, relativeWeight, measure, DEFAULT_PERCENTILE);\n    }\n\n    public SampleSummary(List<Weighted<float[]>> points) {\n        this(points, DEFAULT_PERCENTILE);\n    }\n\n    /**\n     * constructs a summary of the weighted points based on the percentile envelopes\n     * by picking 1-percentile and percentile fractional rank of the items useful in\n     * surfacing a robust range of values\n     * \n     * @param points     weighted points\n     * @param percentile value corresponding to bounds\n     */\n    public SampleSummary(List<Weighted<float[]>> points, double percentile) {\n        checkArgument(points.size() > 0, \"point list cannot be empty\");\n        checkArgument(percentile > 0.5, \" has to be more than 0.5\");\n        checkArgument(percentile < 1.0, \"has to be less than 1\");\n        int dimension = points.get(0).index.length;\n        double[] coordinateSum = new double[dimension];\n        double[] coordinateSumSquare = new double[dimension];\n        double totalWeight = 0;\n        for (Weighted<float[]> e : points) {\n            checkArgument(e.index.length == dimension, \"points have to be of same length\");\n            float weight = e.weight;\n            checkArgument(!Float.isNaN(weight), \" weights must be non-NaN values \");\n            
checkArgument(Float.isFinite(weight), \" weights must be finite \");\n            checkArgument(weight >= 0, \"weights have to be non-negative\");\n            totalWeight += weight;\n            for (int i = 0; i < dimension; i++) {\n                int index = i;\n                checkArgument(!Float.isNaN(e.index[i]),\n                        () -> \" improper input, in coordinate \" + index + \", must be non-NaN values\");\n                checkArgument(Float.isFinite(e.index[i]),\n                        () -> \" improper input, in coordinate \" + index + \", must be finite values\");\n                coordinateSum[i] += e.index[i] * weight;\n                coordinateSumSquare[i] += e.index[i] * e.index[i] * weight;\n            }\n        }\n        checkArgument(totalWeight > 0, \" weights cannot all be 0\");\n        this.weightOfSamples = totalWeight;\n        this.mean = new float[dimension];\n        this.deviation = new float[dimension];\n        this.median = new float[dimension];\n        this.upper = new float[dimension];\n        this.lower = new float[dimension];\n\n        for (int i = 0; i < dimension; i++) {\n            this.mean[i] = (float) (coordinateSum[i] / totalWeight);\n            this.deviation[i] = (float) Math.sqrt(max(0.0, coordinateSumSquare[i] / totalWeight - mean[i] * mean[i]));\n        }\n        for (int i = 0; i < dimension; i++) {\n            int index = i;\n            ArrayList<Weighted<Float>> list = points.stream().map(e -> new Weighted<>(e.index[index], e.weight))\n                    .collect(toCollection(ArrayList::new));\n            list.sort((o1, o2) -> Float.compare(o1.index, o2.index));\n            this.lower[i] = prefixPick(list, totalWeight * (1.0 - percentile)).index;\n            this.median[i] = prefixPick(list, totalWeight / 2.0).index;\n            this.upper[i] = prefixPick(list, totalWeight * percentile).index;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/returntypes/TimedRangeVector.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\n\n/**\n * ThresholdedRandomCutForests handle time internally and thus the forecast of\n * values also correpond to the next sequential timestamps. The RangeVector\n * corresponds to the forecast from RCF (based on the inverse of the\n * transformation applied by TRCF as it invokes RCF). 
The timeStamps correspond\n * to the predicted timestamps The upper and lower ranges are also present\n * similar to RangeVector\n *\n * Note that if the timestamps cannot be predicted meaningfully (for example in\n * STREAMING_IMPUTE mode), then those entries would be 0\n */\npublic class TimedRangeVector {\n\n    public final RangeVector rangeVector;\n\n    public final long[] timeStamps;\n\n    public final long[] upperTimeStamps;\n\n    public final long[] lowerTimeStamps;\n\n    public TimedRangeVector(int dimensions, int horizon) {\n        checkArgument(dimensions > 0, \"dimensions must be greater than 0\");\n        checkArgument(horizon > 0, \"horizon must be greater than 0\");\n        checkArgument(dimensions % horizon == 0, \"horizon should divide dimensions\");\n        rangeVector = new RangeVector(dimensions);\n        timeStamps = new long[horizon];\n        upperTimeStamps = new long[horizon];\n        lowerTimeStamps = new long[horizon];\n    }\n\n    public TimedRangeVector(RangeVector rangeVector, long[] timestamps, long[] upperTimeStamps,\n            long[] lowerTimeStamps) {\n        checkArgument(rangeVector.values.length % timestamps.length == 0,\n                \" dimensions must be be divisible by horizon\");\n        checkArgument(timestamps.length == upperTimeStamps.length && upperTimeStamps.length == lowerTimeStamps.length,\n                \"horizon must be equal\");\n        this.rangeVector = new RangeVector(rangeVector);\n        for (int i = 0; i < timestamps.length; i++) {\n            checkArgument(upperTimeStamps[i] >= timestamps[i] && timestamps[i] >= lowerTimeStamps[i],\n                    \"incorrect semantics\");\n        }\n        this.timeStamps = Arrays.copyOf(timestamps, timestamps.length);\n        this.lowerTimeStamps = Arrays.copyOf(lowerTimeStamps, lowerTimeStamps.length);\n        this.upperTimeStamps = Arrays.copyOf(upperTimeStamps, upperTimeStamps.length);\n    }\n\n    public TimedRangeVector(TimedRangeVector 
base) {\n        this.rangeVector = new RangeVector(base.rangeVector);\n        this.timeStamps = Arrays.copyOf(base.timeStamps, base.timeStamps.length);\n        this.lowerTimeStamps = Arrays.copyOf(base.lowerTimeStamps, base.lowerTimeStamps.length);\n        this.upperTimeStamps = Arrays.copyOf(base.upperTimeStamps, base.upperTimeStamps.length);\n    }\n\n    /**\n     * Create a deep copy of the base RangeVector.\n     *\n     * @param base The RangeVector to copy.\n     */\n    public TimedRangeVector(RangeVector base, int horizon) {\n        checkArgument(base.values.length % horizon == 0, \"incorrect lengths\");\n        this.rangeVector = new RangeVector(base);\n        this.timeStamps = new long[horizon];\n        this.upperTimeStamps = new long[horizon];\n        this.lowerTimeStamps = new long[horizon];\n    }\n\n    public void shiftTime(int i, long shift) {\n        checkArgument(i >= 0 && i < timeStamps.length, \"incorrect index\");\n        timeStamps[i] += shift;\n        // managing precision\n        upperTimeStamps[i] = max(timeStamps[i], upperTimeStamps[i] + shift);\n        lowerTimeStamps[i] = min(timeStamps[i], lowerTimeStamps[i] + shift);\n    }\n\n    public void scaleTime(int i, double weight) {\n        checkArgument(i >= 0 && i < timeStamps.length, \"incorrect index\");\n        checkArgument(weight > 0, \" negative weight not permitted\");\n        timeStamps[i] = (long) (timeStamps[i] * weight);\n        // managing precision\n        upperTimeStamps[i] = max((long) (upperTimeStamps[i] * weight), timeStamps[i]);\n        lowerTimeStamps[i] = min((long) (lowerTimeStamps[i] * weight), timeStamps[i]);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/AnomalyAttributionRunner.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.nio.charset.StandardCharsets;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.returntypes.DiVector;\n\n/**\n * A command-line application that computes anomaly attribution. Points are read\n * from STDIN and output is written to STDOUT. Output consists of the original\n * input point with the anomaly attribution vector appended.\n */\npublic class AnomalyAttributionRunner extends SimpleRunner {\n\n    public AnomalyAttributionRunner() {\n        super(AnomalyAttributionRunner.class.getName(),\n                \"Compute directional anomaly scores from the input rows and append them to the output rows.\",\n                AnomalyAttributionTransformer::new);\n    }\n\n    public static void main(String... args) throws IOException {\n        AnomalyAttributionRunner runner = new AnomalyAttributionRunner();\n        runner.parse(args);\n        System.out.println(\"Reading from stdin... 
(Ctrl-c to exit)\");\n        runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),\n                new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));\n        System.out.println(\"Done.\");\n    }\n\n    public static class AnomalyAttributionTransformer implements LineTransformer {\n        private final RandomCutForest forest;\n\n        public AnomalyAttributionTransformer(RandomCutForest forest) {\n            this.forest = forest;\n        }\n\n        @Override\n        public List<String> getResultValues(double... point) {\n            DiVector attribution = forest.getAnomalyAttribution(point);\n            forest.update(point);\n\n            List<String> result = new ArrayList<>(2 * forest.getDimensions());\n            for (int i = 0; i < attribution.getDimensions(); i++) {\n                result.add(Double.toString(attribution.low[i]));\n                result.add(Double.toString(attribution.high[i]));\n            }\n\n            return result;\n        }\n\n        @Override\n        public List<String> getEmptyResultValue() {\n            List<String> result = new ArrayList<>(2 * forest.getDimensions());\n            for (int i = 0; i < 2 * forest.getDimensions(); i++) {\n                result.add(\"NA\");\n            }\n            return result;\n        }\n\n        @Override\n        public List<String> getResultColumnNames() {\n            List<String> result = new ArrayList<>(2 * forest.getDimensions());\n            for (int i = 0; i < forest.getDimensions(); i++) {\n                result.add(\"anomaly_low_\" + i);\n                result.add(\"anomaly_high_\" + i);\n            }\n\n            return result;\n        }\n\n        @Override\n        public RandomCutForest getForest() {\n            return forest;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/AnomalyScoreRunner.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.nio.charset.StandardCharsets;\nimport java.util.Collections;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.RandomCutForest;\n\n/**\n * A command-line application that computes anomaly scores. Points are read from\n * STDIN and output is written to STDOUT. Output consists of the original input\n * point with the anomaly score appended.\n */\npublic class AnomalyScoreRunner extends SimpleRunner {\n\n    public AnomalyScoreRunner() {\n        super(AnomalyScoreRunner.class.getName(),\n                \"Compute scalar anomaly scores from the input rows and append them to the output rows.\",\n                AnomalyScoreTransformer::new);\n    }\n\n    public static void main(String... args) throws IOException {\n        AnomalyScoreRunner runner = new AnomalyScoreRunner();\n        runner.parse(args);\n        System.out.println(\"Reading from stdin... 
(Ctrl-c to exit)\");\n        runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),\n                new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));\n        System.out.println(\"Done.\");\n    }\n\n    public static class AnomalyScoreTransformer implements LineTransformer {\n        private final RandomCutForest forest;\n\n        public AnomalyScoreTransformer(RandomCutForest forest) {\n            this.forest = forest;\n        }\n\n        @Override\n        public List<String> getResultValues(double... point) {\n            double score = forest.getAnomalyScore(point);\n            forest.update(point);\n            return Collections.singletonList(Double.toString(score));\n        }\n\n        @Override\n        public List<String> getEmptyResultValue() {\n            return Collections.singletonList(\"NA\");\n        }\n\n        @Override\n        public List<String> getResultColumnNames() {\n            return Collections.singletonList(\"anomaly_score\");\n        }\n\n        @Override\n        public RandomCutForest getForest() {\n            return forest;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/ArgumentParser.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.function.Consumer;\nimport java.util.function.Function;\n\n/**\n * A utility class for parsing command-line arguments.\n */\npublic class ArgumentParser {\n\n    public static final String ARCHIVE_NAME = \"randomcutforest-core-1.0.jar\";\n    private final String runnerClass;\n    private final String runnerDescription;\n    private final Map<String, Argument<?>> shortFlags;\n    private final Map<String, Argument<?>> longFlags;\n    private final IntegerArgument numberOfTrees;\n    private final IntegerArgument sampleSize;\n    private final IntegerArgument windowSize;\n    private final IntegerArgument shingleSize;\n    private final BooleanArgument shingleCyclic;\n    private final StringArgument delimiter;\n    private final BooleanArgument headerRow;\n    private final IntegerArgument randomSeed;\n\n    /**\n     * Create a new ArgumentParser.The runner class and runner description will be\n     * used in help text.\n     * \n     * @param runnerClass       The name of the runner class where this argument\n     *                          parser is being invoked.\n     * @param 
runnerDescription A description of the runner class where this\n     *                          argument parser is being invoked.\n     */\n    public ArgumentParser(String runnerClass, String runnerDescription) {\n        this.runnerClass = runnerClass;\n        this.runnerDescription = runnerDescription;\n        shortFlags = new HashMap<>();\n        longFlags = new HashMap<>();\n\n        numberOfTrees = new IntegerArgument(\"-n\", \"--number-of-trees\", \"Number of trees to use in the forest.\", 100,\n                n -> checkArgument(n > 0, \"number of trees should be greater than 0\"));\n\n        addArgument(numberOfTrees);\n\n        sampleSize = new IntegerArgument(\"-s\", \"--sample-size\", \"Number of points to keep in sample for each tree.\",\n                256, n -> checkArgument(n > 0, \"sample size should be greater than 0\"));\n\n        addArgument(sampleSize);\n\n        windowSize = new IntegerArgument(\"-w\", \"--window-size\", \"Window size of the sample or 0 for no window.\", 0,\n                n -> checkArgument(n > 0, \"window size should be greater than 0\"));\n\n        addArgument(windowSize);\n\n        shingleSize = new IntegerArgument(\"-g\", \"--shingle-size\", \"Shingle size to use.\", 1,\n                n -> checkArgument(n > 0, \"shingle size should be greater than 0\"));\n\n        addArgument(shingleSize);\n\n        shingleCyclic = new BooleanArgument(\"-c\", \"--shingle-cyclic\",\n                \"Set to 'true' to use cyclic shingles instead of linear shingles.\", false);\n\n        addArgument(shingleCyclic);\n\n        delimiter = new StringArgument(\"-d\", \"--delimiter\", \"The character or string used as a field delimiter.\", \",\");\n\n        addArgument(delimiter);\n\n        headerRow = new BooleanArgument(null, \"--header-row\", \"Set to 'true' if the data contains a header row.\",\n                false);\n\n        addArgument(headerRow);\n\n        randomSeed = new IntegerArgument(null, \"--random-seed\", 
\"Random seed to use in the Random Cut Forest\", 42);\n\n        addArgument(randomSeed);\n    }\n\n    /**\n     * Add a new argument to this argument parser.\n     * \n     * @param argument An Argument instance for a command-line argument that should\n     *                 be parsed.\n     */\n    protected void addArgument(Argument<?> argument) {\n        checkNotNull(argument, \"argument should not be null\");\n\n        checkArgument(argument.getShortFlag() == null || !shortFlags.containsKey(argument.getShortFlag()),\n                String.format(\"An argument mapping already exists for %s\", argument.getShortFlag()));\n\n        checkArgument(!longFlags.containsKey(argument.getLongFlag()),\n                String.format(\"An argument mapping already exists for %s\", argument.getLongFlag()));\n\n        if (argument.getShortFlag() != null) {\n            shortFlags.put(argument.getShortFlag(), argument);\n        }\n\n        longFlags.put(argument.getLongFlag(), argument);\n    }\n\n    /**\n     * Remove the argument with the given long flag from help messages. This allows\n     * subclasses to suppress arguments as needed. The argument will still exist in\n     * this object with its default value.\n     *\n     * @param longFlag The long flag corresponding to the argument being removed\n     */\n    protected void removeArgument(String longFlag) {\n        Argument<?> argument = longFlags.get(longFlag);\n        if (argument != null) {\n            longFlags.remove(longFlag);\n            shortFlags.remove(argument.getShortFlag());\n        }\n    }\n\n    /**\n     * Parse the given array of command-line arguments.\n     * \n     * @param arguments An array of command-line arguments.\n     */\n    public void parse(String... 
arguments) {\n        int i = 0;\n        while (i < arguments.length) {\n            String flag = arguments[i];\n\n            try {\n                if (shortFlags.containsKey(flag)) {\n                    shortFlags.get(flag).parse(arguments[++i]);\n                } else if (longFlags.containsKey(flag)) {\n                    longFlags.get(flag).parse(arguments[++i]);\n                } else if (\"-h\".equals(flag) || \"--help\".equals(flag)) {\n                    printUsage();\n                    Runtime.getRuntime().exit(0);\n                } else {\n                    throw new IllegalArgumentException(\"Unknown argument: \" + flag);\n                }\n            } catch (Exception e) {\n                printUsageAndExit(\"%s: %s\", e.getClass().getName(), e.getMessage());\n            }\n\n            i++;\n        }\n    }\n\n    /**\n     * Print a usage message to STDOUT.\n     */\n    public void printUsage() {\n        System.out.println(\n                String.format(\"Usage: java -cp %s %s [options] < input_file > output_file\", ARCHIVE_NAME, runnerClass));\n        System.out.println();\n        System.out.println(runnerDescription);\n        System.out.println();\n        System.out.println(\"Options:\");\n\n        longFlags.values().stream().map(Argument::getHelpMessage).sorted()\n                .forEach(msg -> System.out.println(\"\\t\" + msg));\n\n        System.out.println();\n        System.out.println(\"\\t--help, -h: Print this help message and exit.\");\n    }\n\n    /**\n     * Print an error message, the usage message, and exit the application.\n     * \n     * @param errorMessage  An error message to show the user.\n     * @param formatObjects An array of format objects that will be interpolated\n     *                      into the error message using {@link String#format}.\n     */\n    public void printUsageAndExit(String errorMessage, Object... 
formatObjects) {\n        System.err.println(\"Error: \" + String.format(errorMessage, formatObjects));\n        printUsage();\n        System.exit(1);\n    }\n\n    /**\n     * @return the user-specified value of the number-of-trees parameter.\n     */\n    public int getNumberOfTrees() {\n        return numberOfTrees.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the sample-size parameter.\n     */\n    public int getSampleSize() {\n        return sampleSize.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the window-size parameter\n     */\n    public int getWindowSize() {\n        return windowSize.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the time-decay parameter\n     */\n    public double getTimeDecay() {\n        if (getWindowSize() > 0) {\n            return 1.0 / getWindowSize();\n        } else {\n            return 0.0;\n        }\n    }\n\n    /**\n     * @return the user-specified value of the shingle-size parameter\n     */\n    public int getShingleSize() {\n        return shingleSize.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the shingle-cyclic parameter\n     */\n    public boolean getShingleCyclic() {\n        return shingleCyclic.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the delimiter parameter\n     */\n    public String getDelimiter() {\n        return delimiter.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the header-row parameter\n     */\n    public boolean getHeaderRow() {\n        return headerRow.getValue();\n    }\n\n    /**\n     * @return the user-specified value of the random-seed parameter\n     */\n    public int getRandomSeed() {\n        return randomSeed.getValue();\n    }\n\n    public static class Argument<T> {\n\n        private final String shortFlag;\n        private final String longFlag;\n        private final String description;\n        private 
final T defaultValue;\n        private final Function<String, T> parseFunction;\n        private final Consumer<T> validateFunction;\n        private T value;\n\n        public Argument(String shortFlag, String longFlag, String description, T defaultValue,\n                Function<String, T> parseFunction, Consumer<T> validateFunction) {\n            this.shortFlag = shortFlag;\n            this.longFlag = longFlag;\n            this.description = description;\n            this.defaultValue = defaultValue;\n            this.parseFunction = parseFunction;\n            this.validateFunction = validateFunction;\n            value = defaultValue;\n        }\n\n        public Argument(String shortFlag, String longFlag, String description, T defaultValue,\n                Function<String, T> parseFunction) {\n            this(shortFlag, longFlag, description, defaultValue, parseFunction, t -> {\n            });\n        }\n\n        public String getShortFlag() {\n            return shortFlag;\n        }\n\n        public String getLongFlag() {\n            return longFlag;\n        }\n\n        public String getDescription() {\n            return description;\n        }\n\n        public T getDefaultValue() {\n            return defaultValue;\n        }\n\n        public String getHelpMessage() {\n            if (shortFlag != null) {\n                return String.format(\"%s, %s: %s (default: %s)\", longFlag, shortFlag, description, defaultValue);\n            } else {\n                return String.format(\"%s: %s (default: %s)\", longFlag, description, defaultValue);\n            }\n        }\n\n        public void parse(String string) {\n            value = parseFunction.apply(string);\n            validateFunction.accept(value);\n        }\n\n        public T getValue() {\n            return value;\n        }\n    }\n\n    public static class StringArgument extends Argument<String> {\n        public StringArgument(String shortFlag, String longFlag, String 
description, String defaultValue,\n                Consumer<String> validateFunction) {\n            super(shortFlag, longFlag, description, defaultValue, x -> x, validateFunction);\n        }\n\n        public StringArgument(String shortFlag, String longFlag, String description, String defaultValue) {\n            super(shortFlag, longFlag, description, defaultValue, x -> x);\n        }\n    }\n\n    public static class BooleanArgument extends Argument<Boolean> {\n        public BooleanArgument(String shortFlag, String longFlag, String description, boolean defaultValue) {\n            super(shortFlag, longFlag, description, defaultValue, Boolean::parseBoolean);\n        }\n    }\n\n    public static class IntegerArgument extends Argument<Integer> {\n        public IntegerArgument(String shortFlag, String longFlag, String description, int defaultValue,\n                Consumer<Integer> validateFunction) {\n            super(shortFlag, longFlag, description, defaultValue, Integer::parseInt, validateFunction);\n        }\n\n        public IntegerArgument(String shortFlag, String longFlag, String description, int defaultValue) {\n            super(shortFlag, longFlag, description, defaultValue, Integer::parseInt);\n        }\n    }\n\n    public static class DoubleArgument extends Argument<Double> {\n        public DoubleArgument(String shortFlag, String longFlag, String description, double defaultValue,\n                Consumer<Double> validateFunction) {\n            super(shortFlag, longFlag, description, defaultValue, Double::parseDouble, validateFunction);\n        }\n\n        public DoubleArgument(String shortFlag, String longFlag, String description, double defaultValue) {\n            super(shortFlag, longFlag, description, defaultValue, Double::parseDouble);\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/ImputeRunner.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\nimport java.util.StringJoiner;\n\n/**\n * A command-line application that imputes missing values. Points are read from\n * STDIN and output is written to STDOUT. One output point is written for each\n * input point. If the input point does not contain any missing value\n * indicators, then it is copied as-is to the output. If an input point contains\n * one or more missing value indicators, then the missing values are imputed and\n * the imputed point is written to the output.\n */\npublic class ImputeRunner extends SimpleRunner {\n\n    private String missingValueMarker;\n    private int numberOfMissingValues;\n    private int[] missingIndexes;\n\n    public ImputeRunner() {\n        super(new ImputeArgumentParser(), UpdateOnlyTransformer::new);\n    }\n\n    public static void main(String... args) throws IOException {\n        ImputeRunner runner = new ImputeRunner();\n        runner.parse(args);\n        System.out.println(\"Reading from stdin... 
(Ctrl-c to exit)\");\n        runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),\n                new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));\n        System.out.println(\"Done.\");\n    }\n\n    @Override\n    protected void prepareAlgorithm(int dimensions) {\n        super.prepareAlgorithm(dimensions);\n        missingIndexes = new int[dimensions];\n        missingValueMarker = ((ImputeArgumentParser) argumentParser).getMissingValueMarker();\n    }\n\n    @Override\n    protected void processLine(String[] values, PrintWriter out) {\n\n        numberOfMissingValues = 0;\n        for (int i = 0; i < getPointSize(); i++) {\n            if (missingValueMarker.equals(values[i])) {\n                missingIndexes[numberOfMissingValues++] = i;\n                values[i] = \"0\";\n            }\n        }\n\n        if (numberOfMissingValues > 0) {\n            parsePoint(values);\n            double[] imputedPoint = algorithm.getForest().imputeMissingValues(pointBuffer, numberOfMissingValues,\n                    missingIndexes);\n            StringJoiner joiner = new StringJoiner(argumentParser.getDelimiter());\n            Arrays.stream(imputedPoint).mapToObj(Double::toString).forEach(joiner::add);\n            out.println(joiner.toString());\n        } else {\n            super.processLine(values, out);\n        }\n    }\n\n    public static class ImputeArgumentParser extends ArgumentParser {\n\n        private final StringArgument missingValueMarker;\n\n        public ImputeArgumentParser() {\n            super(ImputeRunner.class.getName(),\n                    \"Read rows with missing values and write rows with missing values imputed.\");\n\n            missingValueMarker = new StringArgument(null, \"--missing-value-marker\",\n                    \"String used to represent a missing value in the data.\", \"NA\");\n\n            addArgument(missingValueMarker);\n\n            
removeArgument(\"--shingle-size\");\n            removeArgument(\"--shingle-cyclic\");\n        }\n\n        public String getMissingValueMarker() {\n            return missingValueMarker.getValue();\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/LineTransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.util.List;\n\nimport com.amazon.randomcutforest.RandomCutForest;\n\n/**\n * This interface is used by SimpleRunner to transform input lines into output\n * lines.\n */\npublic interface LineTransformer {\n\n    /**\n     * For the given parsed input point, return a list of string values that should\n     * be written as output. The list of strings will be joined together using the\n     * user-specified delimiter.\n     * \n     * @param point A point value that was parsed from the input stream.\n     * @return a list of string values that should be written as output.\n     */\n    List<String> getResultValues(double[] point);\n\n    /**\n     * @return a list of string values that should be written to the output when\n     *         processing a line if there is no input point available. This method\n     *         is invoked when shingling is enabled before the first shingle is\n     *         full.\n     */\n    List<String> getEmptyResultValue();\n\n    /**\n     * @return a list of column names to write to the output if headers are enabled.\n     */\n    List<String> getResultColumnNames();\n\n    /**\n     * @return the RandomCutForest instance which is being used internally to\n     *         process lines.\n     */\n    RandomCutForest getForest();\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/SimpleDensityRunner.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStreamReader;\nimport java.io.OutputStreamWriter;\nimport java.io.PrintWriter;\nimport java.nio.charset.StandardCharsets;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.returntypes.DiVector;\n\n/**\n * A command-line application that computes directional density. Points are read\n * from STDIN and output is written to STDOUT. Output consists of the original\n * input point with the directional density vector appended.\n */\npublic class SimpleDensityRunner extends SimpleRunner {\n\n    public SimpleDensityRunner() {\n        super(SimpleDensityRunner.class.getName(),\n                \"Compute directional density vectors from the input rows and append them to the output rows.\",\n                SimpleDensityRunner.SimpleDensityTransformer::new);\n    }\n\n    public static void main(String... args) throws IOException {\n        SimpleDensityRunner runner = new SimpleDensityRunner();\n        runner.parse(args);\n        System.out.println(\"Reading from stdin... 
(Ctrl-c to exit)\");\n        runner.run(new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8)),\n                new PrintWriter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8)));\n        System.out.println(\"Done.\");\n    }\n\n    public static class SimpleDensityTransformer implements LineTransformer {\n        private final RandomCutForest forest;\n\n        public SimpleDensityTransformer(RandomCutForest forest) {\n            this.forest = forest;\n        }\n\n        @Override\n        public List<String> getResultValues(double... point) {\n            DiVector densityFactors = forest.getSimpleDensity(point).getDirectionalDensity();\n            forest.update(point);\n\n            List<String> result = new ArrayList<>(2 * forest.getDimensions());\n            for (int i = 0; i < forest.getDimensions(); i++) {\n                result.add(String.format(\"%f\", densityFactors.high[i]));\n                result.add(String.format(\"%f\", densityFactors.low[i]));\n            }\n            return result;\n        }\n\n        @Override\n        public List<String> getEmptyResultValue() {\n            List<String> result = new ArrayList<>(2 * forest.getDimensions());\n            for (int i = 0; i < 2 * forest.getDimensions(); i++) {\n                result.add(\"NA\");\n            }\n            return result;\n        }\n\n        @Override\n        public List<String> getResultColumnNames() {\n            List<String> result = new ArrayList<>(2 * forest.getDimensions());\n            for (int i = 0; i < forest.getDimensions(); i++) {\n                result.add(String.format(\"prob_mass_%d_up\", i));\n                result.add(String.format(\"prob_mass_%d_down\", i));\n            }\n            return result;\n        }\n\n        @Override\n        public RandomCutForest getForest() {\n            return forest;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/SimpleRunner.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.StringJoiner;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.util.ShingleBuilder;\n\n/**\n * A simple command-line application that parses command-line arguments, creates\n * a RandomCutForest instance based on those arguments, reads values from STDIN\n * and writes results to STDOUT.\n */\npublic class SimpleRunner {\n\n    protected final ArgumentParser argumentParser;\n    protected final Function<RandomCutForest, LineTransformer> algorithmInitializer;\n    protected LineTransformer algorithm;\n    protected ShingleBuilder shingleBuilder;\n    protected double[] pointBuffer;\n    protected double[] shingleBuffer;\n    protected int lineNumber;\n\n    /**\n     * Create a new SimpleRunner.\n     * \n     * @param runnerClass          The name of the runner class. This will be\n     *                             displayed in the help text.\n     * @param runnerDescription    A description of the runner class. 
This will be\n     *                             displayed in the help text.\n     * @param algorithmInitializer A factory method to create a new LineTransformer\n     *                             instance from a RandomCutForest.\n     */\n    public SimpleRunner(String runnerClass, String runnerDescription,\n            Function<RandomCutForest, LineTransformer> algorithmInitializer) {\n        this(new ArgumentParser(runnerClass, runnerDescription), algorithmInitializer);\n    }\n\n    /**\n     * Create a new SimpleRunner.\n     * \n     * @param argumentParser       An argument parser that will be used by this\n     *                             runner to parse command-line arguments.\n     * @param algorithmInitializer A factory method to create a new LineTransformer\n     *                             instance from a RandomCutForest.\n     */\n    public SimpleRunner(ArgumentParser argumentParser,\n            Function<RandomCutForest, LineTransformer> algorithmInitializer) {\n        this.argumentParser = argumentParser;\n        this.algorithmInitializer = algorithmInitializer;\n    }\n\n    /**\n     * Parse the given command-line arguments.\n     * \n     * @param arguments An array of command-line arguments.\n     */\n    public void parse(String... 
arguments) {\n        argumentParser.parse(arguments);\n    }\n\n    /**\n     * Read data from an input stream, apply the desired transformation, and write\n     * the result to an output stream.\n     * \n     * @param in  An input stream where input values will be read.\n     * @param out An output stream where the result values will be written.\n     * @throws IOException if IO errors are encountered during reading or writing.\n     */\n    public void run(BufferedReader in, PrintWriter out) throws IOException {\n        String line;\n        while ((line = in.readLine()) != null) {\n            lineNumber++;\n            String[] values = line.split(argumentParser.getDelimiter());\n\n            if (pointBuffer == null) {\n                prepareAlgorithm(values.length);\n            }\n\n            if (lineNumber == 1 && argumentParser.getHeaderRow()) {\n                writeHeader(values, out);\n                continue;\n            }\n\n            processLine(values, out);\n        }\n\n        finish(out);\n        out.flush();\n    }\n\n    /**\n     * Set up the internal RandomCutForest instance and line transformer.\n     * \n     * @param dimensions The number of dimensions in the input data.\n     */\n    protected void prepareAlgorithm(int dimensions) {\n        pointBuffer = new double[dimensions];\n        shingleBuilder = new ShingleBuilder(dimensions, argumentParser.getShingleSize(),\n                argumentParser.getShingleCyclic());\n        shingleBuffer = new double[shingleBuilder.getShingledPointSize()];\n\n        RandomCutForest forest = RandomCutForest.builder().numberOfTrees(argumentParser.getNumberOfTrees())\n                .sampleSize(argumentParser.getSampleSize()).dimensions(shingleBuilder.getShingledPointSize())\n                .timeDecay(argumentParser.getTimeDecay()).randomSeed(argumentParser.getRandomSeed()).build();\n\n        algorithm = algorithmInitializer.apply(forest);\n    }\n\n    /**\n     * Write a header row to 
the output stream.\n     * \n     * @param values The array of values that are used to create the header. These\n     *               values will be joined together using the user-specified\n     *               delimiter.\n     * @param out    The output stream where the header will be written.\n     */\n    protected void writeHeader(String[] values, PrintWriter out) {\n        StringJoiner joiner = new StringJoiner(argumentParser.getDelimiter());\n        Arrays.stream(values).forEach(joiner::add);\n        algorithm.getResultColumnNames().forEach(joiner::add);\n        out.println(joiner.toString());\n    }\n\n    /**\n     * Process a single line of input data and write the result to the output\n     * stream.\n     * \n     * @param values An array of string values taken from the input stream. These\n     *               values will be parsed into an array of doubles before being\n     *               transformed and written to the output stream.\n     * @param out    The output stream where the transformed line will be written.\n     */\n    protected void processLine(String[] values, PrintWriter out) {\n        if (values.length != pointBuffer.length) {\n            throw new IllegalArgumentException(\n                    String.format(\"Wrong number of values on line %d. 
Expected %d but found %d.\", lineNumber,\n                            pointBuffer.length, values.length));\n        }\n\n        parsePoint(values);\n        shingleBuilder.addPoint(pointBuffer);\n\n        List<String> result;\n        if (shingleBuilder.isFull()) {\n            shingleBuilder.getShingle(shingleBuffer);\n            result = algorithm.getResultValues(shingleBuffer);\n        } else {\n            result = algorithm.getEmptyResultValue();\n        }\n\n        StringJoiner joiner = new StringJoiner(argumentParser.getDelimiter());\n        Arrays.stream(values).forEach(joiner::add);\n        result.forEach(joiner::add);\n\n        out.println(joiner.toString());\n    }\n\n    /**\n     * Parse the array of string values into doubles and write them to an internal\n     * buffer.\n     * \n     * @param stringValues An array of string-encoded double values.\n     */\n    protected void parsePoint(String[] stringValues) {\n        for (int i = 0; i < pointBuffer.length; i++) {\n            pointBuffer[i] = Double.parseDouble(stringValues[i]);\n        }\n    }\n\n    /**\n     * This method is used to write any final output to the output stream after the\n     * input stream has been fully processed.\n     * \n     * @param out The output stream where additional output text may be written.\n     */\n    protected void finish(PrintWriter out) {\n\n    }\n\n    /**\n     * @return the size of the internal point buffer.\n     */\n    protected int getPointSize() {\n        return pointBuffer != null ? pointBuffer.length : 0;\n    }\n\n    /**\n     * @return the size of the internal shingled point buffer.\n     */\n    protected int getShingleSize() {\n        return shingleBuffer != null ? shingleBuffer.length : 0;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/runner/UpdateOnlyTransformer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport java.util.Collections;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.RandomCutForest;\n\n/**\n * A line transformer that updates its internal RandomCutForest instance but\n * does not produce any output.\n */\npublic class UpdateOnlyTransformer implements LineTransformer {\n\n    private final RandomCutForest forest;\n\n    public UpdateOnlyTransformer(RandomCutForest forest) {\n        this.forest = forest;\n    }\n\n    @Override\n    public List<String> getResultValues(double... point) {\n        forest.update(point);\n        return Collections.emptyList();\n    }\n\n    @Override\n    public List<String> getEmptyResultValue() {\n        return Collections.emptyList();\n    }\n\n    @Override\n    public List<String> getResultColumnNames() {\n        return Collections.emptyList();\n    }\n\n    @Override\n    public RandomCutForest getForest() {\n        return forest;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/sampler/AbstractStreamSampler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\n\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.Config;\n\npublic abstract class AbstractStreamSampler<P> implements IStreamSampler<P> {\n    /**\n     * The decay factor used for generating the weight of the point. 
For greater\n     * values of timeDecay we become more biased in favor of recent points.\n     */\n    protected double timeDecay;\n\n    /**\n     * The sequence index corresponding to the most recent change to\n     * {@code timeDecay}.\n     */\n    protected long mostRecentTimeDecayUpdate = 0;\n\n    /**\n     * most recent timestamp, used to determine lastUpdateOfTimeDecay\n     */\n    protected long maxSequenceIndex = 0;\n\n    /**\n     * The accumulated sum of timeDecay before the last update\n     */\n    protected double accumuluatedTimeDecay = 0;\n\n    /**\n     * The random number generator used in sampling.\n     */\n    protected ReplayableRandom random;\n\n    /**\n     * The point evicted by the last call to {@link #update}, or null if the new\n     * point was not accepted by the sampler.\n     */\n    protected transient ISampled<P> evictedPoint;\n\n    /**\n     * the fraction of points admitted to the sampler even when the sampler can\n     * accept (not full) this helps control the initial behavior of the points and\n     * ensure robustness by ensuring that the samplers do not all sample the initial\n     * set of points.\n     */\n    protected final double initialAcceptFraction;\n\n    /**\n     * a function that computes the probability of admittance of a new value when\n     * the sampler is not full Note that a value can always be admitted if it has a\n     * weight smaller than some sampled value\n     *\n     * this function provides a mechanism for different trees to smoothly diverge --\n     * most previous versions corresponded to initialFraction = 1, and the samplers\n     * only diverge after all of them store all the first sampleSize points. 
In\n     * contrast the method (which can be changed in a subclass) admits the first\n     * initialFraction * sampleSize number of points and then becomes a monotonic\n     * decreasing function.\n     *\n     * This function is supposed to be a parallel to the outputAfter() setting in\n     * the forest which controls how scores are emitted\n     * \n     * @param currentSize the current size of the sampler\n     * @return the probability of admitting the next point\n     */\n    protected double initialAcceptProbability(int currentSize) {\n        if (currentSize < initialAcceptFraction * capacity) {\n            return 1.0;\n        } else if (initialAcceptFraction >= 1.0) {\n            return 0;\n        } else {\n            return 1 - (1.0 * currentSize / capacity - initialAcceptFraction) / (1 - initialAcceptFraction);\n        }\n    }\n\n    /**\n     * The number of points in the sample when full.\n     */\n    protected final int capacity;\n\n    /**\n     * This field is used to temporarily store the result from a call to\n     * {@link #acceptPoint} for use in the subsequent call to {@link #addPoint}.\n     *\n     * Visible for testing.\n     */\n    protected AcceptPointState acceptPointState;\n\n    public boolean acceptPoint(long sequenceIndex) {\n        return acceptPoint(sequenceIndex, 1.0f);\n    }\n\n    public abstract boolean acceptPoint(long sequenceIndex, float weight);\n\n    @Override\n    public abstract void addPoint(P pointIndex);\n\n    public AbstractStreamSampler(Builder<?> builder) {\n        this.capacity = builder.capacity;\n        this.initialAcceptFraction = builder.initialAcceptFraction;\n        this.timeDecay = builder.timeDecay;\n        if (builder.random != null) {\n            this.random = new ReplayableRandom(builder.random);\n        } else {\n            this.random = new ReplayableRandom(builder.randomSeed);\n        }\n    }\n\n    /**\n     * Weight is computed as <code>-log(w(i)) + log(-log(u(i))</code>, 
where\n     *\n     * <ul>\n     * <li><code>w(i) = exp(timeDecay * sequenceIndex)</code></li>\n     * <li><code>u(i)</code> is chosen uniformly from (0, 1)</li>\n     * </ul>\n     * <p>\n     * A higher score means lower priority. So the points with the lower score have\n     * higher chance of making it to the sample.\n     *\n     * @param sequenceIndex The sequenceIndex of the point whose score is being\n     *                      computed.\n     * @param sampleWeight  the positive weight (often 1.0) used in sampling; the\n     *                      weight should be checked in the calling routine\n     * @return the weight value used to define point priority\n     */\n    protected float computeWeight(long sequenceIndex, float sampleWeight) {\n        double randomNumber = 0d;\n        while (randomNumber == 0d) {\n            randomNumber = random.nextDouble();\n        }\n        maxSequenceIndex = (maxSequenceIndex < sequenceIndex) ? sequenceIndex : maxSequenceIndex;\n        return (float) (-(sequenceIndex - mostRecentTimeDecayUpdate) * timeDecay - accumuluatedTimeDecay\n                + Math.log(-Math.log(randomNumber) / sampleWeight));\n    }\n\n    /**\n     * Sets the timeDecay on the fly. 
Note that the assumption is that the times\n     * stamps corresponding to changes to timeDecay and sequenceIndexes are\n     * non-decreasing -- the sequenceIndexes can be out of order among themselves\n     * within two different times when timeDecay was changed.\n     * \n     * @param newTimeDecay the new sampling rate\n     */\n    public void setTimeDecay(double newTimeDecay) {\n        // accumulatedTimeDecay keeps track of adjustments and is zeroed out when the\n        // arrays are exported for some reason\n        accumuluatedTimeDecay += (maxSequenceIndex - mostRecentTimeDecayUpdate) * timeDecay;\n        timeDecay = newTimeDecay;\n        mostRecentTimeDecayUpdate = maxSequenceIndex;\n    }\n\n    /**\n     * @return the time decay value that determines the rate of decay of previously\n     *         seen points. Larger values of time decay indicate a greater bias\n     *         toward recent points. A value of 0 corresponds to a uniform sample\n     *         over the stream.\n     */\n    public double getTimeDecay() {\n        return timeDecay;\n    }\n\n    public long getMaxSequenceIndex() {\n        return maxSequenceIndex;\n    }\n\n    public void setMaxSequenceIndex(long index) {\n        maxSequenceIndex = index;\n    }\n\n    public long getMostRecentTimeDecayUpdate() {\n        return mostRecentTimeDecayUpdate;\n    }\n\n    public void setMostRecentTimeDecayUpdate(long index) {\n        mostRecentTimeDecayUpdate = index;\n    }\n\n    @Override\n    public <T> void setConfig(String name, T value, Class<T> clazz) {\n        if (Config.TIME_DECAY.equals(name)) {\n            checkArgument(Double.class.isAssignableFrom(clazz),\n                    String.format(\"Setting '%s' must be a double value\", name));\n            setTimeDecay((Double) value);\n        } else {\n            throw new IllegalArgumentException(\"Unsupported configuration setting: \" + name);\n        }\n    }\n\n    @Override\n    public <T> T getConfig(String name, 
Class<T> clazz) {\n        checkNotNull(clazz, \"clazz must not be null\");\n        if (Config.TIME_DECAY.equals(name)) {\n            checkArgument(clazz.isAssignableFrom(Double.class),\n                    String.format(\"Setting '%s' must be a double value\", name));\n            return clazz.cast(getTimeDecay());\n        } else {\n            throw new IllegalArgumentException(\"Unsupported configuration setting: \" + name);\n        }\n    }\n\n    /**\n     * @return the number of points contained by the sampler when full.\n     */\n    @Override\n    public int getCapacity() {\n        return capacity;\n    }\n\n    public double getInitialAcceptFraction() {\n        return initialAcceptFraction;\n    }\n\n    public long getRandomSeed() {\n        return random.randomSeed;\n    }\n\n    protected class ReplayableRandom {\n        long randomSeed;\n        Random testRandom;\n\n        ReplayableRandom(long randomSeed) {\n            this.randomSeed = randomSeed;\n        }\n\n        ReplayableRandom(Random random) {\n            this.testRandom = random;\n        }\n\n        public double nextDouble() {\n            if (testRandom != null) {\n                return testRandom.nextDouble();\n            }\n            Random newRandom = new Random(randomSeed);\n            randomSeed = newRandom.nextLong();\n            return newRandom.nextDouble();\n        }\n    }\n\n    public static class Builder<T extends Builder<T>> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        protected int capacity = DEFAULT_SAMPLE_SIZE;\n        protected double timeDecay = 0;\n        protected Random random = null;\n        protected long randomSeed = new Random().nextLong();\n        protected long maxSequenceIndex = 0;\n        protected long sequenceIndexOfMostRecentTimeDecayUpdate = 0;\n        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;\n\n    
    public T capacity(int capacity) {\n            this.capacity = capacity;\n            return (T) this;\n        }\n\n        public T randomSeed(long seed) {\n            this.randomSeed = seed;\n            return (T) this;\n        }\n\n        public T random(Random random) {\n            this.random = random;\n            return (T) this;\n        }\n\n        public T maxSequenceIndex(long maxSequenceIndex) {\n            this.maxSequenceIndex = maxSequenceIndex;\n            return (T) this;\n        }\n\n        public T mostRecentTimeDecayUpdate(long sequenceIndexOfMostRecentTimeDecayUpdate) {\n            this.sequenceIndexOfMostRecentTimeDecayUpdate = sequenceIndexOfMostRecentTimeDecayUpdate;\n            return (T) this;\n        }\n\n        public T initialAcceptFraction(double initialAcceptFraction) {\n            this.initialAcceptFraction = initialAcceptFraction;\n            return (T) this;\n        }\n\n        public T timeDecay(double timeDecay) {\n            this.timeDecay = timeDecay;\n            return (T) this;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/sampler/AcceptPointState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\nimport lombok.AllArgsConstructor;\nimport lombok.Data;\n\n/**\n * A container class used by {@link CompactSampler}. These sampler\n * implementations compute weights during {@link IStreamSampler#acceptPoint} to\n * determine if a new point should be added to the sample. This class retains\n * the sequence index and computed weight from that method call for use in the\n * subsequent {@link IStreamSampler#addPoint} call.\n */\n@Data\n@AllArgsConstructor\npublic class AcceptPointState {\n    private long sequenceIndex;\n    private float weight;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/sampler/CompactSampler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkState;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n\nimport java.util.List;\nimport java.util.Optional;\nimport java.util.stream.Collectors;\nimport java.util.stream.IntStream;\nimport java.util.stream.Stream;\n\n/**\n * <p>\n * CompactSampler is an implementation of time-based reservoir sampling. When a\n * point is submitted to the sampler, the decision to accept the point gives\n * more weight to newer points compared to older points. The newness of a point\n * is determined by its sequence index, and larger sequence indexes are\n * considered newer.\n * </p>\n * <p>\n * The sampler algorithm is an example of the general weighted reservoir\n * sampling algorithm, which works like this:\n * </p>\n * <ol>\n * <li>For each item i choose a random number u(i) uniformly from the interval\n * (0, 1) and compute the weight function <code>-(1 / c(i)) * log u(i)</code>,\n * for a given coefficient function c(i).</li>\n * <li>For a sample size of N, maintain a list of the N items with the smallest\n * weights.</li>\n * <li>When a new item is submitted to sampler, compute its weight. 
If it is\n * smaller than the largest weight currently contained in the sampler, then the\n * item with the largest weight is evicted from the sample and replaced by the\n * new item.</li>\n * </ol>\n * <p>\n * The coefficient function used by CompactSampler is:\n * <code>c(i) = exp(timeDecay * sequenceIndex(i))</code>.\n * </p>\n */\npublic class CompactSampler extends AbstractStreamSampler<Integer> {\n\n    /**\n     * When creating a {@code CompactSampler}, the user has the option to disable\n     * storing sequence indexes. If storing sequence indexes is disabled, then this\n     * value is used for the sequence index in {@link ISampled} instances returned\n     * by {@link #getSample()}, {@link #getWeightedSample()}, and\n     * {@link #getEvictedPoint()}.\n     */\n    public static final long SEQUENCE_INDEX_NA = -1L;\n\n    /**\n     * A max-heap containing the weighted points currently in sample. The head\n     * element is the lowest priority point in the sample (or, equivalently, is the\n     * point with the greatest weight).\n     */\n    protected final float[] weight;\n\n    /**\n     * Index values identifying the points in the sample. 
See\n     * {@link com.amazon.randomcutforest.store.IPointStore}.\n     */\n    protected final int[] pointIndex;\n\n    /**\n     * Sequence indexes of the points in the sample.\n     */\n    protected final long[] sequenceIndex;\n\n    /**\n     * The number of points currently in the sample.\n     */\n    protected int size;\n\n    /**\n     * If true, then the sampler will store sequence indexes along with the sampled\n     * points.\n     */\n    private final boolean storeSequenceIndexesEnabled;\n\n    public static Builder<?> builder() {\n        return new Builder<>();\n    }\n\n    public static CompactSampler uniformSampler(int sampleSize, long randomSeed, boolean storeSequences) {\n        return new Builder<>().capacity(sampleSize).timeDecay(0).randomSeed(randomSeed)\n                .storeSequenceIndexesEnabled(storeSequences).build();\n    }\n\n    protected CompactSampler(Builder<?> builder) {\n        super(builder);\n        checkArgument(builder.initialAcceptFraction > 0, \" the admittance fraction cannot be <= 0\");\n        checkArgument(builder.capacity > 0, \" sampler capacity cannot be <=0 \");\n\n        this.storeSequenceIndexesEnabled = builder.storeSequenceIndexesEnabled;\n        this.timeDecay = builder.timeDecay;\n        this.maxSequenceIndex = builder.maxSequenceIndex;\n        this.mostRecentTimeDecayUpdate = builder.sequenceIndexOfMostRecentTimeDecayUpdate;\n\n        if (builder.weight != null || builder.pointIndex != null || builder.sequenceIndex != null\n                || builder.validateHeap) {\n            checkArgument(builder.weight != null && builder.weight.length == builder.capacity, \" incorrect state\");\n            checkArgument(builder.pointIndex != null && builder.pointIndex.length == builder.capacity,\n                    \" incorrect state\");\n            checkArgument(\n                    !builder.storeSequenceIndexesEnabled\n                            || builder.sequenceIndex != null && 
builder.sequenceIndex.length == builder.capacity,\n                    \" incorrect state\");\n            this.weight = builder.weight;\n            this.pointIndex = builder.pointIndex;\n            this.sequenceIndex = builder.sequenceIndex;\n            size = builder.size;\n            reheap(builder.validateHeap);\n        } else {\n            checkArgument(builder.size == 0, \"incorrect state\");\n            size = 0;\n            weight = new float[builder.capacity];\n            pointIndex = new int[builder.capacity];\n            if (storeSequenceIndexesEnabled) {\n                this.sequenceIndex = new long[builder.capacity];\n            } else {\n                this.sequenceIndex = null;\n            }\n        }\n    }\n\n    @Override\n    public boolean acceptPoint(long sequenceIndex, float samplingWeight) {\n        checkArgument(samplingWeight >= 0, \" weight has to be non-negative\");\n        checkState(sequenceIndex >= mostRecentTimeDecayUpdate, \"incorrect sequences submitted to sampler\");\n        evictedPoint = null;\n        if (samplingWeight > 0) {\n            float weight = computeWeight(sequenceIndex, samplingWeight);\n            boolean initial = (size < capacity && random.nextDouble() < initialAcceptProbability(size));\n            if (initial || (weight < this.weight[0])) {\n                acceptPointState = new AcceptPointState(sequenceIndex, weight);\n                if (!initial) {\n                    evictMax();\n                }\n                return true;\n            }\n        } // 0 weight implies ignore sample\n        return false;\n    }\n\n    /**\n     * evicts the maximum weight point from the sampler. can be used repeatedly to\n     * change the size of the sampler and associated tree\n     */\n    public void evictMax() {\n        long evictedIndex = storeSequenceIndexesEnabled ? 
this.sequenceIndex[0] : 0L;\n        evictedPoint = new Weighted<>(this.pointIndex[0], this.weight[0], evictedIndex);\n        --size;\n        this.weight[0] = this.weight[size];\n        this.pointIndex[0] = this.pointIndex[size];\n        if (storeSequenceIndexesEnabled) {\n            this.sequenceIndex[0] = this.sequenceIndex[size];\n        }\n        swapDown(0);\n    }\n\n    /**\n     * Check to see if the weight at current index is greater than or equal to the\n     * weight at each corresponding child index. If validate is true then throw an\n     * IllegalStateException, otherwise swap the nodes and perform the same check at\n     * the next level. Continue until you reach a level where the parent node's\n     * weight is greater than or equal to both children's weights, or until there\n     * are no more levels to descend.\n     *\n     * @param startIndex The index of node to start with.\n     * @param validate   If true, a violation of the heap property will throw an\n     *                   IllegalStateException. 
If false, then swap nodes that\n     *                   violate the heap property.\n     */\n    private void swapDown(int startIndex, boolean validate) {\n        int current = startIndex;\n        while (2 * current + 1 < size) {\n            int maxIndex = 2 * current + 1;\n            if (2 * current + 2 < size && weight[2 * current + 2] > weight[maxIndex]) {\n                maxIndex = 2 * current + 2;\n            }\n            if (weight[maxIndex] > weight[current]) {\n                if (validate) {\n                    throw new IllegalStateException(\"the heap property is not satisfied at index \" + current);\n                }\n                swapWeights(current, maxIndex);\n                current = maxIndex;\n            } else {\n                break;\n            }\n        }\n    }\n\n    private void swapDown(int startIndex) {\n        swapDown(startIndex, false);\n    }\n\n    public void reheap(boolean validate) {\n        for (int i = (size + 1) / 2; i >= 0; i--) {\n            swapDown(i, validate);\n        }\n    }\n\n    public void addPoint(Integer pointIndex, float weight, long sequenceIndex) {\n        checkArgument(acceptPointState == null && size < capacity && pointIndex != null, \" operation not permitted\");\n        acceptPointState = new AcceptPointState(sequenceIndex, weight);\n        addPoint(pointIndex);\n    }\n\n    @Override\n    public void addPoint(Integer pointIndex) {\n        if (pointIndex != null) {\n            checkState(size < capacity, \"sampler full\");\n            checkState(acceptPointState != null,\n                    \"this method should only be called after a successful call to acceptSample(long)\");\n            this.weight[size] = acceptPointState.getWeight();\n            this.pointIndex[size] = pointIndex;\n            if (storeSequenceIndexesEnabled) {\n                this.sequenceIndex[size] = acceptPointState.getSequenceIndex();\n            }\n            int current = size++;\n            
while (current > 0) {\n                int tmp = (current - 1) / 2;\n                if (this.weight[tmp] < this.weight[current]) {\n                    swapWeights(current, tmp);\n                    current = tmp;\n                } else\n                    break;\n            }\n            acceptPointState = null;\n        }\n    }\n\n    /**\n     * Return the list of sampled points. If this sampler was created with the\n     * {@code storeSequenceIndexesEnabled} flag set to false, then all sequence\n     * indexes in the list will be set to {@link #SEQUENCE_INDEX_NA}.\n     *\n     * @return the list of sampled points.\n     */\n    @Override\n    public List<ISampled<Integer>> getSample() {\n        return streamSample().collect(Collectors.toList());\n    }\n\n    /**\n     * Return the list of sampled points with weights.\n     * \n     * @return the list of sampled points with weights.\n     */\n    public List<Weighted<Integer>> getWeightedSample() {\n        return streamSample().collect(Collectors.toList());\n    }\n\n    private Stream<Weighted<Integer>> streamSample() {\n        reset_weights();\n        return IntStream.range(0, size).mapToObj(i -> {\n            long index = sequenceIndex != null ? 
sequenceIndex[i] : SEQUENCE_INDEX_NA;\n            return new Weighted<>(pointIndex[i], weight[i], index);\n        });\n    }\n\n    /**\n     * removes the adjustments to weight in accumulated timeDecay and resets the\n     * updates to timeDecay\n     */\n    private void reset_weights() {\n        if (accumuluatedTimeDecay == 0)\n            return;\n        // now the weight computation of every element would not see this subtraction\n        // which implies that every existing element should see the offset as addition\n        for (int i = 0; i < size; i++) {\n            weight[i] += accumuluatedTimeDecay;\n        }\n        accumuluatedTimeDecay = 0;\n    }\n\n    /**\n     * @return the point evicted by the most recent call to {@link #update}, or null\n     *         if no point was evicted.\n     */\n    public Optional<ISampled<Integer>> getEvictedPoint() {\n        return Optional.ofNullable(evictedPoint);\n    }\n\n    /**\n     * @return the number of points currently contained by the sampler.\n     */\n    @Override\n    public int size() {\n        return size;\n    }\n\n    public float[] getWeightArray() {\n        return weight;\n    }\n\n    public int[] getPointIndexArray() {\n        return pointIndex;\n    }\n\n    public long[] getSequenceIndexArray() {\n        return sequenceIndex;\n    }\n\n    public boolean isStoreSequenceIndexesEnabled() {\n        return storeSequenceIndexesEnabled;\n    }\n\n    private void swapWeights(int a, int b) {\n        int tmp = pointIndex[a];\n        pointIndex[a] = pointIndex[b];\n        pointIndex[b] = tmp;\n\n        float tmpDouble = weight[a];\n        weight[a] = weight[b];\n        weight[b] = tmpDouble;\n\n        if (storeSequenceIndexesEnabled) {\n            long tmpLong = sequenceIndex[a];\n            sequenceIndex[a] = sequenceIndex[b];\n            sequenceIndex[b] = tmpLong;\n        }\n    }\n\n    public static class Builder<T extends Builder<T>> extends 
AbstractStreamSampler.Builder<T> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        private int size = 0;\n        private float[] weight = null;\n        private int[] pointIndex = null;\n        private long[] sequenceIndex = null;\n        private boolean validateHeap = false;\n        private boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n\n        public T size(int size) {\n            this.size = size;\n            return (T) this;\n        }\n\n        public T weight(float[] weight) {\n            this.weight = weight;\n            return (T) this;\n        }\n\n        public T pointIndex(int[] pointIndex) {\n            this.pointIndex = pointIndex;\n            return (T) this;\n        }\n\n        public T sequenceIndex(long[] sequenceIndex) {\n            this.sequenceIndex = sequenceIndex;\n            return (T) this;\n        }\n\n        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n            return (T) this;\n        }\n\n        public T validateHeap(boolean validateHeap) {\n            this.validateHeap = validateHeap;\n            return (T) this;\n        }\n\n        public CompactSampler build() {\n            return new CompactSampler(this);\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/sampler/ISampled.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\n/**\n * A simple wrapper class representing a point that has been sampled by a\n * sampler. A sampled point can be added to or removed from a tree.\n * \n * @param <P> The point representation used by this sampled point.\n */\npublic interface ISampled<P> {\n    /**\n     * Return the sampled value.\n     * \n     * @return the sampled value.\n     */\n    P getValue();\n\n    /**\n     * Return the sequence index of the sampled value.\n     * \n     * @return the sequence index of the sampled value.\n     */\n    long getSequenceIndex();\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/sampler/IStreamSampler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\nimport java.util.List;\nimport java.util.Optional;\n\nimport com.amazon.randomcutforest.config.IDynamicConfig;\n\n/**\n * <p>\n * A sampler that can be updated iteratively from a stream of data points. The\n * update operation is broken into two steps: an \"accept\" step and an \"add\"\n * step. During the \"accept\" step, the sampler decides whether to accept a new\n * point into sample. The decision rule will depend on the sampler\n * implementation If the sampler is full, accepting a new point requires the\n * sampler to evict a point currently in the sample. This operation is also part\n * of the accept step.\n * </p>\n *\n * <p>\n * If the outcome of the accept step is to accept a new point, then the sampler\n * continues to the second step to add a point to the sample (if the outcome is\n * not to accept a new point, then this step is not invoked). The reason for\n * this two-step process is because sampler update steps may be interleaved with\n * model update steps in\n * {@link com.amazon.randomcutforest.executor.IUpdatable#update} (see\n * {@link com.amazon.randomcutforest.executor.SamplerPlusTree#update}, for\n * example). 
In particular, if a new point is accepted into the sampler whose\n * value is equal to an existing point in the sample, then the model may choose\n * to increment the count on the existing point rather than allocate new storage\n * for the duplicate point.\n * </p>\n *\n * @param <P> The point type.\n */\npublic interface IStreamSampler<P> extends IDynamicConfig {\n    /**\n     * Submit a point to the sampler and return true if the point is accepted into\n     * the sample. By default this method chains together the {@link #acceptPoint}\n     * and {@link #addPoint} methods. If a point was evicted from the sample as a\n     * side effect, then the evicted point will be available in\n     * {@link #getEvictedPoint()} until the next call to {@link #addPoint}.\n     *\n     * @param point         The point submitted to the sampler.\n     * @param sequenceIndex the sequence number\n     * @return true if the point is accepted and added to the sample, false if the\n     *         point is rejected.\n     */\n    default boolean update(P point, long sequenceIndex) {\n        if (acceptPoint(sequenceIndex)) {\n            addPoint(point);\n            return true;\n        }\n        return false;\n    }\n\n    /**\n     * This is the first step in a two-step sample operation. In this step, the\n     * sampler makes a decision about whether to accept a new point into the sample.\n     * If it decides to accept the point, then a new point can be added by calling\n     * {@link #addPoint}.\n     *\n     * If a point needs to be evicted before a new point is added, eviction should\n     * happen in this method. 
If a point is evicted during a call to\n     * {@code acceptSample}, it will be available by calling\n     * {@link #getEvictedPoint()} until the next time {@code acceptSample} is\n     * called.\n     *\n     * @param sequenceIndex The sequence of the the point being submitted to the\n     *                      sampler.\n     * @return true if the point should be added to the sample.\n     */\n    boolean acceptPoint(long sequenceIndex);\n\n    /**\n     * This is the second step in a two-step sample operation. If the\n     * {@link #acceptPoint} method was called and returned true, then this method\n     * should be called to complete the sampling operation by adding the point to\n     * the sample. If a call to {@code addPoint} is not preceded by a successful\n     * call to {@code acceptPoint}, then it may fail with an\n     * {@code IllegalStateException}.\n     * \n     * @param point The point being added to the sample.\n     */\n    void addPoint(P point);\n\n    /**\n     * Return the list of sampled points.\n     * \n     * @return the list of sampled points.\n     */\n    List<ISampled<P>> getSample();\n\n    /**\n     * @return the point that was evicted from the sample in the most recent call to\n     *         {@link #acceptPoint}, or {@code Optional.empty()} if no point was\n     *         evicted.\n     */\n\n    Optional<ISampled<P>> getEvictedPoint();\n\n    /**\n     * @return true if this sampler contains enough points to support the anomaly\n     *         score computation, false otherwise. 
By default, this will\n     */\n    default boolean isReady() {\n        return size() >= getCapacity() / 4;\n    }\n\n    /**\n     * @return true if the sampler has reached it's full capacity, false otherwise.\n     */\n    default boolean isFull() {\n        return size() >= getCapacity();\n    }\n\n    /**\n     * @return the number of points contained by the sampler when full.\n     */\n    int getCapacity();\n\n    /**\n     * @return the number of points currently contained by the sampler.\n     */\n    int size();\n\n    void setMaxSequenceIndex(long maxSequenceIndex);\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/sampler/Weighted.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\nimport lombok.Data;\n\n/**\n * A container class representing a weighted value. This generic type is used in\n * sampler implementations to store points along with weights that were computed\n * as part of sampling.\n *\n * @param <P> The representation of the point value.\n */\n@Data\npublic class Weighted<P> implements ISampled<P> {\n    /**\n     * The sampled value.\n     */\n    private final P value;\n\n    /**\n     * The weight assigned to this value.\n     */\n    private final float weight;\n\n    /**\n     * The sequence index of the sampled value.\n     */\n    private final long sequenceIndex;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/ExecutionContext.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\n@Data\npublic class ExecutionContext implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private boolean parallelExecutionEnabled;\n    private int threadPoolSize;\n\n    /**\n     * A string to define an \"execution mode\" that can be used to set multiple\n     * configuration options. This field is not currently in use.\n     */\n    private String mode;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/IContextualStateMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\npublic interface IContextualStateMapper<Model, State, ContextState> {\n    State toState(Model model);\n\n    Model toModel(State state, ContextState contextState, long seed);\n\n    default Model toModel(State state, ContextState contextState) {\n        return toModel(state, contextState, 0L);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/IStateMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\npublic interface IStateMapper<Model, State> {\n    State toState(Model model);\n\n    Model toModel(State state, long seed);\n\n    default Model toModel(State state) {\n        return toModel(state, 0L);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/PredictiveRandomCutForestMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.PredictiveRandomCutForest;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorMapper;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorState;\n\n@Getter\n@Setter\npublic class PredictiveRandomCutForestMapper\n        implements IStateMapper<PredictiveRandomCutForest, PredictiveRandomCutForestState> {\n\n    @Override\n    public PredictiveRandomCutForest toModel(PredictiveRandomCutForestState state, long seed) {\n\n        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();\n        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();\n\n        RandomCutForest forest = randomCutForestMapper.toModel(state.getForestState());\n        Preprocessor preprocessor = preprocessorMapper.toModel(state.getPreprocessorStates()[0]);\n\n        ForestMode forestMode = ForestMode.valueOf(state.getForestMode());\n        TransformMethod transformMethod = TransformMethod.valueOf(state.getTransformMethod());\n\n        return new 
PredictiveRandomCutForest(forestMode, transformMethod, preprocessor, forest);\n    }\n\n    @Override\n    public PredictiveRandomCutForestState toState(PredictiveRandomCutForest model) {\n        PredictiveRandomCutForestState state = new PredictiveRandomCutForestState();\n        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();\n        randomCutForestMapper.setPartialTreeStateEnabled(true);\n        randomCutForestMapper.setSaveTreeStateEnabled(true);\n        randomCutForestMapper.setCompressionEnabled(true);\n        randomCutForestMapper.setSaveCoordinatorStateEnabled(true);\n        randomCutForestMapper.setSaveExecutorContextEnabled(true);\n\n        state.setForestState(randomCutForestMapper.toState(model.getForest()));\n\n        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();\n        state.setPreprocessorStates(\n                new PreprocessorState[] { preprocessorMapper.toState((Preprocessor) model.getPreprocessor()) });\n\n        state.setForestMode(model.getForestMode().name());\n        state.setTransformMethod(model.getTransformMethod().name());\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/PredictiveRandomCutForestState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport static com.amazon.randomcutforest.state.Version.V4_0;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorState;\n\n@Data\npublic class PredictiveRandomCutForestState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V4_0;\n    RandomCutForestState forestState;\n    private PreprocessorState[] preprocessorStates;\n    private String forestMode;\n    private String transformMethod;\n    private long randomSeed;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/RandomCutForestMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.CommonUtils.validateInternalState;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.stream.Collectors;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.IComponentModel;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Config;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.executor.PointStoreCoordinator;\nimport com.amazon.randomcutforest.executor.SamplerPlusTree;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.sampler.IStreamSampler;\nimport com.amazon.randomcutforest.state.sampler.CompactSamplerMapper;\nimport com.amazon.randomcutforest.state.sampler.CompactSamplerState;\nimport com.amazon.randomcutforest.state.store.PointStoreMapper;\nimport com.amazon.randomcutforest.state.store.PointStoreState;\nimport com.amazon.randomcutforest.state.tree.CompactRandomCutTreeContext;\nimport com.amazon.randomcutforest.state.tree.CompactRandomCutTreeState;\nimport 
com.amazon.randomcutforest.state.tree.RandomCutTreeMapper;\nimport com.amazon.randomcutforest.store.IPointStore;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.tree.ITree;\nimport com.amazon.randomcutforest.tree.RandomCutTree;\n\n/**\n * A utility class for creating a {@link RandomCutForestState} instance from a\n * {@link RandomCutForest} instance and vice versa.\n */\n@Getter\n@Setter\npublic class RandomCutForestMapper\n        implements IContextualStateMapper<RandomCutForest, RandomCutForestState, ExecutionContext> {\n\n    /**\n     * A flag indicating whether the structure of the trees in the forest should be\n     * included in the state object. If true, then data describing the bounding\n     * boxes and cuts defining each tree will be written to the\n     * {@link RandomCutForestState} object produced by the mapper. Tree state is not\n     * saved by default.\n     */\n    private boolean saveTreeStateEnabled = false;\n\n    /**\n     * A flag indicating whether the point store should be included in the\n     * {@link RandomCutForestState} object produced by the mapper. This is saved by\n     * default for compact trees\n     */\n    private boolean saveCoordinatorStateEnabled = true;\n\n    /**\n     * A flag indicating whether the samplers should be included in the\n     * {@link RandomCutForestState} object produced by the mapper. This is saved by\n     * default for all trees.\n     */\n    private boolean saveSamplerStateEnabled = true;\n\n    /**\n     * A flag indicating whether the executor context should be included in the\n     * {@link RandomCutForestState} object produced by the mapper. 
Executor context\n     * is not saved by default.\n     */\n    private boolean saveExecutorContextEnabled = false;\n\n    /**\n     * If true, then the arrays are compressed via simple data dependent scheme\n     */\n    private boolean compressionEnabled = true;\n\n    /**\n     * if true would require that the samplers populate the trees before the trees\n     * can be used again. That would correspond to extra time, at the benefit of a\n     * smaller serialization.\n     */\n    private boolean partialTreeStateEnabled = false;\n\n    /**\n     * Create a {@link RandomCutForestState} object representing the state of the\n     * given forest. If the forest is compact and the {@code saveTreeState} flag is\n     * set to true, then structure of the trees in the forest will be included in\n     * the state object. If the flag is set to false, then the state object will\n     * only contain the sampler data for each tree. If the\n     * {@code saveExecutorContext} is true, then the executor context will be\n     * included in the state object.\n     *\n     * @param forest A Random Cut Forest whose state we want to capture.\n     * @return a {@link RandomCutForestState} object representing the state of the\n     *         given forest.\n     * @throws IllegalArgumentException if the {@code saveTreeState} flag is true\n     *                                  and the forest is not compact.\n     */\n    @Override\n    public RandomCutForestState toState(RandomCutForest forest) {\n\n        RandomCutForestState state = new RandomCutForestState();\n\n        state.setNumberOfTrees(forest.getNumberOfTrees());\n        state.setDimensions(forest.getDimensions());\n        state.setTimeDecay(forest.getTimeDecay());\n        state.setSampleSize(forest.getSampleSize());\n        state.setShingleSize(forest.getShingleSize());\n        state.setCenterOfMassEnabled(forest.isCenterOfMassEnabled());\n        state.setOutputAfter(forest.getOutputAfter());\n        
state.setStoreSequenceIndexesEnabled(forest.isStoreSequenceIndexesEnabled());\n        state.setTotalUpdates(forest.getTotalUpdates());\n        state.setCompact(true);\n        state.setInternalShinglingEnabled(forest.isInternalShinglingEnabled());\n        state.setBoundingBoxCacheFraction(forest.getBoundingBoxCacheFraction());\n        state.setSaveSamplerStateEnabled(saveSamplerStateEnabled);\n        state.setSaveTreeStateEnabled(saveTreeStateEnabled);\n        state.setSaveCoordinatorStateEnabled(saveCoordinatorStateEnabled);\n        state.setPrecision(forest.getPrecision().name());\n        state.setCompressed(compressionEnabled);\n        state.setPartialTreeState(partialTreeStateEnabled);\n        state.setCurrentlySampling(forest.isCurrentlySampling());\n\n        if (saveExecutorContextEnabled) {\n            ExecutionContext executionContext = new ExecutionContext();\n            executionContext.setParallelExecutionEnabled(forest.isParallelExecutionEnabled());\n            executionContext.setThreadPoolSize(forest.getThreadPoolSize());\n            state.setExecutionContext(executionContext);\n        }\n\n        if (saveCoordinatorStateEnabled) {\n            PointStoreCoordinator<?> pointStoreCoordinator = (PointStoreCoordinator<?>) forest.getUpdateCoordinator();\n            PointStoreMapper mapper = new PointStoreMapper();\n            mapper.setCompressionEnabled(compressionEnabled);\n            mapper.setNumberOfTrees(forest.getNumberOfTrees());\n            PointStoreState pointStoreState = mapper.toState((PointStore) pointStoreCoordinator.getStore());\n            state.setPointStoreState(pointStoreState);\n        }\n        List<CompactSamplerState> samplerStates = null;\n        if (saveSamplerStateEnabled) {\n            samplerStates = new ArrayList<>();\n        }\n        List<ITree<Integer, ?>> trees = null;\n        if (saveTreeStateEnabled) {\n            trees = new ArrayList<>();\n        }\n\n        CompactSamplerMapper 
samplerMapper = new CompactSamplerMapper();\n        samplerMapper.setCompressionEnabled(compressionEnabled);\n\n        for (IComponentModel<?, ?> component : forest.getComponents()) {\n            SamplerPlusTree<Integer, ?> samplerPlusTree = (SamplerPlusTree<Integer, ?>) component;\n            CompactSampler sampler = (CompactSampler) samplerPlusTree.getSampler();\n            if (samplerStates != null) {\n                samplerStates.add(samplerMapper.toState(sampler));\n            }\n            if (trees != null) {\n                trees.add(samplerPlusTree.getTree());\n            }\n        }\n\n        state.setCompactSamplerStates(samplerStates);\n\n        if (trees != null) {\n            RandomCutTreeMapper treeMapper = new RandomCutTreeMapper();\n            List<CompactRandomCutTreeState> treeStates = trees.stream().map(t -> treeMapper.toState((RandomCutTree) t))\n                    .collect(Collectors.toList());\n            state.setCompactRandomCutTreeStates(treeStates);\n        }\n        return state;\n    }\n\n    /**\n     * Create a {@link RandomCutForest} instance from a\n     * {@link RandomCutForestState}. If the state contains tree states, then trees\n     * will be constructed from the tree state objects. Otherwise, empty trees are\n     * created and populated from the sampler data. The resulting forest should be\n     * equal in distribution to the forest that the state object was created from.\n     *\n     * @param state            A Random Cut Forest state object.\n     * @param executionContext An executor context that will be used to initialize\n     *                         new executors in the Random Cut Forest. 
If this\n     *                         argument is null, then the mapper will look for an\n     *                         executor context in the state object.\n     * @param seed             A random seed.\n     * @return A Random Cut Forest corresponding to the state object.\n     * @throws NullPointerException if both the {@code executorContext} method\n     *                              argument and the executor context field in the\n     *                              state object are null.\n     */\n    public RandomCutForest toModel(RandomCutForestState state, ExecutionContext executionContext, long seed) {\n\n        ExecutionContext ec;\n        if (executionContext != null) {\n            ec = executionContext;\n        } else {\n            checkNotNull(state.getExecutionContext(),\n                    \"The executor context in the state object is null, an executor context must be passed explicitly to toModel()\");\n            ec = state.getExecutionContext();\n        }\n\n        RandomCutForest.Builder<?> builder = RandomCutForest.builder().numberOfTrees(state.getNumberOfTrees())\n                .dimensions(state.getDimensions()).timeDecay(state.getTimeDecay()).sampleSize(state.getSampleSize())\n                .centerOfMassEnabled(state.isCenterOfMassEnabled()).outputAfter(state.getOutputAfter())\n                .parallelExecutionEnabled(ec.isParallelExecutionEnabled()).threadPoolSize(ec.getThreadPoolSize())\n                .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).shingleSize(state.getShingleSize())\n                .boundingBoxCacheFraction(state.getBoundingBoxCacheFraction())\n                .internalShinglingEnabled(state.isInternalShinglingEnabled()).randomSeed(seed);\n\n        validateInternalState(Precision.valueOf(state.getPrecision()) == Precision.FLOAT_32,\n                \" use randomcutforest-serialization package\");\n        Random random = builder.getRandom();\n        ComponentList<Integer, float[]> 
components = new ComponentList<>();\n        CompactRandomCutTreeContext context = new CompactRandomCutTreeContext();\n        IPointStore<Integer, float[]> pointStore = new PointStoreMapper().toModel(state.getPointStoreState());\n        PointStoreCoordinator<float[]> coordinator = new PointStoreCoordinator<>(pointStore);\n        coordinator.setTotalUpdates(state.getTotalUpdates());\n        context.setPointStore(pointStore);\n        context.setMaxSize(state.getSampleSize());\n        RandomCutTreeMapper treeMapper = new RandomCutTreeMapper();\n        List<CompactRandomCutTreeState> treeStates = state.isSaveTreeStateEnabled()\n                ? state.getCompactRandomCutTreeStates()\n                : null;\n        CompactSamplerMapper samplerMapper = new CompactSamplerMapper();\n        checkArgument(state.isSaveSamplerStateEnabled(), \"samplers are not saved; no forest to reconstruct\");\n        List<CompactSamplerState> samplerStates = state.getCompactSamplerStates();\n        for (int i = 0; i < state.getNumberOfTrees(); i++) {\n            IStreamSampler<Integer> sampler = samplerMapper.toModel(samplerStates.get(i), random.nextLong());\n\n            ITree<Integer, float[]> tree;\n            if (treeStates != null) {\n                tree = treeMapper.toModel(treeStates.get(i), context, random.nextLong());\n                sampler.getSample().forEach(s -> tree.addPointToPartialTree(s.getValue(), s.getSequenceIndex()));\n                tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, treeStates.get(i).getBoundingBoxCacheFraction());\n                tree.validateAndReconstruct();\n            } else {\n                // using boundingBoxCache for the new tree\n                tree = new RandomCutTree.Builder().capacity(state.getSampleSize()).randomSeed(random.nextLong())\n                        .pointStoreView(pointStore).boundingBoxCacheFraction(state.getBoundingBoxCacheFraction())\n                        
.centerOfMassEnabled(state.isCenterOfMassEnabled())\n                        .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).build();\n                sampler.getSample().forEach(s -> tree.addPoint(s.getValue(), s.getSequenceIndex()));\n            }\n            components.add(new SamplerPlusTree<>(sampler, tree));\n        }\n\n        RandomCutForest forest = new RandomCutForest(builder, coordinator, components, random);\n        if (!state.isCurrentlySampling()) {\n            forest.pauseSampling();\n        }\n        return forest;\n\n    }\n\n    /**\n     * Create a {@link RandomCutForest} instance from a {@link RandomCutForestState}\n     * using the executor context in the state object. See\n     * {@link #toModel(RandomCutForestState, ExecutionContext, long)}.\n     *\n     * @param state A Random Cut Forest state object.\n     * @param seed  A random seed.\n     * @return A Random Cut Forest corresponding to the state object.\n     * @throws NullPointerException if the executor context field in the state\n     *                              object are null.\n     */\n    public RandomCutForest toModel(RandomCutForestState state, long seed) {\n        return toModel(state, null, seed);\n    }\n\n    /**\n     * Create a {@link RandomCutForest} instance from a {@link RandomCutForestState}\n     * using the executor context in the state object. See\n     * {@link #toModel(RandomCutForestState, ExecutionContext, long)}.\n     *\n     * @param state A Random Cut Forest state object.\n     * @return A Random Cut Forest corresponding to the state object.\n     * @throws NullPointerException if the executor context field in the state\n     *                              object are null.\n     */\n    public RandomCutForest toModel(RandomCutForestState state) {\n        return toModel(state, null);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/RandomCutForestState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport static com.amazon.randomcutforest.state.Version.V4_0;\n\nimport java.io.Serializable;\nimport java.util.List;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.sampler.CompactSamplerState;\nimport com.amazon.randomcutforest.state.store.PointStoreState;\nimport com.amazon.randomcutforest.state.tree.CompactRandomCutTreeState;\n\n/**\n * A class that encapsulates most of the data used in a RandomCutForest such\n * that the forest can be serialized and deserialized.\n */\n@Data\npublic class RandomCutForestState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V4_0;\n\n    private long totalUpdates;\n\n    private double timeDecay;\n\n    private int numberOfTrees;\n\n    private int sampleSize;\n\n    private int shingleSize;\n\n    private int dimensions;\n\n    private int outputAfter;\n\n    private boolean compressed;\n\n    private boolean partialTreeState;\n\n    private double boundingBoxCacheFraction;\n\n    private boolean storeSequenceIndexesEnabled;\n\n    private boolean compact;\n\n    private boolean internalShinglingEnabled;\n\n    private boolean centerOfMassEnabled;\n\n    private String precision;\n\n    private PointStoreState pointStoreState;\n\n    private List<CompactSamplerState> 
compactSamplerStates;\n\n    private List<CompactRandomCutTreeState> compactRandomCutTreeStates;\n\n    private ExecutionContext executionContext;\n\n    // Mapper options\n\n    private boolean saveTreeStateEnabled;\n\n    private boolean saveSamplerStateEnabled;\n\n    private boolean saveCoordinatorStateEnabled;\n\n    private boolean currentlySampling;\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/Version.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\npublic class Version {\n    public static final String V2_0 = \"2.0\";\n    public static final String V2_1 = \"2.1\";\n    public static final String V3_0 = \"3.0\";\n    public static final String V3_5 = \"3.5\";\n    public static final String V3_7 = \"3.7\";\n    public static final String V3_8 = \"3.8\";\n\n    public static final String V4_0 = \"4.0\";\n\n    public static final String V4_1 = \"4.1\";\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/preprocessor/PreprocessorMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.preprocessor;\n\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.state.statistics.DeviationMapper.getDeviations;\nimport static com.amazon.randomcutforest.state.statistics.DeviationMapper.getStates;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.statistics.DeviationMapper;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class PreprocessorMapper implements IStateMapper<Preprocessor, PreprocessorState> {\n\n    @Override\n    public Preprocessor toModel(PreprocessorState state, long seed) {\n        DeviationMapper deviationMapper = new DeviationMapper();\n        Deviation[] deviations = getDeviations(state.getDeviationStates(), deviationMapper);\n        Deviation[] timeStampDeviations = getDeviations(state.getTimeStampDeviationStates(), deviationMapper);\n        Deviation[] dataQuality = 
getDeviations(state.getDataQualityStates(), deviationMapper);\n        Preprocessor.Builder<?> preprocessorBuilder = new Preprocessor.Builder<>()\n                .forestMode(ForestMode.valueOf(state.getForestMode())).shingleSize(state.getShingleSize())\n                .dimensions(state.getDimensions()).normalizeTime(state.isNormalizeTime())\n                .imputationMethod(ImputationMethod.valueOf(state.getImputationMethod()))\n                .fillValues(state.getDefaultFill()).inputLength(state.getInputLength()).weights(state.getWeights())\n                .transformMethod(TransformMethod.valueOf(state.getTransformMethod()))\n                .startNormalization(state.getStartNormalization()).useImputedFraction(state.getUseImputedFraction())\n                .timeDeviations(timeStampDeviations).deviations(deviations).dataQuality(dataQuality)\n                .transformDecay(state.getTimeDecay());\n\n        Preprocessor preprocessor = preprocessorBuilder.build();\n        preprocessor.setInitialValues(state.getInitialValues());\n        preprocessor.setInitialTimeStamps(state.getInitialTimeStamps());\n        preprocessor.setClipFactor(state.getClipFactor());\n        preprocessor.setValuesSeen(state.getValuesSeen());\n        preprocessor.setInternalTimeStamp(state.getInternalTimeStamp());\n        preprocessor.setLastShingledInput(state.getLastShingledInput());\n        preprocessor.setLastShingledPoint(toFloatArray(state.getLastShingledPoint()));\n        preprocessor.setPreviousTimeStamps(state.getPreviousTimeStamps());\n        preprocessor.setNormalizeTime(state.isNormalizeTime());\n        preprocessor.setFastForward(state.isFastForward());\n        preprocessor.setNumberOfImputed(state.getNumberOfImputed());\n        return preprocessor;\n    }\n\n    @Override\n    public PreprocessorState toState(Preprocessor model) {\n        PreprocessorState state = new PreprocessorState();\n        state.setShingleSize(model.getShingleSize());\n        
state.setDimensions(model.getDimension());\n        state.setInputLength(model.getInputLength());\n        state.setClipFactor(model.getClipFactor());\n        state.setDefaultFill(model.getDefaultFill());\n        state.setImputationMethod(model.getImputationMethod().name());\n        state.setTransformMethod(model.getTransformMethod().name());\n        state.setWeights(model.getWeights());\n        state.setForestMode(model.getMode().name());\n        state.setInitialTimeStamps(model.getInitialTimeStamps());\n        state.setInitialValues(model.getInitialValues());\n        state.setUseImputedFraction(model.getUseImputedFraction());\n        state.setNormalizeTime(model.isNormalizeTime());\n        state.setStartNormalization(model.getStartNormalization());\n        state.setStopNormalization(model.getStopNormalization());\n        state.setPreviousTimeStamps(model.getPreviousTimeStamps());\n        state.setLastShingledInput(model.getLastShingledInput());\n        state.setLastShingledPoint(toDoubleArray(model.getLastShingledPoint()));\n        state.setValuesSeen(model.getValuesSeen());\n        state.setInternalTimeStamp(model.getInternalTimeStamp());\n        DeviationMapper deviationMapper = new DeviationMapper();\n        state.setTimeDecay(model.getTransformDecay());\n        state.setDeviationStates(getStates(model.getDeviationList(), deviationMapper));\n        state.setTimeStampDeviationStates(getStates(model.getTimeStampDeviations(), deviationMapper));\n        state.setDataQualityStates(getStates(model.getDataQuality(), deviationMapper));\n        state.setFastForward(model.isFastForward());\n        state.setNumberOfImputed(model.getNumberOfImputed());\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/preprocessor/PreprocessorState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.preprocessor;\n\nimport static com.amazon.randomcutforest.state.Version.V4_1;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.statistics.DeviationState;\n\n@Data\npublic class PreprocessorState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V4_1;\n    private double useImputedFraction;\n    private String imputationMethod;\n    private String forestMode;\n\n    private String transformMethod;\n    private double[] weights;\n    private double[] lastShingledPoint;\n    private double[] lastShingledInput;\n    private double[] defaultFill;\n    private double timeDecay;\n    private int startNormalization;\n    private int stopNormalization;\n    private int shingleSize;\n    private int dimensions;\n    private int inputLength;\n    private double clipFactor;\n    private boolean normalizeTime;\n    private long[] initialTimeStamps;\n    private double[][] initialValues;\n    private long[] previousTimeStamps;\n    private int valuesSeen;\n    private int internalTimeStamp;\n    @Deprecated\n    private DeviationState dataQualityState;\n    @Deprecated\n    private DeviationState timeStampDeviationState;\n    private DeviationState[] deviationStates;\n\n    private DeviationState[] 
dataQualityStates;\n    private DeviationState[] timeStampDeviationStates;\n    private boolean fastForward;\n    private int numberOfImputed;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/returntypes/DiVectorMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.returntypes;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.state.IStateMapper;\n\n@Getter\n@Setter\npublic class DiVectorMapper implements IStateMapper<DiVector, DiVectorState> {\n\n    @Override\n    public DiVector toModel(DiVectorState state, long seed) {\n\n        if (state.getHigh() == null || state.getLow() == null) {\n            return null;\n        } else {\n            return new DiVector(state.getHigh(), state.getLow());\n        }\n    }\n\n    @Override\n    public DiVectorState toState(DiVector model) {\n\n        DiVectorState state = new DiVectorState();\n        if (model != null) {\n            state.setHigh(Arrays.copyOf(model.high, model.high.length));\n            state.setLow(Arrays.copyOf(model.low, model.low.length));\n        }\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/returntypes/DiVectorState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.returntypes;\n\nimport java.io.Serializable;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.anomalydetection.AnomalyAttributionVisitor;\n\n/**\n * A DiVector is used when we want to track a quantity in both the positive and\n * negative directions for each dimension in a manifold. For example, when using\n * a {@link AnomalyAttributionVisitor} to compute the attribution of the anomaly\n * score to dimension of the input point, we want to know if the anomaly score\n * attributed to the ith coordinate of the input point is due to that coordinate\n * being unusually high or unusually low.\n *\n * The DiVectorState creates a POJO to be used in serialization.\n */\n@Getter\n@Setter\npublic class DiVectorState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    double[] high;\n    double[] low;\n}"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/sampler/CompactSamplerMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.sampler;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.util.ArrayPacking;\n\n@Getter\n@Setter\npublic class CompactSamplerMapper implements IStateMapper<CompactSampler, CompactSamplerState> {\n\n    /**\n     * This flag is passed to the constructor for {@code CompactSampler} when a new\n     * sampler is constructed in {@link #toModel}. If true, then the sampler will\n     * validate that the weight array in a {@code CompactSamplerState} instance\n     * satisfies the heap property. 
The heap property is not validated by default.\n     */\n    private boolean validateHeapEnabled = false;\n\n    /**\n     * used to compress data, can be set to false for debug\n     */\n    private boolean compressionEnabled = true;\n\n    @Override\n    public CompactSampler toModel(CompactSamplerState state, long seed) {\n        float[] weight = new float[state.getCapacity()];\n        int[] pointIndex = new int[state.getCapacity()];\n        long[] sequenceIndex;\n\n        int size = state.getSize();\n        System.arraycopy(state.getWeight(), 0, weight, 0, size);\n        System.arraycopy(ArrayPacking.unpackInts(state.getPointIndex(), state.isCompressed()), 0, pointIndex, 0, size);\n        if (state.isStoreSequenceIndicesEnabled()) {\n            sequenceIndex = new long[state.getCapacity()];\n            System.arraycopy(state.getSequenceIndex(), 0, sequenceIndex, 0, size);\n        } else {\n            sequenceIndex = null;\n        }\n\n        return new CompactSampler.Builder<>().capacity(state.getCapacity()).timeDecay(state.getTimeDecay())\n                .randomSeed(state.getRandomSeed()).storeSequenceIndexesEnabled(state.isStoreSequenceIndicesEnabled())\n                .weight(weight).pointIndex(pointIndex).sequenceIndex(sequenceIndex).validateHeap(validateHeapEnabled)\n                .initialAcceptFraction(state.getInitialAcceptFraction())\n                .mostRecentTimeDecayUpdate(state.getSequenceIndexOfMostRecentTimeDecayUpdate())\n                .maxSequenceIndex(state.getMaxSequenceIndex()).size(state.getSize()).build();\n    }\n\n    @Override\n    public CompactSamplerState toState(CompactSampler model) {\n        CompactSamplerState state = new CompactSamplerState();\n        state.setSize(model.size());\n        state.setCompressed(compressionEnabled);\n        state.setCapacity(model.getCapacity());\n        state.setTimeDecay(model.getTimeDecay());\n        
state.setSequenceIndexOfMostRecentTimeDecayUpdate(model.getMostRecentTimeDecayUpdate());\n        state.setMaxSequenceIndex(model.getMaxSequenceIndex());\n        state.setInitialAcceptFraction(model.getInitialAcceptFraction());\n        state.setStoreSequenceIndicesEnabled(model.isStoreSequenceIndexesEnabled());\n        state.setRandomSeed(model.getRandomSeed());\n\n        state.setWeight(Arrays.copyOf(model.getWeightArray(), model.size()));\n        state.setPointIndex(ArrayPacking.pack(model.getPointIndexArray(), model.size(), state.isCompressed()));\n        if (model.isStoreSequenceIndexesEnabled()) {\n            state.setSequenceIndex(Arrays.copyOf(model.getSequenceIndexArray(), model.size()));\n\n        }\n\n        return state;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/sampler/CompactSamplerState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.sampler;\n\nimport static com.amazon.randomcutforest.state.Version.V2_0;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\n/**\n * A data object representing the state of a\n * {@link com.amazon.randomcutforest.sampler.CompactSampler}.\n */\n@Data\npublic class CompactSamplerState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    /**\n     * a version string for extensibility\n     */\n    private String version = V2_0;\n\n    /**\n     * An array of sampler weights.\n     */\n    private float[] weight;\n    /**\n     * An array of index values identifying the points in the sample. 
These indexes\n     * will correspond to a {@link com.amazon.randomcutforest.store.PointStore}.\n     */\n    private int[] pointIndex;\n    /**\n     * boolean for deciding to store sequence indices\n     */\n    private boolean storeSequenceIndicesEnabled;\n    /**\n     * The sequence indexes of points in the sample.\n     */\n    private long[] sequenceIndex;\n    /**\n     * The number of points in the sample.\n     */\n    private int size;\n    /**\n     * The maximum number of points that the sampler can contain.\n     */\n    private int capacity;\n    /**\n     * The behavior of the sampler at initial sampling\n     */\n    private double initialAcceptFraction;\n    /**\n     * The time-decay parameter for this sampler\n     */\n    private double timeDecay;\n    /**\n     * Last update of timeDecay\n     */\n    private long sequenceIndexOfMostRecentTimeDecayUpdate;\n    /**\n     * maximum timestamp seen in update/computeWeight\n     */\n    private long maxSequenceIndex;\n    /**\n     * boolean indicating if the compression is enabled\n     */\n    private boolean compressed;\n    /**\n     * saving the random state, if desired\n     */\n    private long randomSeed;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/statistics/DeviationMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.statistics;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class DeviationMapper implements IStateMapper<Deviation, DeviationState> {\n\n    @Override\n    public Deviation toModel(DeviationState state, long seed) {\n        return new Deviation(state.getDiscount(), state.getWeight(), state.getSumSquared(), state.getSum(),\n                state.getCount());\n    }\n\n    @Override\n    public DeviationState toState(Deviation model) {\n        DeviationState state = new DeviationState();\n        state.setDiscount(model.getDiscount());\n        state.setSum(model.getSum());\n        state.setSumSquared(model.getSumSquared());\n        state.setWeight(model.getWeight());\n        state.setCount(model.getCount());\n        return state;\n    }\n\n    public static DeviationState[] getStates(Deviation[] list, DeviationMapper mapper) {\n        DeviationState[] states = null;\n        if (list != null) {\n            states = new DeviationState[list.length];\n            for (int i = 0; i < list.length; i++) {\n                states[i] = mapper.toState(list[i]);\n            }\n        }\n        return states;\n    }\n\n    public static Deviation[] 
getDeviations(DeviationState[] states, DeviationMapper mapper) {\n        Deviation[] deviations = null;\n        if (states != null) {\n            deviations = new Deviation[states.length];\n            for (int i = 0; i < states.length; i++) {\n                deviations[i] = mapper.toModel(states[i]);\n            }\n        }\n        return deviations;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/statistics/DeviationState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.statistics;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\n@Data\npublic class DeviationState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private double discount;\n\n    private double weight;\n\n    private double sumSquared;\n\n    private double sum;\n\n    private int count;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/store/NodeStoreState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.store;\n\nimport static com.amazon.randomcutforest.state.Version.V2_0;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\n@Data\npublic class NodeStoreState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V2_0;\n\n    private int capacity;\n    private boolean compressed;\n    private int[] cutDimension;\n    private byte[] cutValueData;\n    private String precision;\n    private int root;\n\n    private boolean canonicalAndNotALeaf;\n    private int size;\n    private int[] leftIndex;\n    private int[] rightIndex;\n\n    private int[] nodeFreeIndexes;\n    private int nodeFreeIndexPointer;\n    private int[] leafFreeIndexes;\n    private int leafFreeIndexPointer;\n\n    private boolean partialTreeStateEnabled;\n    private int[] leafMass;\n    private int[] leafPointIndex;\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/store/PointStoreMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.store;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.Version;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.store.PointStoreLarge;\nimport com.amazon.randomcutforest.util.ArrayPacking;\n\n@Getter\n@Setter\npublic class PointStoreMapper implements IStateMapper<PointStore, PointStoreState> {\n\n    /**\n     * If true, then the arrays are compressed via simple data dependent scheme\n     */\n    private boolean compressionEnabled = true;\n\n    private int numberOfTrees = 255; // byte encoding as default\n\n    @Override\n    public PointStore toModel(PointStoreState state, long seed) {\n        checkNotNull(state.getRefCount(), \"refCount must not be null\");\n        checkNotNull(state.getPointData(), \"pointData must not be null\");\n        checkArgument(Precision.valueOf(state.getPrecision()) == Precision.FLOAT_32,\n                \"precision must be \" + Precision.FLOAT_32);\n  
      int indexCapacity = state.getIndexCapacity();\n        int dimensions = state.getDimensions();\n        float[] store = ArrayPacking.unpackFloats(state.getPointData(), state.getCurrentStoreCapacity() * dimensions);\n        int startOfFreeSegment = state.getStartOfFreeSegment();\n        int[] refCount = ArrayPacking.unpackInts(state.getRefCount(), indexCapacity, state.isCompressed());\n        int[] locationList = new int[indexCapacity];\n        Arrays.fill(locationList, PointStore.INFEASIBLE_LOCN);\n        int[] tempList = ArrayPacking.unpackInts(state.getLocationList(), state.isCompressed());\n        if (!state.getVersion().equals(Version.V3_0)) {\n            int shingleSize = state.getShingleSize();\n            int baseDimension = dimensions / shingleSize;\n            for (int i = 0; i < tempList.length; i++) {\n                locationList[i] = tempList[i] / baseDimension;\n            }\n        } else {\n            int[] duplicateRefs = null;\n            if (state.getDuplicateRefs() != null) {\n                duplicateRefs = ArrayPacking.unpackInts(state.getDuplicateRefs(), state.isCompressed());\n                checkArgument(duplicateRefs.length % 2 == 0, \" corrupt duplicates\");\n                for (int i = 0; i < duplicateRefs.length; i += 2) {\n                    refCount[duplicateRefs[i]] += duplicateRefs[i + 1];\n                }\n            }\n            int nextLocation = 0;\n            for (int i = 0; i < indexCapacity; i++) {\n                if (refCount[i] > 0) {\n                    locationList[i] = tempList[nextLocation];\n                    ++nextLocation;\n                } else {\n                    locationList[i] = PointStoreLarge.INFEASIBLE_LOCN;\n                }\n            }\n        }\n\n        return PointStore.builder().internalRotationEnabled(state.isRotationEnabled())\n                .internalShinglingEnabled(state.isInternalShinglingEnabled()).indexCapacity(indexCapacity)\n                
.currentStoreCapacity(state.getCurrentStoreCapacity()).capacity(state.getCapacity())\n                .shingleSize(state.getShingleSize()).dimensions(state.getDimensions()).locationList(locationList)\n                .nextTimeStamp(state.getLastTimeStamp()).startOfFreeSegment(startOfFreeSegment).refCount(refCount)\n                .knownShingle(state.getInternalShingle()).store(store).build();\n    }\n\n    @Override\n    public PointStoreState toState(PointStore model) {\n        model.compact();\n        PointStoreState state = new PointStoreState();\n        state.setVersion(Version.V3_0);\n        state.setCompressed(compressionEnabled);\n        state.setDimensions(model.getDimensions());\n        state.setCapacity(model.getCapacity());\n        state.setShingleSize(model.getShingleSize());\n        state.setDirectLocationMap(false);\n        state.setInternalShinglingEnabled(model.isInternalShinglingEnabled());\n        state.setLastTimeStamp(model.getNextSequenceIndex());\n        if (model.isInternalShinglingEnabled()) {\n            state.setInternalShingle(toDoubleArray(model.getInternalShingle()));\n            state.setRotationEnabled(model.isInternalRotationEnabled());\n        }\n        state.setDynamicResizingEnabled(true);\n\n        state.setCurrentStoreCapacity(model.getCurrentStoreCapacity());\n        state.setIndexCapacity(model.getIndexCapacity());\n\n        state.setStartOfFreeSegment(model.getStartOfFreeSegment());\n        state.setPrecision(Precision.FLOAT_32.name());\n        int[] refcount = model.getRefCount();\n        int[] tempList = model.getLocationList();\n        int[] locationList = new int[model.getIndexCapacity()];\n        int[] duplicateRefs = new int[2 * model.getIndexCapacity()];\n        int size = 0;\n        int duplicateSize = 0;\n        for (int i = 0; i < refcount.length; i++) {\n            if (refcount[i] > 0) {\n                locationList[size] = tempList[i];\n                ++size;\n                if 
(refcount[i] > numberOfTrees) {\n                    duplicateRefs[duplicateSize] = i;\n                    duplicateRefs[duplicateSize + 1] = refcount[i] - numberOfTrees;\n                    refcount[i] = numberOfTrees;\n                    duplicateSize += 2;\n                }\n            }\n        }\n        state.setRefCount(ArrayPacking.pack(refcount, refcount.length, state.isCompressed()));\n        state.setDuplicateRefs(ArrayPacking.pack(duplicateRefs, duplicateSize, state.isCompressed()));\n        state.setLocationList(ArrayPacking.pack(locationList, size, state.isCompressed()));\n        state.setPointData(ArrayPacking.pack(model.getStore(), model.getStartOfFreeSegment()));\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/store/PointStoreState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.store;\n\nimport static com.amazon.randomcutforest.state.Version.V2_0;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\n/**\n * A class for storing the state of a\n * {@link com.amazon.randomcutforest.store.PointStore}. Depending on which kind\n * of point store was serialized, one of the fields {@code doubleData} or\n * {@code floatData} will be null.\n */\n@Data\npublic class PointStoreState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    /**\n     * version string for future extensibility\n     */\n    private String version = V2_0;\n    /**\n     * size of each point saved\n     */\n    private int dimensions;\n    /**\n     * capacity of the store\n     */\n    private int capacity;\n    /**\n     * shingle size of the points\n     */\n    private int shingleSize;\n    /**\n     * precision of points in the point store state\n     */\n    private String precision;\n    /**\n     * location beyond which the store has no useful information\n     */\n    private int startOfFreeSegment;\n    /**\n     * Point data converted to raw bytes.\n     */\n    private byte[] pointData;\n    /**\n     * use compressed representatiomn for arrays\n     */\n    private boolean compressed;\n    /**\n     * An array of reference counts for each stored point.\n     */\n    
private int[] refCount;\n    /**\n     * is direct mapping enabled\n     */\n    private boolean directLocationMap;\n    /**\n     * location data for indirect maps\n     */\n    private int[] locationList;\n    /**\n     * reverse location data to be usable in future\n     */\n    private int[] reverseLocationList;\n    /**\n     * flag to avoid null issues in the future\n     */\n    private boolean reverseAvailable;\n    /**\n     * boolean indicating use of overlapping shingles; need not be used in certain\n     * cases\n     */\n    private boolean internalShinglingEnabled;\n    /**\n     * internal shingle\n     */\n    private double[] internalShingle;\n    /**\n     * last timestamp\n     */\n    private long lastTimeStamp;\n    /**\n     * rotation for internal shingles\n     */\n    private boolean rotationEnabled;\n    /**\n     * dynamic resizing\n     */\n    private boolean dynamicResizingEnabled;\n    /**\n     * current store capacity\n     */\n    private int currentStoreCapacity;\n    /**\n     * current index capacity\n     */\n    private int indexCapacity;\n\n    /**\n     * reduces the effect of repeated points; used in version 3.0\n     */\n    private int[] duplicateRefs;\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/tree/AbstractNodeStoreMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.tree;\n\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;\n\nimport java.util.concurrent.ArrayBlockingQueue;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.state.IContextualStateMapper;\nimport com.amazon.randomcutforest.state.Version;\nimport com.amazon.randomcutforest.state.store.NodeStoreState;\nimport com.amazon.randomcutforest.tree.AbstractNodeStore;\nimport com.amazon.randomcutforest.util.ArrayPacking;\n\n@Getter\n@Setter\npublic class AbstractNodeStoreMapper\n        implements IContextualStateMapper<AbstractNodeStore, NodeStoreState, CompactRandomCutTreeContext> {\n\n    private int root;\n\n    @Override\n    public AbstractNodeStore toModel(NodeStoreState state, CompactRandomCutTreeContext compactRandomCutTreeContext,\n            long seed) {\n        int capacity = state.getCapacity();\n        int[] cutDimension = null;\n        int[] leftIndex = null;\n        int[] rightIndex = null;\n        float[] cutValue = null;\n        if (root != Null && root < capacity) {\n            cutDimension = ArrayPacking.unpackInts(state.getCutDimension(), capacity, state.isCompressed());\n            cutValue = ArrayPacking.unpackFloats(state.getCutValueData(), capacity);\n            leftIndex = 
ArrayPacking.unpackInts(state.getLeftIndex(), capacity, state.isCompressed());\n            rightIndex = ArrayPacking.unpackInts(state.getRightIndex(), capacity, state.isCompressed());\n            reverseBits(state.getSize(), leftIndex, rightIndex, capacity);\n        }\n        // note boundingBoxCache is not set deliberately\n        return AbstractNodeStore.builder().capacity(capacity).useRoot(root).leftIndex(leftIndex).rightIndex(rightIndex)\n                .cutDimension(cutDimension).cutValues(cutValue).dimension(compactRandomCutTreeContext.getDimension())\n                .build();\n    }\n\n    @Override\n    public NodeStoreState toState(AbstractNodeStore model) {\n        NodeStoreState state = new NodeStoreState();\n        int capacity = model.getCapacity();\n        state.setVersion(Version.V3_0);\n        state.setCapacity(capacity);\n        state.setCompressed(true);\n        state.setPartialTreeStateEnabled(true);\n        state.setPrecision(Precision.FLOAT_32.name());\n\n        int[] leftIndex = model.getLeftIndex();\n        int[] rightIndex = model.getRightIndex();\n        int[] cutDimension = model.getCutDimension();\n        float[] cutValues = model.getCutValues();\n\n        int[] map = new int[capacity];\n        int size = reorderNodesInBreadthFirstOrder(map, leftIndex, rightIndex, capacity);\n        state.setSize(size);\n        boolean check = root != Null && root < capacity;\n        state.setCanonicalAndNotALeaf(check);\n        if (check) { // can have a canonical representation saving a lot of space\n            int[] reorderedLeftArray = new int[size];\n            int[] reorderedRightArray = new int[size];\n            int[] reorderedCutDimension = new int[size];\n            float[] reorderedCutValue = new float[size];\n            for (int i = 0; i < size; i++) {\n                reorderedLeftArray[i] = (leftIndex[map[i]] < capacity) ? 1 : 0;\n                reorderedRightArray[i] = (rightIndex[map[i]] < capacity) ? 
1 : 0;\n                reorderedCutDimension[i] = cutDimension[map[i]];\n                reorderedCutValue[i] = cutValues[map[i]];\n            }\n            state.setLeftIndex(ArrayPacking.pack(reorderedLeftArray, state.isCompressed()));\n            state.setRightIndex(ArrayPacking.pack(reorderedRightArray, state.isCompressed()));\n            state.setSize(model.size());\n            state.setCutDimension(ArrayPacking.pack(reorderedCutDimension, state.isCompressed()));\n            state.setCutValueData(ArrayPacking.pack(reorderedCutValue));\n        }\n        return state;\n    }\n\n    /**\n     * The follong function takes a pair of left and right indices for a regular\n     * binary tree (each node has 0 or 2 children) and where internal nodes are in\n     * the range [0..capacity-1] the indices are represented as : 0 for internal\n     * node; 1 for leaf node; the root is 0 and every non-leaf node is added to a\n     * queue; the number assigned to that node is the number in the queue Note that\n     * this implies that the left/right children can be represented by bit-arrays\n     *\n     * This function reflates the bits to the queue numbers\n     *\n     * @param size       the size of the two arrays, typically this is capacity; but\n     *                   can be different in RCF2.0\n     * @param leftIndex  the left bitarray\n     * @param rightIndex the right bitarray\n     * @param capacity   the number of internal nodes (one less than number of\n     *                   leaves)\n     */\n    protected static void reverseBits(int size, int[] leftIndex, int[] rightIndex, int capacity) {\n        int nodeCounter = 1;\n        for (int i = 0; i < size; i++) {\n            if (leftIndex[i] != 0) {\n                leftIndex[i] = nodeCounter++;\n            } else {\n                leftIndex[i] = capacity;\n            }\n            if (rightIndex[i] != 0) {\n                rightIndex[i] = nodeCounter++;\n            } else {\n                
rightIndex[i] = capacity;\n            }\n        }\n        for (int i = size; i < leftIndex.length; i++) {\n            leftIndex[i] = rightIndex[i] = capacity;\n        }\n    }\n\n    /**\n     * The following function reorders the nodes stored in the tree in a breadth\n     * first order; Note that a regular binary tree where each internal node has 2\n     * chidren, as is the case for AbstractRandomCutTree or any tree produced in a\n     * Random Forest ensemble (not restricted to Random Cut Forests), has maxsize -\n     * 1 internal nodes for maxSize number of leaves. The leaves are numbered 0 +\n     * (maxsize), 1 + (maxSize), ..., etc. in that BFS ordering. The root is node 0.\n     *\n     * Note that if the binary tree is a complete binary tree, then the numbering\n     * would correspond to the well known heuristic where children of node index i\n     * are numbered 2*i and 2*i + 1. The trees in AbstractCompactRandomCutTree will\n     * not be complete binary trees. But a similar numbering enables us to compress\n     * the entire structure of the tree into two bit arrays corresponding to\n     * presence of left and right children. The idea can be viewed as similar to\n     * Zak's numbering for regular binary trees Lexicographic generation of binary\n     * trees, S. Zaks, TCS volume 10, pages 63-82, 1980, that uses depth first\n     * numbering. However an extensive literature exists on this topic.\n     *\n     * The overall relies on the extra advantage that we can use two bit sequences;\n     * the left and right child pointers which appears to be simple. While it is\n     * feasible to always maintain this order, that would complicate the standard\n     * binary search tree pattern and this tranformation is used when the tree is\n     * serialized. 
Note that while there is savings in representing the tree\n     * structure into two bit arrays, the bulk of the serialization corresponds to\n     * the payload at the nodes (cuts, dimensions for internal nodes and index to\n     * pointstore, number of copies for the leaves). The translation to the bits is\n     * handled by the NodeStoreMapper. The algorithm here corresponds to just\n     * producing the cannoical order.\n     *\n     * The algorithm renumbers the nodes in BFS ordering.\n     */\n    public int reorderNodesInBreadthFirstOrder(int[] map, int[] leftIndex, int[] rightIndex, int capacity) {\n\n        if ((root != Null) && (root < capacity)) {\n            int currentNode = 0;\n            ArrayBlockingQueue<Integer> nodeQueue = new ArrayBlockingQueue<>(capacity);\n            nodeQueue.add(root);\n            while (!nodeQueue.isEmpty()) {\n                int head = nodeQueue.poll();\n                int leftChild = leftIndex[head];\n                if (leftChild < capacity) {\n                    nodeQueue.add(leftChild);\n                }\n                int rightChild = rightIndex[head];\n                if (rightChild < capacity) {\n                    nodeQueue.add(rightChild);\n                }\n                map[currentNode] = head;\n                currentNode++;\n            }\n            return currentNode;\n        }\n        return 0;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/tree/CompactRandomCutTreeContext.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.tree;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.store.IPointStore;\n\n@Data\npublic class CompactRandomCutTreeContext {\n    private int maxSize;\n    private int dimension;\n    private IPointStore<?, ?> pointStore;\n    private Precision precision;\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/tree/CompactRandomCutTreeState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.tree;\n\nimport static com.amazon.randomcutforest.state.Version.V2_0;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.store.NodeStoreState;\n\n@Data\npublic class CompactRandomCutTreeState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V2_0;\n    private int root;\n    private int maxSize;\n    private int outputAfter;\n    private boolean storeSequenceIndexesEnabled;\n    private boolean centerOfMassEnabled;\n    private NodeStoreState nodeStoreState;\n    private double boundingBoxCacheFraction;\n    private boolean partialTreeState;\n    private long seed;\n    private int id;\n    private int dimensions;\n    private long staticSeed;\n    private float weight;\n    private byte[] auxiliaryData;\n    private boolean hasAuxiliaryData;\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/state/tree/RandomCutTreeMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.tree;\n\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.state.IContextualStateMapper;\nimport com.amazon.randomcutforest.state.Version;\nimport com.amazon.randomcutforest.tree.AbstractNodeStore;\nimport com.amazon.randomcutforest.tree.RandomCutTree;\n\n@Getter\n@Setter\npublic class RandomCutTreeMapper\n        implements IContextualStateMapper<RandomCutTree, CompactRandomCutTreeState, CompactRandomCutTreeContext> {\n\n    @Override\n    public RandomCutTree toModel(CompactRandomCutTreeState state, CompactRandomCutTreeContext context, long seed) {\n\n        int dimension = (state.getDimensions() != 0) ? 
state.getDimensions() : context.getPointStore().getDimensions();\n        context.setDimension(dimension);\n        AbstractNodeStoreMapper nodeStoreMapper = new AbstractNodeStoreMapper();\n        nodeStoreMapper.setRoot(state.getRoot());\n        AbstractNodeStore nodeStore = nodeStoreMapper.toModel(state.getNodeStoreState(), context);\n\n        // boundingBoxcache is not set deliberately;\n        // it should be set after the partial tree is complete\n        // likewise all the leaves, including the root, should be set to\n        // nodeStore.getCapacity()\n        // such that when the partial tree is filled, the correct mass is computed\n        // note that this has no effect on the cuts -- since a single node tree has no\n        // cuts\n        // uncommenting and using the following line would result in such an incorrect\n        // computation\n        // in testRoundTripForSingleNodeForest() where the masses of the trees would be\n        // different by 1\n        // and thus outputAfter() would be triggered differently.\n        // int newRoot = state.getRoot();\n        int newRoot = nodeStore.isLeaf(state.getRoot()) ? 
nodeStore.getCapacity() : state.getRoot();\n        RandomCutTree tree = new RandomCutTree.Builder().dimension(dimension)\n                .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).capacity(state.getMaxSize())\n                .setRoot(newRoot).randomSeed(state.getSeed()).pointStoreView(context.getPointStore())\n                .nodeStore(nodeStore).centerOfMassEnabled(state.isCenterOfMassEnabled())\n                .outputAfter(state.getOutputAfter()).build();\n        return tree;\n    }\n\n    @Override\n    public CompactRandomCutTreeState toState(RandomCutTree model) {\n        CompactRandomCutTreeState state = new CompactRandomCutTreeState();\n        state.setVersion(Version.V3_0);\n        int root = model.getRoot();\n        AbstractNodeStoreMapper nodeStoreMapper = new AbstractNodeStoreMapper();\n        nodeStoreMapper.setRoot(root);\n        state.setNodeStoreState(nodeStoreMapper.toState(model.getNodeStore()));\n        // the compression of nodeStore would change the root\n        if ((root != Null) && (root < model.getNumberOfLeaves() - 1)) {\n            root = 0; // reordering is forced\n        }\n        state.setRoot(root);\n        state.setMaxSize(model.getNumberOfLeaves());\n        state.setPartialTreeState(true);\n        state.setStoreSequenceIndexesEnabled(model.isStoreSequenceIndexesEnabled());\n        state.setCenterOfMassEnabled(model.isCenterOfMassEnabled());\n        state.setBoundingBoxCacheFraction(model.getBoundingBoxCacheFraction());\n        state.setOutputAfter(model.getOutputAfter());\n        state.setSeed(model.getRandomSeed());\n        state.setDimensions(model.getDimension());\n\n        return state;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/statistics/Deviation.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.statistics;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\n/**\n * This class maintains a simple discounted statistics. Setters are avoided\n * except for discount rate which is useful as initialization from raw scores\n */\npublic class Deviation {\n\n    protected double discount;\n\n    protected double weight = 0;\n\n    protected double sumSquared = 0;\n\n    protected double sum = 0;\n\n    protected int count = 0;\n\n    public Deviation() {\n        discount = 0;\n    }\n\n    public Deviation(double discount) {\n        checkArgument(0 <= discount && discount < 1, \"incorrect discount parameter\");\n        this.discount = discount;\n    }\n\n    public Deviation(double discount, double weight, double sumSquared, double sum, int count) {\n        this.discount = discount;\n        this.weight = weight;\n        this.sumSquared = sumSquared;\n        this.sum = sum;\n        this.count = count;\n    }\n\n    public Deviation copy() {\n        return new Deviation(this.discount, this.weight, this.sumSquared, this.sum, this.count);\n    }\n\n    public double getMean() {\n        return (weight <= 0) ? 
0 : sum / weight;\n    }\n\n    public void update(double score) {\n        double factor = 1 - discount;\n        sum = sum * factor + score;\n        sumSquared = sumSquared * factor + score * score;\n        weight = weight * factor + 1.0;\n        ++count;\n    }\n\n    public double getDeviation() {\n        if (weight <= 0) {\n            return 0;\n        }\n        double temp = sum / weight;\n        double answer = sumSquared / weight - temp * temp;\n        return (answer > 0) ? Math.sqrt(answer) : 0;\n    }\n\n    public boolean isEmpty() {\n        return weight == 0;\n    }\n\n    public double getDiscount() {\n        return discount;\n    }\n\n    public void setDiscount(double discount) {\n        checkArgument(discount >= 0, \"cannot be negative\");\n        checkArgument(discount < 1, \"can be at most 1\");\n        this.discount = discount;\n    }\n\n    public double getSum() {\n        return sum;\n    }\n\n    public double getSumSquared() {\n        return sumSquared;\n    }\n\n    public double getWeight() {\n        return weight;\n    }\n\n    public int getCount() {\n        return count;\n    }\n\n    public void setCount(int count) {\n        this.count = count;\n    }\n\n    public void reset() {\n        weight = 0;\n        sum = 0;\n        count = 0;\n        sumSquared = 0;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/IPointStore.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\n/**\n * A store for points of precision type P, which can be double[] or float[]\n * which can be added to a store by the update coordinator and made accessible\n * to the trees in a read only manner.\n * \n * @param <Point> type of input point\n */\npublic interface IPointStore<PointReference, Point> extends IPointStoreView<Point> {\n    /**\n     * Adds to the store; there may be a loss of precision if enableFloat is on in\n     * the Forest level. 
But external interface of the forest is double[]\n     *\n     * Note that delete is automatic, that is when no trees are accessing the point\n     * \n     * @param point             point to be added\n     * @param sequenceNum       sequence number of the point\n     * @param updateShingleOnly only update the shingle but do not generate a point\n     *                          useful when we do not want to add a point with too\n     *                          many imputed values\n     * @return reference of the stored point\n     */\n    PointReference add(Point point, long sequenceNum, boolean updateShingleOnly);\n\n    default PointReference add(Point point, long sequenceNum) {\n        return add(point, sequenceNum, false);\n    }\n\n    // increments and returns the incremented value\n    int incrementRefCount(int index);\n\n    // decrements and returns the decremented value\n    int decrementRefCount(int index);\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/IPointStoreView.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport java.util.List;\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.summarization.ICluster;\n\n/**\n * A view of the PointStore that forces a read only access to the store.\n */\npublic interface IPointStoreView<Point> {\n\n    int getDimensions();\n\n    int getCapacity();\n\n    float[] getNumericVector(int index);\n\n    float[] getInternalShingle();\n\n    long getNextSequenceIndex();\n\n    float[] transformToShingledPoint(Point input);\n\n    boolean isInternalRotationEnabled();\n\n    boolean isInternalShinglingEnabled();\n\n    int getShingleSize();\n\n    int[] transformIndices(int[] indexList);\n\n    /**\n     * Prints the point given the index, irrespective of the encoding of the point.\n     * Used in exceptions and error messages\n     * \n     * @param index index of the point in the store\n     * @return a string that can be printed\n     */\n    String toString(int index);\n\n    /**\n     * a function that exposes an L1 clustering of the points stored in pointstore\n     * \n     * @param maxAllowed              the maximum number of clusters one is\n     *                                interested in\n     * @param shrinkage               a parameter used in CURE algorithm that can\n     *                                produce a combination of 
behaviors (=1\n     *                                corresponds to centroid clustering, =0\n     *                                resembles robust Minimum Spanning Tree)\n     * @param numberOfRepresentatives another parameter used to control the\n     *                                plausible (potentially non-spherical) shapes\n     *                                of the clusters\n     * @param separationRatio         a parameter that controls how aggressively we\n     *                                go below maxAllowed -- this is often set to a\n     *                                DEFAULT_SEPARATION_RATIO_FOR_MERGE\n     * @param distance                a distance function\n     * @param previous                a (possibly null) list of previous clusters\n     *                                which can be used to seed the current clusters\n     *                                to ensure some smoothness\n     * @return a list of clusters\n     */\n\n    List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,\n            double separationRatio, BiFunction<float[], float[], Double> distance, List<ICluster<float[]>> previous);\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/IndexIntervalManager.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkState;\n\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Stack;\n\n/**\n * This class defines common functionality for Store classes, including\n * maintaining the stack of free pointers.\n */\n\npublic class IndexIntervalManager {\n\n    protected int capacity;\n    protected int[] freeIndexesStart;\n    protected int[] freeIndexesEnd;\n    protected int lastInUse;\n\n    public IndexIntervalManager(int capacity) {\n        checkArgument(capacity > 0, \"incorrect parameters\");\n        freeIndexesEnd = new int[1];\n        freeIndexesStart = new int[1];\n        lastInUse = 1;\n        this.capacity = capacity;\n        freeIndexesStart[0] = 0;\n        freeIndexesEnd[0] = capacity - 1;\n    }\n\n    static BitSet toBits(int[] refCount) {\n        checkArgument(refCount != null, \"not a meaningful array input\");\n        BitSet bits = new BitSet(refCount.length);\n        for (int i = 0; i < refCount.length; i++) {\n            if (refCount[i] > 0) {\n                bits.set(i);\n            }\n        }\n        return bits;\n    }\n\n    public IndexIntervalManager(int[] refCount, int capacity) {\n        this(capacity, refCount.length, 
toBits(refCount));\n    }\n\n    public IndexIntervalManager(int capacity, int length, BitSet bits) {\n        checkArgument(bits != null, \" null bitset not allowed\");\n        this.capacity = capacity;\n        int first = bits.nextClearBit(0);\n        Stack<int[]> stack = new Stack<>();\n        while (first < length) {\n            int last = bits.nextSetBit(first) - 1;\n            if (last >= first) {\n                stack.push(new int[] { first, last });\n                first = bits.nextClearBit(last + 1);\n                if (first < 0) {\n                    break;\n                }\n            } else { // we do not distinguish between all full and all empty\n                if (first < length - 1) {\n                    if (bits.nextClearBit(first + 1) == first + 1) {\n                        stack.push(new int[] { first, length - 1 });\n                    } else {\n                        stack.push(new int[] { first, first });\n                    }\n                } else {\n                    stack.push(new int[] { length - 1, length - 1 });\n                }\n                break;\n            }\n        }\n        lastInUse = stack.size();\n        freeIndexesEnd = new int[lastInUse + 1];\n        freeIndexesStart = new int[lastInUse + 1];\n        this.capacity = capacity;\n        int count = 0;\n        while (stack.size() > 0) {\n            int[] interval = stack.pop();\n            freeIndexesStart[count] = interval[0];\n            freeIndexesEnd[count] = interval[1];\n            ++count;\n        }\n    }\n\n    public void extendCapacity(int newCapacity) {\n        checkArgument(newCapacity > capacity, \" incorrect call, we can only increase capacity\");\n        // the current capacity need not be the final capacity, for example in case of\n        // point store\n        if (freeIndexesStart.length == lastInUse) {\n            freeIndexesStart = Arrays.copyOf(freeIndexesStart, lastInUse + 1);\n            freeIndexesEnd = 
Arrays.copyOf(freeIndexesEnd, lastInUse + 1);\n        }\n        freeIndexesStart[lastInUse] = capacity;\n        freeIndexesEnd[lastInUse] = (newCapacity - 1);\n        lastInUse += 1;\n        capacity = newCapacity;\n\n    }\n\n    public boolean isEmpty() {\n        return (lastInUse == 0);\n    }\n\n    /**\n     * @return the maximum number of nodes whose data can be stored.\n     */\n    public int getCapacity() {\n        return capacity;\n    }\n\n    /**\n     * @return the number of indices which are being maintained\n     */\n    public int size() {\n        int sum = 0;\n        for (int i = 0; i < lastInUse; i++) {\n            sum += freeIndexesEnd[i] - freeIndexesStart[i] + 1;\n        }\n        return sum;\n    }\n\n    /**\n     * Take an index from the free index stack.\n     * \n     * @return a free index that can be used to store a value.\n     */\n    public int takeIndex() {\n        checkState(lastInUse > 0, \"store is full\");\n        int answer = freeIndexesStart[lastInUse - 1];\n        if (answer == freeIndexesEnd[lastInUse - 1]) {\n            lastInUse -= 1;\n        } else {\n            freeIndexesStart[lastInUse - 1] = answer + 1;\n        }\n        return answer;\n    }\n\n    /**\n     * Release an index. 
After the release, the index value may be returned in a\n     * future call to {@link #takeIndex()}.\n     * \n     * @param index The index value to release.\n     */\n    public void releaseIndex(int index) {\n        if (lastInUse > 0) {\n            int start = freeIndexesStart[lastInUse - 1];\n            int end = freeIndexesEnd[lastInUse - 1];\n            if (start == index + 1) {\n                freeIndexesStart[lastInUse - 1] = index;\n                return;\n            } else if (end + 1 == index) {\n                freeIndexesEnd[lastInUse - 1] = index;\n                return;\n            }\n        }\n        if (freeIndexesStart.length == lastInUse) {\n            freeIndexesStart = Arrays.copyOf(freeIndexesStart, lastInUse + 1);\n            freeIndexesEnd = Arrays.copyOf(freeIndexesEnd, lastInUse + 1);\n        }\n\n        freeIndexesStart[lastInUse] = index;\n        freeIndexesEnd[lastInUse] = index;\n        lastInUse += 1;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/PointStore.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.CommonUtils.checkState;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.summarization.Summarizer.iterativeClustering;\nimport static java.lang.Math.max;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Optional;\nimport java.util.Vector;\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.MultiCenter;\nimport com.amazon.randomcutforest.util.ArrayUtils;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic abstract class PointStore implements IPointStore<Integer, float[]> {\n\n    public static int INFEASIBLE_POINTSTORE_INDEX = -1;\n\n    public static int INFEASIBLE_LOCN = (int) -1;\n    /**\n     * an index manager to manage free locations\n     */\n    protected IndexIntervalManager indexManager;\n    /**\n     * generic store class\n     */\n    protected float[] store;\n    /**\n     * generic internal shingle, note that input is doubles\n     */\n    protected float[] 
internalShingle;\n    /**\n     * enable rotation of shingles; use a cyclic buffer instead of sliding window\n     */\n    boolean rotationEnabled;\n    /**\n     * last seen timestamp for internal shingling\n     */\n    protected long nextSequenceIndex;\n\n    /**\n     * refCount[i] counts of the number of trees that are currently using the point\n     * determined by locationList[i] or (for directLocationMapping) the point at\n     * store[i * dimensions]\n     */\n    protected byte[] refCount;\n\n    protected HashMap<Integer, Integer> refCountMap;\n    /**\n     * first location where new data can be safely copied;\n     */\n    int startOfFreeSegment;\n    /**\n     * overall dimension of the point (after shingling)\n     */\n    int dimensions;\n    /**\n     * shingle size, if known. Setting shingle size = 1 rules out overlapping\n     */\n    int shingleSize;\n    /**\n     * number of original dimensions which are shingled to produce and overall point\n     * dimensions = shingleSize * baseDimensions. 
However there is a possibility\n     * that even though the data is shingled, we may not choose to use the\n     * overlapping (say for out of order updates).\n     */\n    int baseDimension;\n\n    /**\n     * maximum capacity\n     */\n    int capacity;\n    /**\n     * current capacity of store (number of shingled points)\n     */\n    int currentStoreCapacity;\n\n    /**\n     * enabling internal shingling\n     */\n    boolean internalShinglingEnabled;\n\n    abstract void setInfeasiblePointstoreLocationIndex(int index);\n\n    abstract void extendLocationList(int newCapacity);\n\n    abstract void setLocation(int index, int location);\n\n    abstract int getLocation(int index);\n\n    /**\n     * Decrement the reference count for the given index.\n     *\n     * @param index The index value.\n     * @throws IllegalArgumentException if the index value is not valid.\n     * @throws IllegalArgumentException if the current reference count for this\n     *                                  index is non positive.\n     */\n    @Override\n    public int decrementRefCount(int index) {\n        checkArgument(index >= 0 && index < locationListLength(), \" index not supported by store\");\n        checkArgument((refCount[index] & 0xff) > 0, \" cannot decrement index\");\n        Integer value = refCountMap.remove(index);\n        if (value == null) {\n            if ((refCount[index] & 0xff) == 1) {\n                indexManager.releaseIndex(index);\n                refCount[index] = (byte) 0;\n                setInfeasiblePointstoreLocationIndex(index);\n                return 0;\n            } else {\n                int newVal = (byte) ((refCount[index] & 0xff) - 1);\n                refCount[index] = (byte) newVal;\n                return newVal;\n            }\n        } else {\n            if (value > 1) {\n                refCountMap.put(index, value - 1);\n            }\n            return value - 1 + (refCount[index] & 0xff);\n        }\n    }\n\n    /**\n     * 
takes an index from the index manager and rezises if necessary also adjusts\n     * refCount size to have increment/decrement be seamless\n     *\n     * @return an index from the index manager\n     */\n    int takeIndex() {\n        if (indexManager.isEmpty()) {\n            if (indexManager.getCapacity() < capacity) {\n                int oldCapacity = indexManager.getCapacity();\n                int newCapacity = Math.min(capacity, 1 + (int) Math.floor(1.1 * oldCapacity));\n                indexManager.extendCapacity(newCapacity);\n                refCount = Arrays.copyOf(refCount, newCapacity);\n                extendLocationList(newCapacity);\n            } else {\n                throw new IllegalStateException(\" index manager in point store is full \");\n            }\n        }\n        return indexManager.takeIndex();\n    }\n\n    protected int getAmountToWrite(float[] tempPoint) {\n        if (checkShingleAlignment(startOfFreeSegment, tempPoint)) {\n            if (!rotationEnabled\n                    || startOfFreeSegment % dimensions == (nextSequenceIndex - 1) * baseDimension % dimensions) {\n                return baseDimension;\n            }\n        } else if (!rotationEnabled) {\n            return dimensions;\n\n        }\n        // the following adds the padding for what exists;\n        // then the padding for the new part; all mod (dimensions)\n        // note that the expression is baseDimension when the condition\n        // startOfFreeSegment % dimensions == (nextSequenceIndex-1)*baseDimension %\n        // dimension\n        // is met\n        return dimensions + (dimensions - startOfFreeSegment % dimensions\n                + (int) ((nextSequenceIndex) * baseDimension) % dimensions) % dimensions;\n    }\n\n    /**\n     * Add a point to the point store and return the index of the stored point.\n     *\n     * @param point       The point being added to the store.\n     * @param sequenceNum sequence number of the point\n     * @return 
the index value of the stored point.\n     * @throws IllegalArgumentException if the length of the point does not match\n     *                                  the point store's dimensions.\n     * @throws IllegalStateException    if the point store is full.\n     */\n    public int add(double[] point, long sequenceNum) {\n        return add(toFloatArray(point), sequenceNum, false);\n    }\n\n    public Integer add(float[] point, long sequenceNum, boolean updateShingleOnly) {\n        checkArgument(internalShinglingEnabled || point.length == dimensions,\n                \"point.length must be equal to dimensions\");\n        checkArgument(!internalShinglingEnabled || point.length == baseDimension,\n                \"point.length must be equal to dimensions\");\n\n        float[] tempPoint = point;\n        nextSequenceIndex++;\n        if (internalShinglingEnabled) {\n            // rotation is supported via the output and input is unchanged\n            tempPoint = constructShingleInPlace(internalShingle, point, false);\n            if (nextSequenceIndex < shingleSize || updateShingleOnly) {\n                return INFEASIBLE_POINTSTORE_INDEX;\n            }\n        }\n        int nextIndex;\n\n        int amountToWrite = getAmountToWrite(tempPoint);\n\n        if (startOfFreeSegment > currentStoreCapacity * dimensions - amountToWrite) {\n            // try compaction and then resizing\n            compact();\n            // the compaction can change the array contents\n            amountToWrite = getAmountToWrite(tempPoint);\n            if (startOfFreeSegment > currentStoreCapacity * dimensions - amountToWrite) {\n                resizeStore();\n                checkState(startOfFreeSegment + amountToWrite <= currentStoreCapacity * dimensions, \"out of space\");\n            }\n        }\n\n        nextIndex = takeIndex();\n\n        setLocation(nextIndex, startOfFreeSegment - dimensions + amountToWrite);\n        if (amountToWrite <= dimensions) {\n           
 copyPoint(tempPoint, dimensions - amountToWrite, startOfFreeSegment, amountToWrite);\n        } else {\n            copyPoint(tempPoint, 0, startOfFreeSegment + amountToWrite - dimensions, dimensions);\n        }\n        startOfFreeSegment += amountToWrite;\n\n        refCount[nextIndex] = 1;\n        return nextIndex;\n    }\n\n    /**\n     * Increment the reference count for the given index. This operation assumes\n     * that there is currently a point stored at the given index and will throw an\n     * exception if that's not the case.\n     *\n     * @param index The index value.\n     * @throws IllegalArgumentException if the index value is not valid.\n     * @throws IllegalArgumentException if the current reference count for this\n     *                                  index is non positive.\n     */\n    public int incrementRefCount(int index) {\n        checkArgument(index >= 0 && index < locationListLength(), \" index not supported by store\");\n        checkArgument((refCount[index] & 0xff) > 0, \" not in use\");\n        Integer value = refCountMap.remove(index);\n        if (value == null) {\n            if ((refCount[index] & 0xff) == 255) {\n                refCountMap.put(index, 1);\n                return 256;\n            } else {\n                int newVal = (byte) ((refCount[index] & 0xff) + 1);\n                refCount[index] = (byte) newVal;\n                return newVal;\n            }\n        } else {\n            refCountMap.put(index, value + 1);\n            return value + 1;\n        }\n    }\n\n    @Override\n    public int getDimensions() {\n        return dimensions;\n    }\n\n    /**\n     * maximum capacity, in number of points of size dimensions\n     */\n    public int getCapacity() {\n        return capacity;\n    }\n\n    /**\n     * capacity of the indices\n     */\n    public int getIndexCapacity() {\n        return indexManager.getCapacity();\n    }\n\n    /**\n     * used in mapper\n     *\n     * @return gets the 
shingle size (if known, otherwise is 1)\n     */\n    public int getShingleSize() {\n        return shingleSize;\n    }\n\n    /**\n     * gets the current store capacity in the number of points with dimension many\n     * values\n     *\n     * @return capacity in number of points\n     */\n    public int getCurrentStoreCapacity() {\n        return currentStoreCapacity;\n    }\n\n    /**\n     * used for mappers\n     *\n     * @return the store that stores the values\n     */\n    public float[] getStore() {\n        return store;\n    }\n\n    /**\n     * used for mapper\n     *\n     * @return the array of counts referring to different points\n     */\n    public int[] getRefCount() {\n        int[] newarray = new int[refCount.length];\n        for (int i = 0; i < refCount.length; i++) {\n            newarray[i] = refCount[i] & 0xff;\n            Integer value = refCountMap.get(i);\n            if (value != null) {\n                newarray[i] += value;\n            }\n        }\n        return newarray;\n    }\n\n    /**\n     * useful in mapper to not copy\n     *\n     * @return the length of the prefix\n     */\n    public int getStartOfFreeSegment() {\n        return startOfFreeSegment;\n    }\n\n    /**\n     * used in mapper\n     *\n     * @return if shingling is performed internally\n     */\n    public boolean isInternalShinglingEnabled() {\n        return internalShinglingEnabled;\n    }\n\n    /**\n     * used in mapper and in extrapolation\n     *\n     * @return the last timestamp seen\n     */\n    public long getNextSequenceIndex() {\n        return nextSequenceIndex;\n    }\n\n    /**\n     * used to obtain the most recent shingle seen so far in case of internal\n     * shingling\n     *\n     * @return for internal shingling, returns the last seen shingle\n     */\n    public float[] getInternalShingle() {\n        checkState(internalShinglingEnabled, \"internal shingling is not enabled\");\n        return copyShingle();\n    }\n\n    /**\n    
 * The following function eliminates redundant information that builds up in the\n     * point store and shrinks the point store\n     */\n\n    abstract int locationListLength();\n\n    void alignBoundaries(int initial, int freshStart) {\n        int locn = freshStart;\n        for (int i = 0; i < initial; i++) {\n            store[locn] = 0;\n            ++locn;\n        }\n\n    }\n\n    public void compact() {\n        Vector<Integer[]> reverseReference = new Vector<>();\n        for (int i = 0; i < locationListLength(); i++) {\n            int locn = getLocation(i);\n            if (locn < currentStoreCapacity * dimensions && locn >= 0) {\n                reverseReference.add(new Integer[] { locn, i });\n            }\n        }\n        reverseReference.sort((o1, o2) -> o1[0].compareTo(o2[0]));\n        int freshStart = 0;\n        int jStatic = 0;\n        int jDynamic = 0;\n        int jEnd = reverseReference.size();\n        while (jStatic < jEnd) {\n            int blockStart = reverseReference.get(jStatic)[0];\n            int blockEnd = blockStart + dimensions;\n            int initial = 0;\n            if (rotationEnabled) {\n                initial = (dimensions - freshStart + blockStart) % dimensions;\n            }\n            int k = jStatic + 1;\n            jDynamic = jStatic + 1;\n            while (k < jEnd) {\n                int newElem = reverseReference.get(k)[0];\n                if (blockEnd >= newElem) {\n                    k += 1;\n                    jDynamic += 1;\n                    blockEnd = max(blockEnd, newElem + dimensions);\n                } else {\n                    k = jEnd;\n                }\n            }\n\n            alignBoundaries(initial, freshStart);\n            freshStart += initial;\n\n            int start = freshStart;\n            for (int i = blockStart; i < blockEnd; i++) {\n                assert (!rotationEnabled || freshStart % dimensions == i % dimensions);\n\n                if (jStatic < jEnd) 
{\n                    int locn = reverseReference.get(jStatic)[0];\n                    if (i == locn) {\n                        int newIdx = reverseReference.get(jStatic)[1];\n                        setLocation(newIdx, freshStart);\n                        jStatic += 1;\n                    }\n                }\n                freshStart += 1;\n            }\n            copyTo(start, blockStart, blockEnd - blockStart);\n\n            if (jStatic != jDynamic) {\n                throw new IllegalStateException(\"There is discepancy in indices\");\n            }\n        }\n        startOfFreeSegment = freshStart;\n    }\n\n    /**\n     * returns the number of copies of a point\n     *\n     * @param i index of a point\n     * @return number of copies of the point managed by the store\n     */\n    public int getRefCount(int i) {\n        int val = refCount[i] & 0xff;\n        Integer value = refCountMap.get(i);\n        if (value != null) {\n            val += value;\n        }\n        return val;\n    }\n\n    @Override\n    public boolean isInternalRotationEnabled() {\n        return rotationEnabled;\n    }\n\n    /**\n     *\n     * @return the number of indices stored\n     */\n    public abstract int size();\n\n    public abstract int[] getLocationList();\n\n    /**\n     * transforms a point to a shingled point if internal shingling is turned on\n     *\n     * @param point new input values\n     * @return shingled point\n     */\n    @Override\n    public float[] transformToShingledPoint(float[] point) {\n        checkNotNull(point, \"point must not be null\");\n        if (internalShinglingEnabled && point.length == baseDimension) {\n            return constructShingleInPlace(copyShingle(), point, rotationEnabled);\n        }\n        return ArrayUtils.cleanCopy(point);\n    }\n\n    private float[] copyShingle() {\n        if (!rotationEnabled) {\n            return Arrays.copyOf(internalShingle, dimensions);\n        } else {\n            float[] 
answer = new float[dimensions];\n            int offset = (int) (nextSequenceIndex * baseDimension);\n            for (int i = 0; i < dimensions; i++) {\n                answer[(offset + i) % dimensions] = internalShingle[i];\n            }\n            return answer;\n        }\n    }\n\n    /**\n     * the following function is used to update the shingle in place; it can be used\n     * to produce new copies as well\n     *\n     * @param target the array containing the shingled point\n     * @param point  the new values\n     * @return the array which now contains the updated shingle\n     */\n    protected float[] constructShingleInPlace(float[] target, float[] point, boolean rotationEnabled) {\n        if (!rotationEnabled) {\n            for (int i = 0; i < dimensions - baseDimension; i++) {\n                target[i] = target[i + baseDimension];\n            }\n            for (int i = 0; i < baseDimension; i++) {\n                target[dimensions - baseDimension + i] = (point[i] == 0.0) ? 0.0f : point[i];\n            }\n        } else {\n            int offset = ((int) (nextSequenceIndex * baseDimension) % dimensions);\n            for (int i = 0; i < baseDimension; i++) {\n                target[offset + i] = (point[i] == 0.0) ? 
0.0f : point[i];\n            }\n        }\n        return target;\n    }\n\n    /**\n     * for extrapolation and imputation, in presence of internal shingling we need\n     * to update the list of missing values from the space of the input dimensions\n     * to the shingled dimensions\n     *\n     * @param indexList list of missing values in the input point\n     * @return list of missing values in the shingled point\n     */\n    @Override\n    public int[] transformIndices(int[] indexList) {\n        checkArgument(internalShinglingEnabled, \" only allowed for internal shingling\");\n        checkArgument(indexList.length <= baseDimension, \" incorrect length\");\n        int[] results = Arrays.copyOf(indexList, indexList.length);\n        if (!rotationEnabled) {\n            for (int i = 0; i < indexList.length; i++) {\n                checkArgument(results[i] < baseDimension, \"incorrect index\");\n                results[i] += dimensions - baseDimension;\n            }\n        } else {\n            int offset = ((int) (nextSequenceIndex * baseDimension) % dimensions);\n            for (int i = 0; i < indexList.length; i++) {\n                checkArgument(results[i] < baseDimension, \"incorrect index\");\n                results[i] = (results[i] + offset) % dimensions;\n            }\n        }\n        return results;\n    }\n\n    /**\n     * a builder\n     */\n\n    public static class Builder<T extends Builder<T>> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        protected int dimensions;\n        protected int shingleSize = 1;\n        protected int baseDimension;\n        protected boolean internalRotationEnabled = false;\n        protected boolean internalShinglingEnabled = false;\n        protected int capacity;\n        protected Optional<Integer> initialPointStoreSize = Optional.empty();\n        protected int currentStoreCapacity = 0;\n        
protected int indexCapacity = 0;\n        protected float[] store = null;\n        protected double[] knownShingle = null;\n        protected int[] locationList = null;\n        protected int[] refCount = null;\n        protected long nextTimeStamp = 0;\n        protected int startOfFreeSegment = 0;\n\n        // dimension of the points being stored\n        public T dimensions(int dimensions) {\n            this.dimensions = dimensions;\n            return (T) this;\n        }\n\n        // maximum number of points in the store\n        public T capacity(int capacity) {\n            this.capacity = capacity;\n            return (T) this;\n        }\n\n        // initial size of the pointstore, dynamicResizing must be on\n        // and value cannot exceed capacity\n        public T initialSize(int initialPointStoreSize) {\n            this.initialPointStoreSize = Optional.of(initialPointStoreSize);\n            return (T) this;\n        }\n\n        // shingleSize for opportunistic compression\n        public T shingleSize(int shingleSize) {\n            this.shingleSize = shingleSize;\n            return (T) this;\n        }\n\n        // is internal shingling enabled\n        public T internalShinglingEnabled(boolean internalShinglingEnabled) {\n            this.internalShinglingEnabled = internalShinglingEnabled;\n            return (T) this;\n        }\n\n        // are shingles rotated\n        public T internalRotationEnabled(boolean internalRotationEnabled) {\n            this.internalRotationEnabled = internalRotationEnabled;\n            return (T) this;\n        }\n\n        @Deprecated\n        public T directLocationEnabled(boolean value) {\n            return (T) this;\n        }\n\n        @Deprecated\n        public T dynamicResizingEnabled(boolean value) {\n            return (T) this;\n        }\n\n        // the size of the array storing the specific points\n        // this is used for serialization\n        public T currentStoreCapacity(int 
currentStoreCapacity) {\n            this.currentStoreCapacity = currentStoreCapacity;\n            return (T) this;\n        }\n\n        // the size of the pointset being tracked\n        // this is used for serialization\n        public T indexCapacity(int indexCapacity) {\n            this.indexCapacity = indexCapacity;\n            return (T) this;\n        }\n\n        // last known shingle, if internalshingle is on\n        // this shingle is not rotated\n        // this is used for serialization\n        public T knownShingle(double[] knownShingle) {\n            this.knownShingle = knownShingle;\n            return (T) this;\n        }\n\n        // count of the points being tracked\n        // used for serialization\n        public T refCount(int[] refCount) {\n            this.refCount = refCount;\n            return (T) this;\n        }\n\n        // location of the points being tracked, if not directmapped\n        // used for serialization\n        public T locationList(int[] locationList) {\n            this.locationList = locationList;\n            return (T) this;\n        }\n\n        public T store(float[] store) {\n            this.store = store;\n            return (T) this;\n        }\n\n        // location of where points can be written\n        // used for serialization\n        public T startOfFreeSegment(int startOfFreeSegment) {\n            this.startOfFreeSegment = startOfFreeSegment;\n            return (T) this;\n        }\n\n        // the next timeStamp to accept\n        // used for serialization\n        public T nextTimeStamp(long nextTimeStamp) {\n            this.nextTimeStamp = nextTimeStamp;\n            return (T) this;\n        }\n\n        public PointStore build() {\n            if (shingleSize * capacity < Character.MAX_VALUE) {\n                return new PointStoreSmall(this);\n            } else {\n                return new PointStoreLarge(this);\n            }\n        }\n    }\n\n    public 
PointStore(PointStore.Builder builder) {\n        checkArgument(builder.dimensions > 0, \"dimensions must be greater than 0\");\n        checkArgument(builder.capacity > 0, \"capacity must be greater than 0\");\n        checkArgument(builder.shingleSize == 1 || builder.dimensions == builder.shingleSize\n                || builder.dimensions % builder.shingleSize == 0, \"incorrect use of shingle size\");\n        /**\n         * the following checks are due to mappers (kept for future)\n         */\n        if (builder.refCount != null || builder.locationList != null || builder.knownShingle != null) {\n            checkArgument(builder.refCount != null, \"reference count must be present\");\n            checkArgument(builder.locationList != null, \"location list must be present\");\n            checkArgument(builder.refCount.length == builder.indexCapacity, \"incorrect reference count length\");\n            // following may change if IndexManager is dynamically resized as well\n            checkArgument(builder.locationList.length == builder.indexCapacity, \" incorrect length of locations\");\n            checkArgument(\n                    builder.knownShingle == null\n                            || builder.internalShinglingEnabled && builder.knownShingle.length == builder.dimensions,\n                    \"incorrect shingling information\");\n        }\n\n        this.shingleSize = builder.shingleSize;\n        this.dimensions = builder.dimensions;\n        this.internalShinglingEnabled = builder.internalShinglingEnabled;\n        this.rotationEnabled = builder.internalRotationEnabled;\n        this.baseDimension = this.dimensions / this.shingleSize;\n        this.capacity = builder.capacity;\n        this.refCountMap = new HashMap<>();\n\n        if (builder.refCount == null) {\n            int size = (int) builder.initialPointStoreSize.orElse(builder.capacity);\n            currentStoreCapacity = size;\n            this.indexManager = new 
IndexIntervalManager(size);\n            startOfFreeSegment = 0;\n            refCount = new byte[size];\n            if (internalShinglingEnabled) {\n                nextSequenceIndex = 0;\n                internalShingle = new float[dimensions];\n            }\n            store = new float[currentStoreCapacity * dimensions];\n        } else {\n            this.refCount = new byte[builder.refCount.length];\n            for (int i = 0; i < refCount.length; i++) {\n                if (builder.refCount[i] >= 0 && builder.refCount[i] <= 255) {\n                    refCount[i] = (byte) builder.refCount[i];\n                } else if (builder.refCount[i] > 255) {\n                    refCount[i] = (byte) 255;\n                    refCountMap.put(i, builder.refCount[i] - 255);\n                }\n            }\n            this.startOfFreeSegment = builder.startOfFreeSegment;\n            this.nextSequenceIndex = builder.nextTimeStamp;\n            this.currentStoreCapacity = builder.currentStoreCapacity;\n            if (internalShinglingEnabled) {\n                this.internalShingle = (builder.knownShingle != null)\n                        ? Arrays.copyOf(toFloatArray(builder.knownShingle), dimensions)\n                        : new float[dimensions];\n            }\n\n            indexManager = new IndexIntervalManager(builder.refCount, builder.indexCapacity);\n            store = (builder.store == null) ? new float[currentStoreCapacity * dimensions] : builder.store;\n        }\n    }\n\n    void resizeStore() {\n        int maxCapacity = (rotationEnabled) ? 
2 * capacity : capacity;\n        int newCapacity = (int) Math.floor(Math.min(1.1 * currentStoreCapacity, maxCapacity));\n        if (newCapacity > currentStoreCapacity) {\n            float[] newStore = new float[newCapacity * dimensions];\n            System.arraycopy(store, 0, newStore, 0, currentStoreCapacity * dimensions);\n            currentStoreCapacity = newCapacity;\n            store = newStore;\n        }\n    }\n\n    boolean checkShingleAlignment(int location, float[] point) {\n        boolean test = (location - dimensions + baseDimension >= 0);\n        for (int i = 0; i < dimensions - baseDimension && test; i++) {\n            test = (((float) point[i]) == store[location - dimensions + baseDimension + i]);\n        }\n        return test;\n    }\n\n    void copyPoint(float[] point, int src, int location, int length) {\n        for (int i = 0; i < length; i++) {\n            store[location + i] = point[src + i];\n        }\n    }\n\n    protected abstract void checkFeasible(int index);\n\n    /**\n     * Get a copy of the point at the given index.\n     *\n     * @param index An index value corresponding to a storage location in this point\n     *              store.\n     * @return a copy of the point stored at the given index.\n     * @throws IllegalArgumentException if the index value is not valid.\n     * @throws IllegalArgumentException if the current reference count for this\n     *                                  index is nonpositive.\n     */\n    @Override\n    public float[] getNumericVector(int index) {\n        checkArgument(index >= 0 && index < locationListLength(), \" index not supported by store\");\n        int address = getLocation(index);\n        checkFeasible(index);\n\n        if (!rotationEnabled) {\n            return Arrays.copyOfRange(store, address, address + dimensions);\n        } else {\n            float[] answer = new float[dimensions];\n            for (int i = 0; i < dimensions; i++) {\n                
answer[(address + i) % dimensions] = store[address + i];\n            }\n            return answer;\n        }\n    }\n\n    public String toString(int index) {\n        return Arrays.toString(getNumericVector(index));\n    }\n\n    void copyTo(int dest, int source, int length) {\n        if (dest < source) {\n            for (int i = 0; i < length; i++) {\n                store[dest + i] = store[source + i];\n            }\n        }\n    }\n\n    public static Builder builder() {\n        return new Builder();\n    }\n\n    /**\n     * a function that exposes an L1 clustering of the points stored in pointstore\n     * \n     * @param maxAllowed              the maximum number of clusters one is\n     *                                interested in\n     * @param shrinkage               a parameter used in CURE algorithm that can\n     *                                produce a combination of behaviors (=1\n     *                                corresponds to centroid clustering, =0\n     *                                resembles robust Minimum Spanning Tree)\n     * @param numberOfRepresentatives another parameter used to control the\n     *                                plausible (potentially non-spherical) shapes\n     *                                of the clusters\n     * @param separationRatio         a parameter that controls how aggressively we\n     *                                go below maxAllowed -- this is often set to a\n     *                                DEFAULT_SEPARATION_RATIO_FOR_MERGE\n     * @param previous                a (possibly null) list of previous clusters\n     *                                which can be used to seed the current clusters\n     *                                to ensure some smoothness\n     * @return a list of clusters\n     */\n\n    public List<ICluster<float[]>> summarize(int maxAllowed, double shrinkage, int numberOfRepresentatives,\n            double separationRatio, BiFunction<float[], float[], Double> 
distance, List<ICluster<float[]>> previous) {\n        int[] counts = getRefCount();\n        ArrayList<Weighted<Integer>> refs = new ArrayList<>();\n        for (int i = 0; i < counts.length; i++) {\n            if (counts[i] != 0) {\n                refs.add(new Weighted<>(i, (float) counts[i]));\n            }\n        }\n        BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> MultiCenter.initialize(a, b,\n                shrinkage, numberOfRepresentatives);\n        return iterativeClustering(maxAllowed, 4 * maxAllowed, 1, refs, this::getNumericVector, distance,\n                clusterInitializer, 42, false, true, separationRatio, previous);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/PointStoreLarge.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\npublic class PointStoreLarge extends PointStore {\n\n    protected int[] locationList;\n\n    void setInfeasiblePointstoreLocationIndex(int index) {\n        locationList[index] = INFEASIBLE_LOCN;\n    };\n\n    void extendLocationList(int newCapacity) {\n        int oldCapacity = locationList.length;\n        locationList = Arrays.copyOf(locationList, newCapacity);\n        for (int i = oldCapacity; i < newCapacity; i++) {\n            locationList[i] = INFEASIBLE_LOCN;\n        }\n    };\n\n    void setLocation(int index, int location) {\n        locationList[index] = location / baseDimension;\n    }\n\n    int getLocation(int index) {\n        return baseDimension * locationList[index];\n    }\n\n    int locationListLength() {\n        return locationList.length;\n    }\n\n    public PointStoreLarge(PointStore.Builder builder) {\n        super(builder);\n        if (builder.locationList != null) {\n            locationList = Arrays.copyOf(builder.locationList, builder.locationList.length);\n        } else {\n            locationList = new int[currentStoreCapacity];\n            Arrays.fill(locationList, INFEASIBLE_LOCN);\n        }\n    }\n\n    @Override\n    public int size() {\n        int count = 
0;\n        for (int i = 0; i < locationList.length; i++) {\n            if (locationList[i] != INFEASIBLE_LOCN) {\n                ++count;\n            }\n        }\n        return count;\n    }\n\n    @Override\n    protected void checkFeasible(int index) {\n        checkArgument(locationList[index] != INFEASIBLE_LOCN, \" invalid point\");\n    }\n\n    @Override\n    public int[] getLocationList() {\n        return Arrays.copyOf(locationList, locationList.length);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/PointStoreSmall.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\npublic class PointStoreSmall extends PointStore {\n\n    public static char INFEASIBLE_SMALL_POINTSTORE_LOCN = (char) PointStore.INFEASIBLE_LOCN;\n    protected char[] locationList;\n\n    void setInfeasiblePointstoreLocationIndex(int index) {\n        locationList[index] = INFEASIBLE_SMALL_POINTSTORE_LOCN;\n    };\n\n    void extendLocationList(int newCapacity) {\n        int oldCapacity = locationList.length;\n        assert (oldCapacity < newCapacity);\n        locationList = Arrays.copyOf(locationList, newCapacity);\n        for (int i = oldCapacity; i < newCapacity; i++) {\n            locationList[i] = INFEASIBLE_SMALL_POINTSTORE_LOCN;\n        }\n    };\n\n    void setLocation(int index, int location) {\n        locationList[index] = (char) (location / baseDimension);\n        assert (baseDimension * (int) locationList[index] == location);\n    }\n\n    int getLocation(int index) {\n        return baseDimension * (int) locationList[index];\n    }\n\n    int locationListLength() {\n        return locationList.length;\n    }\n\n    public PointStoreSmall(PointStore.Builder builder) {\n        super(builder);\n        checkArgument(shingleSize * capacity < Character.MAX_VALUE, \" incorrect 
parameters\");\n        if (builder.locationList != null) {\n            locationList = new char[builder.locationList.length];\n            for (int i = 0; i < locationList.length; i++) {\n                locationList[i] = (char) builder.locationList[i];\n            }\n        } else {\n            locationList = new char[currentStoreCapacity];\n            Arrays.fill(locationList, INFEASIBLE_SMALL_POINTSTORE_LOCN);\n        }\n    }\n\n    public PointStoreSmall(int dimensions, int capacity) {\n        this(PointStore.builder().capacity(capacity).dimensions(dimensions).shingleSize(1).initialSize(capacity));\n    }\n\n    @Override\n    protected void checkFeasible(int index) {\n        checkArgument(locationList[index] != INFEASIBLE_SMALL_POINTSTORE_LOCN, \" invalid point\");\n    }\n\n    @Override\n    public int size() {\n        int count = 0;\n        for (int i = 0; i < locationList.length; i++) {\n            if (locationList[i] != INFEASIBLE_SMALL_POINTSTORE_LOCN) {\n                ++count;\n            }\n        }\n        return count;\n    }\n\n    @Override\n    public int[] getLocationList() {\n        int[] answer = new int[locationList.length];\n        for (int i = 0; i < locationList.length; i++) {\n            answer[i] = locationList[i];\n        }\n        return answer;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/store/StreamSampler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.ArrayList;\nimport java.util.Optional;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.sampler.ISampled;\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * The following class is a sampler for generic objects that allow weighted time\n * dependent sampling. It is an encapsulation of CompactSampler in\n * RandomCutForest core and is meant to be extended in multiple ways. 
Hence the\n * functions are protected and should be overriden/not used arbirarily.\n */\npublic class StreamSampler<P> {\n\n    // basic time dependent sampler\n    protected final CompactSampler sampler;\n\n    // list of objects\n    protected final ArrayList<Weighted<P>> objectList;\n\n    // managing indices\n    protected final IndexIntervalManager intervalManager;\n\n    // accounting for evicted items\n    protected Optional<P> evicted;\n\n    // sequence number used in sequential sampling\n    protected long sequenceNumber = -1L;\n\n    // number of items seen, different from sequenceNumber in case of merge\n    protected long entriesSeen = 0L;\n\n    protected boolean currentlySampling;\n\n    public static Builder<?> builder() {\n        return new Builder<>();\n    }\n\n    public StreamSampler(Builder<?> builder) {\n        sampler = new CompactSampler.Builder<>().capacity(builder.capacity)\n                .storeSequenceIndexesEnabled(builder.storeSequenceIndexesEnabled).randomSeed(builder.randomSeed)\n                .initialAcceptFraction(builder.initialAcceptFraction).timeDecay(builder.timeDecay).build();\n        objectList = new ArrayList<>(builder.capacity);\n        intervalManager = new IndexIntervalManager(builder.capacity);\n        evicted = Optional.empty();\n        currentlySampling = true;\n    }\n\n    /**\n     * a basic sampling operation that accounts for weights of items. 
This function\n     * will be overriden in future classes.\n     * \n     * @param object to be sampled\n     * @param weight weight of object (non-negative); although 0 weight implies do\n     *               not sample\n     * @return true if the object is sampled and false if the object is not sampled;\n     *         if true then there may have been an eviction which is updated\n     */\n    protected boolean sample(P object, float weight) {\n        ++sequenceNumber;\n        ++entriesSeen;\n        if (currentlySampling) {\n            if (sampler.acceptPoint(sequenceNumber, weight)) {\n                Optional<ISampled<Integer>> samplerEvicted = sampler.getEvictedPoint();\n\n                if (samplerEvicted.isPresent()) {\n                    int oldIndex = samplerEvicted.get().getValue();\n                    evicted = Optional.of(objectList.get(oldIndex).index);\n                    intervalManager.releaseIndex(oldIndex);\n                }\n                int index = intervalManager.takeIndex();\n                if (index < objectList.size()) {\n                    objectList.set(index, new Weighted<>(object, weight));\n                } else {\n                    objectList.add(new Weighted<>(object, weight));\n                }\n                sampler.addPoint(index);\n                return true;\n            }\n        }\n        evicted = Optional.empty();\n        return false;\n    }\n\n    public StreamSampler(StreamSampler<P> first, StreamSampler<P> second, int capacity, double timeDecay, long seed) {\n        checkArgument(capacity > 0, \"capacity has to be positive\");\n        double initialAcceptFraction = max(first.sampler.getInitialAcceptFraction(),\n                second.sampler.getInitialAcceptFraction());\n        // merge would remove sequenceIndex information\n\n        objectList = new ArrayList<>(capacity);\n        int[] pointList = new int[capacity];\n        float[] weightList = new float[capacity];\n        intervalManager 
= new IndexIntervalManager(capacity);\n        evicted = Optional.empty();\n        currentlySampling = true;\n        double firstUpdate = -(first.sampler.getMaxSequenceIndex() - first.sampler.getMostRecentTimeDecayUpdate())\n                * first.sampler.getTimeDecay();\n        ArrayList<Weighted<Integer>> list = new ArrayList<>();\n        int offset = first.sampler.size();\n        int[] firstList = first.sampler.getPointIndexArray();\n        float[] firstWeightList = first.sampler.getWeightArray();\n        for (int i = 0; i < offset; i++) {\n            list.add(new Weighted<>(firstList[i], (float) (firstWeightList[i] + firstUpdate)));\n        }\n        double secondUpdate = -(second.sampler.getMaxSequenceIndex() - second.sampler.getMostRecentTimeDecayUpdate())\n                * second.sampler.getTimeDecay();\n        int secondOffset = second.sampler.size();\n        int[] secondList = second.sampler.getPointIndexArray();\n        float[] secondWeightList = second.sampler.getWeightArray();\n        for (int i = 0; i < secondOffset; i++) {\n            list.add(new Weighted<>(secondList[i] + offset, (float) (secondWeightList[i] + secondUpdate)));\n        }\n        list.sort((o1, o2) -> Float.compare(o1.weight, o2.weight));\n        int size = min(capacity, list.size());\n        for (int j = size - 1; j >= 0; j--) {\n            int index = intervalManager.takeIndex();\n            pointList[index] = index;\n            weightList[index] = list.get(j).weight;\n            if (list.get(j).index < offset) {\n                objectList.add(first.objectList.get(list.get(j).index));\n            } else {\n                objectList.add(second.objectList.get(list.get(j).index - offset));\n            }\n        }\n        // sequence number corresponds to linear order of time\n        this.sequenceNumber = max(first.sequenceNumber, second.sequenceNumber);\n        // entries seen is the sum\n        this.entriesSeen = first.entriesSeen + 
second.entriesSeen;\n        sampler = new CompactSampler.Builder<>().capacity(capacity).storeSequenceIndexesEnabled(false).randomSeed(seed)\n                .initialAcceptFraction(initialAcceptFraction).timeDecay(timeDecay).pointIndex(pointList)\n                .weight(weightList).randomSeed(seed).maxSequenceIndex(this.sequenceNumber)\n                .mostRecentTimeDecayUpdate(this.sequenceNumber).build();\n    }\n\n    public boolean isCurrentlySampling() {\n        return currentlySampling;\n    }\n\n    public void pauseSampling() {\n        currentlySampling = false;\n    }\n\n    public void resumeSampling() {\n        currentlySampling = true;\n    }\n\n    public ArrayList<Weighted<P>> getObjectList() {\n        return objectList;\n    }\n\n    public int getCapacity() {\n        return sampler.getCapacity();\n    }\n\n    public long getSequenceNumber() {\n        return sequenceNumber;\n    }\n\n    public long getEntriesSeen() {\n        return entriesSeen;\n    }\n\n    public static class Builder<T extends Builder<T>> {\n\n        private boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n        protected int capacity = DEFAULT_SAMPLE_SIZE;\n        protected double timeDecay = 1.0 / (DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY * capacity);\n        protected long randomSeed = new Random().nextLong();\n        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;\n\n        public T capacity(int capacity) {\n            this.capacity = capacity;\n            return (T) this;\n        }\n\n        public T randomSeed(long seed) {\n            this.randomSeed = seed;\n            return (T) this;\n        }\n\n        public T initialAcceptFraction(double initialAcceptFraction) {\n            this.initialAcceptFraction = initialAcceptFraction;\n            return (T) this;\n        }\n\n        public T timeDecay(double timeDecay) {\n            this.timeDecay = timeDecay;\n            return (T) 
this;\n        }\n\n        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n            return (T) this;\n        }\n\n        public StreamSampler build() {\n            return new StreamSampler<>(this);\n        }\n\n        public double getTimeDecay() {\n            return timeDecay;\n        }\n\n        public int getCapacity() {\n            return capacity;\n        }\n\n        public long getRandomSeed() {\n            return randomSeed;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/summarization/Center.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.summarization;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.exp;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * the following class abstracts a single centroid representation of a group of\n * points\n */\npublic class Center implements ICluster<float[]> {\n\n    float[] representative;\n    double weight;\n    ArrayList<Weighted<Integer>> assignedPoints;\n    double sumOfRadius;\n\n    double previousWeight = 0;\n    double previousSumOFRadius = 0;\n\n    Center(float[] coordinate, float weight) {\n        // explicitly copied because array elements will change\n        this.representative = Arrays.copyOf(coordinate, coordinate.length);\n        this.weight = weight;\n        this.assignedPoints = new ArrayList<>();\n    }\n\n    public static Center initialize(float[] coordinate, float weight) {\n        return new Center(coordinate, weight);\n    }\n\n    // adds a point; only the index to keep space bounds lower\n    // note that the weight may not be the entire weight of a point in case of a\n    // \"soft\" assignment\n    public void addPoint(int index, float weight, double dist, 
float[] point,\n            BiFunction<float[], float[], Double> distance) {\n        assignedPoints.add(new Weighted<>(index, weight));\n        this.weight += weight;\n        this.sumOfRadius += weight * dist;\n    }\n\n    // the following sets up reassignment of the coordinate based on the points\n    // assigned to the center\n    public void reset() {\n        assignedPoints = new ArrayList<>();\n        previousWeight = weight;\n        weight = 0;\n        previousSumOFRadius = sumOfRadius;\n    }\n\n    public double averageRadius() {\n        return (weight > 0) ? sumOfRadius / weight : 0;\n    }\n\n    // average radius computation, provides an extent measure\n    public double extentMeasure() {\n        return (weight > 0) ? sumOfRadius / weight : 0;\n    }\n\n    public double getWeight() {\n        return weight;\n    }\n\n    // a standard reassignment using the median values and NOT the mean; the mean is\n    // unlikely to\n    // provide robust convergence\n    public double recompute(Function<Integer, float[]> getPoint, boolean approx,\n            BiFunction<float[], float[], Double> distance) {\n        if (assignedPoints.size() == 0 || weight == 0.0) {\n            Arrays.fill(representative, 0); // zero out values\n            return 0;\n        }\n\n        previousSumOFRadius = sumOfRadius;\n        sumOfRadius = 0;\n        for (int i = 0; i < representative.length; i++) {\n            int index = i;\n            // the following would be significantly slow unless points are backed by arrays\n            assignedPoints\n                    .sort((o1, o2) -> Double.compare(getPoint.apply(o1.index)[index], getPoint.apply(o2.index)[index]));\n            double runningWeight = weight / 2;\n            int position = 0;\n            while (runningWeight >= 0 && position < assignedPoints.size()) {\n                if (runningWeight > assignedPoints.get(position).weight) {\n                    runningWeight -= 
assignedPoints.get(position).weight;\n                    ++position;\n                } else {\n                    break;\n                }\n            }\n            if (position == assignedPoints.size()) {\n                position--;\n            }\n            representative[index] = getPoint.apply(assignedPoints.get(position).index)[index];\n        }\n        for (int j = 0; j < assignedPoints.size(); j++) {\n            double addTerm = distance.apply(representative, getPoint.apply(assignedPoints.get(j).index))\n                    * assignedPoints.get(j).weight;\n            checkArgument(addTerm >= 0, \"distances or weights cannot be negative\");\n            sumOfRadius += addTerm;\n        }\n        return (previousSumOFRadius - sumOfRadius);\n\n    }\n\n    @Override\n    public List<Weighted<Integer>> getAssignedPoints() {\n        return assignedPoints;\n    }\n\n    // merges a center into another\n    // this can be followed by a reassignment step; however the merger uses a\n    // sigmoid based weightage\n    // for robustness\n    public void absorb(ICluster<float[]> other, BiFunction<float[], float[], Double> distance) {\n        List<Weighted<float[]>> representatives = other.getRepresentatives();\n        float[] closest = representatives.get(0).index;\n        double dist = Double.MAX_VALUE;\n        for (Weighted<float[]> e : representatives) {\n            double t = distance.apply(e.index, representative);\n            checkArgument(t >= 0, \"distances cannot be negative\");\n            if (t < dist) {\n                dist = t;\n                closest = e.index;\n            }\n        }\n\n        double otherWeight = other.getWeight();\n        double expRatio = exp(2 * (weight - otherWeight) / (weight + otherWeight));\n        double factor = expRatio / (1.0 + expRatio);\n        for (int i = 0; i < representative.length; i++) {\n            representative[i] = (float) (factor * representative[i] + (1 - factor) * closest[i]);\n   
     }\n        // distance is (approximately) the reverse of the ratio\n        // this computation is meant to be approximate\n        sumOfRadius += (weight * (1.0 - factor) + otherWeight * factor) * dist;\n        weight += otherWeight;\n        assignedPoints.addAll(other.getAssignedPoints());\n        other.reset();\n    }\n\n    public double distance(float[] point, BiFunction<float[], float[], Double> distance) {\n        double t = distance.apply(point, representative);\n        checkArgument(t >= 0, \"distance cannot be negative\");\n        return t;\n    }\n\n    @Override\n    public double distance(ICluster<float[]> other, BiFunction<float[], float[], Double> distance) {\n        return other.distance(representative, distance);\n    }\n\n    @Override\n    public float[] primaryRepresentative(BiFunction<float[], float[], Double> distance) {\n        return Arrays.copyOf(representative, representative.length);\n    }\n\n    @Override\n    public List<Weighted<float[]>> getRepresentatives() {\n        ArrayList<Weighted<float[]>> answer = new ArrayList<>();\n        answer.add(new Weighted<>(representative, (float) weight));\n        return answer;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/summarization/GenericMultiCenter.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.summarization;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.min;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * the following class abstracts a single centroid representation of a group of\n * points. The class is modeled after the well scattered representatives used in\n * CURE https://en.wikipedia.org/wiki/CURE_algorithm\n *\n * The number of representatives (refered as c in above) determines the possible\n * shapes that can be represented. Setting c=1 corresponds to stnadard centroid\n * based clustering\n *\n * The parameter shrinkage is slightly different from its usage in CURE,\n * although the idea of its use is similar. The main reason is that CURE was\n * designed for geometric spaces, and RCFSummarize is designed to support\n * arbitrary distance based clustering; once the user provides a distance\n * function from (R, R) into double based on ideas of STREAM\n * https://en.wikipedia.org/wiki/Data_stream_clustering In CURE, shrinkage was\n * used to create representatives close to the center of a cluster which is\n * impossible for generic types R. 
Instead shrinkage value in [0,1] corresponds\n * to morphing the distance function to \"pretend\" as if the distance is to the\n * primary representative of the cluster.\n *\n * This generic version does not store any assigned points. As a result the size\n * is bounded and these clusters are ideal for streaming algorithms where\n * resource usage would not increase with more data.\n */\npublic class GenericMultiCenter<R> implements ICluster<R> {\n\n    public static int DEFAULT_NUMBER_OF_REPRESENTATIVES = 5;\n    public static double DEFAULT_SHRINKAGE = 0.0;\n    int numberOfRepresentatives = DEFAULT_NUMBER_OF_REPRESENTATIVES;\n    double shrinkage = DEFAULT_SHRINKAGE;\n\n    ArrayList<Weighted<R>> representatives;\n    double weight;\n    double sumOfRadius;\n\n    double previousWeight = 0;\n    double previousSumOFRadius = 0;\n\n    GenericMultiCenter(R coordinate, float weight, double shrinkage, int numberOfRepresentatives) {\n        // explicitly copied because array elements will change\n        this.representatives = new ArrayList<>();\n        this.representatives.add(new Weighted<>(coordinate, weight));\n        this.weight = weight;\n        this.numberOfRepresentatives = numberOfRepresentatives;\n        this.shrinkage = shrinkage;\n    }\n\n    public static <R> GenericMultiCenter<R> initialize(R coordinate, float weight, double shrinkage,\n            int numberOfRepresentatives) {\n        checkArgument(shrinkage >= 0 && shrinkage <= 1.0, \" parameter has to be in [0,1]\");\n        checkArgument(numberOfRepresentatives > 0 && numberOfRepresentatives <= 100,\n                \" the number of representatives has to be in (0,100]\");\n        return new GenericMultiCenter<>(coordinate, weight, shrinkage, numberOfRepresentatives);\n    }\n\n    // adds a point; only the index to keep space bounds lower\n    // note that the weight may not be the entire weight of a point in case of a\n    // \"soft\" assignment\n    public void addPoint(int index, float 
weight, double dist, R point, BiFunction<R, R, Double> distance) {\n        // accounting for the closest representative, if there are more than one\n        Weighted<R> closest = representatives.get(0);\n        if (representatives.size() > 1) {\n            double newDist = distance.apply(point, representatives.get(0).index);\n            for (int i = 1; i < representatives.size(); i++) {\n                double t = distance.apply(point, representatives.get(i).index);\n                if (t < newDist) {\n                    newDist = t;\n                    closest = representatives.get(i);\n                }\n            }\n        }\n        closest.weight += weight;\n        this.weight += weight;\n        this.sumOfRadius += weight * dist;\n    }\n\n    // the following sets up reassignment of the coordinate based on the points\n    // assigned to the center\n    public void reset() {\n        previousWeight = weight;\n        weight = 0;\n        for (int i = 0; i < representatives.size(); i++) {\n            representatives.get(i).weight = 0;\n        }\n        previousSumOFRadius = sumOfRadius;\n        sumOfRadius = 0;\n    }\n\n    public double averageRadius() {\n        return (weight > 0) ? sumOfRadius / weight : 0;\n    }\n\n    // forces a nearest neighbor merge\n    public double extentMeasure() {\n        return (weight > 0) ? 
0.5 * sumOfRadius / (numberOfRepresentatives * weight) : 0;\n    }\n\n    public double getWeight() {\n        return weight;\n    }\n\n    // reassignment may not be meaningful for generic types, without additional\n    // information\n    public double recompute(Function<Integer, R> getPoint, boolean flag, BiFunction<R, R, Double> distanceFunction) {\n        return 0;\n    }\n\n    // merges a center into another\n    public void absorb(ICluster<R> other, BiFunction<R, R, Double> distance) {\n        List<Weighted<R>> savedRepresentatives = this.representatives;\n        savedRepresentatives.addAll(other.getRepresentatives());\n        this.representatives = new ArrayList<>();\n\n        int maxIndex = 0;\n        float weight = savedRepresentatives.get(0).weight;\n        for (int i = 1; i < savedRepresentatives.size(); i++) {\n            if (weight < savedRepresentatives.get(i).weight) {\n                weight = savedRepresentatives.get(i).weight;\n                maxIndex = i;\n            }\n        }\n        this.representatives.add(savedRepresentatives.get(maxIndex));\n        savedRepresentatives.remove(maxIndex);\n        sumOfRadius += other.extentMeasure() * other.getWeight();\n        this.weight += other.getWeight();\n\n        /**\n         * create a list of representatives based on the farthest point method, which\n         * correspond to a well scattered set. 
See\n         * https://en.wikipedia.org/wiki/CURE_algorithm\n         */\n        while (savedRepresentatives.size() > 0 && this.representatives.size() < numberOfRepresentatives) {\n            double farthestWeightedDistance = 0.0;\n            int farthestIndex = Integer.MAX_VALUE;\n            for (int j = 0; j < savedRepresentatives.size(); j++) {\n                if (savedRepresentatives.get(j).weight > weight / (2 * numberOfRepresentatives)) {\n                    double newWeightedDist = distance.apply(this.representatives.get(0).index,\n                            savedRepresentatives.get(j).index) * savedRepresentatives.get(j).weight;\n                    checkArgument(newWeightedDist >= 0, \" weights or distances cannot be negative\");\n                    for (int i = 1; i < this.representatives.size(); i++) {\n                        newWeightedDist = min(newWeightedDist,\n                                distance.apply(this.representatives.get(i).index, savedRepresentatives.get(j).index))\n                                * savedRepresentatives.get(j).weight;\n                        checkArgument(newWeightedDist >= 0, \" weights or distances cannot be negative\");\n                    }\n                    if (newWeightedDist > farthestWeightedDistance) {\n                        farthestWeightedDistance = newWeightedDist;\n                        farthestIndex = j;\n                    }\n                }\n            }\n            if (farthestWeightedDistance == 0.0) {\n                break;\n            }\n            this.representatives.add(savedRepresentatives.get(farthestIndex));\n            savedRepresentatives.remove(farthestIndex);\n        }\n\n        // absorb the remainder into existing representatives\n        for (Weighted<R> representative : savedRepresentatives) {\n            double dist = distance.apply(representative.index, this.representatives.get(0).index);\n            checkArgument(dist >= 0, \"distance cannot be 
negative\");\n            double minDist = dist;\n            int minIndex = 0;\n            for (int i = 1; i < this.representatives.size(); i++) {\n                double newDist = distance.apply(this.representatives.get(i).index, representative.index);\n                checkArgument(newDist >= 0, \"distance cannot be negative\");\n                if (newDist < minDist) {\n                    minDist = newDist;\n                    minIndex = i;\n                }\n            }\n            this.representatives.get(minIndex).weight += representative.weight;\n            sumOfRadius += representative.weight * ((1 - shrinkage) * minDist + dist * shrinkage);\n        }\n    }\n\n    @Override\n    public double distance(R point, BiFunction<R, R, Double> distanceFunction) {\n        double dist = distanceFunction.apply(this.representatives.get(0).index, point);\n        checkArgument(dist >= 0, \"distance cannot be negative\");\n        double newDist = dist;\n        for (int i = 1; i < this.representatives.size(); i++) {\n            newDist = min(newDist, distanceFunction.apply(this.representatives.get(i).index, point));\n            checkArgument(newDist >= 0, \"distance cannot be negative\");\n        }\n        return (1 - shrinkage) * newDist + shrinkage * dist;\n    }\n\n    @Override\n    public double distance(ICluster<R> other, BiFunction<R, R, Double> distanceFunction) {\n        List<Weighted<R>> representatives = other.getRepresentatives();\n        double dist = distanceFunction.apply(this.representatives.get(0).index, representatives.get(0).index);\n        checkArgument(dist >= 0, \"distance cannot be negative\");\n        double newDist = dist;\n        for (int i = 1; i < this.representatives.size(); i++) {\n            for (int j = 1; j < representatives.size(); j++) {\n                newDist = min(newDist,\n                        distanceFunction.apply(this.representatives.get(i).index, representatives.get(j).index));\n                
checkArgument(newDist >= 0, \"distance cannot be negative\");\n            }\n        }\n        return (1 - shrinkage) * newDist + shrinkage * dist;\n    }\n\n    @Override\n    public List<Weighted<R>> getRepresentatives() {\n        return representatives;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/summarization/ICluster.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.summarization;\n\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * a set of cunstions that a conceptual \"cluster\" should satisfy for any generic\n * distance based clustering where a distance function of type from (R,R) into\n * double is provided externally. It is not feasible (short of various\n * assumptions) to check for the validity of a distance function and the\n * clustering would not perform any validity checks. The user is referred to\n * https://en.wikipedia.org/wiki/Metric_(mathematics)\n *\n * It does not escape our attention that the clustering can use multiple\n * different distance functions over its execution. 
But such should be performed\n * with caution.\n */\npublic interface ICluster<R> {\n\n    // restting statistics for a potential reassignment\n    void reset();\n\n    double averageRadius();\n\n    // a measure of the noise/blur around a cluster; for single centroid clustering\n    // this is the average distance of a point from a cluster representative\n    double extentMeasure();\n\n    // weight computation\n    double getWeight();\n\n    // merge another cluster of same type\n    void absorb(ICluster<R> other, BiFunction<R, R, Double> distance);\n\n    // distance of apoint from a cluster, has to be non-negative\n    double distance(R point, BiFunction<R, R, Double> distance);\n\n    // distance of another cluster from this cluster, has to be non negative\n    double distance(ICluster<R> other, BiFunction<R, R, Double> distance);\n\n    // all potential representativess of a cluster these are typically chosen to be\n    // well scattered\n    // by default the first entry is the primary representative\n    List<Weighted<R>> getRepresentatives();\n\n    // a primary representative of the cluster; by default it is the first in the\n    // list of representatives\n    // this additional function allows an option for optimization of runtime as well\n    // as alternate\n    // representations. For example the distance metric can be altered to be a fixed\n    // linear combination\n    // of the primary and secondary representatives, as in CURE\n    // https://en.wikipedia.org/wiki/CURE_algorithm\n    default R primaryRepresentative(BiFunction<R, R, Double> distance) {\n        return getRepresentatives().get(0).index;\n    }\n\n    // Some of the algorithms, in particular the geometric ones may store the\n    // assigned points for\n    // iterative refinement. 
However that can be extremely inefficient if the\n    // distance measure does not\n    // have sufficient range, for example, string edit distances (for bounded\n    // strings) are bounded in a\n    // short interval. A soft assignment would create multiple copies of points (as\n    // is appropriate) and\n    // that can be significantly slower.\n    default List<Weighted<Integer>> getAssignedPoints() {\n        return Collections.emptyList();\n    }\n\n    // optimize the cluster representation based on assigned points; this is classic\n    // iterative optimization\n    // useful in EM type algorithms\n\n    /**\n     * optimize the cluster representation based on assigned points; this is classic\n     * iterative optimization useful in EM type algorithms\n     * \n     * @param getPoint a function that provides a point given an integer index\n     * @param force    it set as true perform a slow and accurate recomputation;\n     *                 otherwise approximation would suffice\n     * @param distance the distance function\n     * @return a measure of improvement (if any); this can be useful in the future\n     *         as a part of the stopping condition\n     */\n    double recompute(Function<Integer, R> getPoint, boolean force, BiFunction<R, R, Double> distance);\n\n    // adding a point to a cluster, and possibly updates the extent measure and the\n    // assigned points\n    void addPoint(int index, float weight, double dist, R point, BiFunction<R, R, Double> distance);\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/summarization/MultiCenter.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.summarization;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class MultiCenter extends GenericMultiCenter<float[]> {\n\n    ArrayList<Weighted<Integer>> assignedPoints;\n\n    MultiCenter(float[] coordinate, float weight, double shrinkage, int numberOfRepresentatives) {\n        super(coordinate, weight, shrinkage, numberOfRepresentatives);\n        this.assignedPoints = new ArrayList<>();\n    }\n\n    public static MultiCenter initialize(float[] coordinate, float weight, double shrinkage,\n            int numberOfRepresentatives) {\n        checkArgument(shrinkage >= 0 && shrinkage <= 1.0, \" parameter has to be in [0,1]\");\n        checkArgument(numberOfRepresentatives > 0 && numberOfRepresentatives <= 100,\n                \" the number of representatives has to be in (0,100]\");\n        return new MultiCenter(coordinate, weight, shrinkage, numberOfRepresentatives);\n    }\n\n    public void addPoint(int index, float weight, double dist, float[] point,\n            BiFunction<float[], float[], Double> distance) {\n        super.addPoint(index, weight, dist, point, distance);\n        
assignedPoints.add(new Weighted<>(index, weight));\n    }\n\n    // the following sets up reassignment of the coordinate based on the points\n    // assigned to the center\n    public void reset() {\n        super.reset();\n        assignedPoints = new ArrayList<>();\n    }\n\n    // a standard reassignment using the median values and NOT the mean; the mean is\n    // unlikely to\n    // provide robust convergence\n    public double recompute(Function<Integer, float[]> getPoint, boolean force,\n            BiFunction<float[], float[], Double> distanceFunction) {\n        if (assignedPoints.size() == 0 || weight == 0.0 || !force) {\n            return 0;\n        }\n\n        previousSumOFRadius = sumOfRadius;\n        sumOfRadius = 0;\n        for (int j = 0; j < assignedPoints.size(); j++) {\n            // distance will check for -negative internally\n            double addTerm = distance(getPoint.apply(assignedPoints.get(j).index), distanceFunction)\n                    * assignedPoints.get(j).weight;\n            sumOfRadius += addTerm;\n        }\n        return (previousSumOFRadius - sumOfRadius);\n\n    }\n\n    @Override\n    public List<Weighted<Integer>> getAssignedPoints() {\n        return assignedPoints;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/summarization/Summarizer.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.summarization;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.util.Weighted.createSample;\nimport static com.amazon.randomcutforest.util.Weighted.prefixPick;\nimport static java.lang.Math.max;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\n\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class Summarizer {\n\n    /**\n     * a factor that controls weight assignment for soft clustering; this is the\n     * multiple of the minimum distance and should be greater or equal 1.\n     */\n    public static double WEIGHT_ALLOCATION_THRESHOLD = 1.25;\n\n    /**\n     * the following determines the ratio between the sum of the (average) radius\n     * and the separation between centers for a merge; ratio greater than 1 means\n     * significant overlap a ratio of 0 means merge closest pairs without\n     * consideration of separartion\n     *\n     **/\n    public static double DEFAULT_SEPARATION_RATIO_FOR_MERGE = 0.8;\n\n    public static int PHASE2_THRESHOLD = 2;\n\n    public static int LENGTH_BOUND = 1000;\n\n    
public static Double L1distance(float[] a, float[] b) {\n        double dist = 0;\n        for (int i = 0; i < a.length; i++) {\n            dist += Math.abs(a[i] - b[i]);\n        }\n        return dist;\n    }\n\n    public static Double L2distance(float[] a, float[] b) {\n        double dist = 0;\n        for (int i = 0; i < a.length; i++) {\n            double t = Math.abs(a[i] - b[i]);\n            dist += t * t;\n        }\n        return Math.sqrt(dist);\n    }\n\n    public static Double LInfinitydistance(float[] a, float[] b) {\n        double dist = 0;\n        for (int i = 0; i < a.length; i++) {\n            dist = max(Math.abs(a[i] - b[i]), dist);\n        }\n        return dist;\n    }\n\n    /**\n     * a function that reassigns points to clusters\n     * \n     * @param sampledPoints   a list of sampled points with weights\n     * @param clusters        a list of current clusters, because random access to\n     *                        the elements is necessary\n     * @param distance        a distance function\n     * @param parallelEnabled a flag enabling limited parallelism; only during\n     *                        cluster by cluster recomputation. 
Using parallel mode\n     *                        during the assignment of points does not seem to help\n     */\n    public static <R> void assignAndRecompute(List<Weighted<Integer>> sampledPoints, Function<Integer, R> getPoint,\n            List<ICluster<R>> clusters, BiFunction<R, R, Double> distance, boolean parallelEnabled) {\n        checkArgument(clusters.size() > 0, \" cannot be empty list of clusters\");\n        checkArgument(sampledPoints.size() > 0, \" cannot be empty list of points\");\n\n        for (ICluster<R> cluster : clusters) {\n            cluster.reset();\n        }\n\n        for (Weighted<Integer> point : sampledPoints) {\n            if (point.weight > 0) {\n\n                double[] dist = new double[clusters.size()];\n                Arrays.fill(dist, Double.MAX_VALUE);\n                double minDist = Double.MAX_VALUE;\n                int minDistNbr = -1;\n                for (int i = 0; i < clusters.size(); i++) {\n                    // will check for negative distances\n                    dist[i] = clusters.get(i).distance(getPoint.apply(point.index), distance);\n                    if (minDist > dist[i]) {\n                        minDist = dist[i];\n                        minDistNbr = i;\n                    }\n                    if (minDist == 0) {\n                        break;\n                    }\n                }\n\n                if (minDist == 0) {\n                    clusters.get(minDistNbr).addPoint(point.index, point.weight, 0, getPoint.apply(point.index),\n                            distance);\n                } else {\n                    double sum = 0;\n                    for (int i = 0; i < clusters.size(); i++) {\n                        if (dist[i] <= WEIGHT_ALLOCATION_THRESHOLD * minDist) {\n                            sum += minDist / dist[i]; // setting up harmonic mean\n                        }\n                    }\n                    for (int i = 0; i < clusters.size(); i++) {\n               
         if (dist[i] <= WEIGHT_ALLOCATION_THRESHOLD * minDist) {\n                            // harmonic mean\n                            clusters.get(i).addPoint(point.index, (float) (point.weight * minDist / (dist[i] * sum)),\n                                    dist[i], getPoint.apply(point.index), distance);\n                        }\n                    }\n                }\n            }\n        }\n\n        if (parallelEnabled) {\n            clusters.parallelStream().forEach(e -> e.recompute(getPoint, true, distance));\n        } else {\n            clusters.stream().forEach(e -> e.recompute(getPoint, true, distance));\n        }\n    }\n\n    /**\n     * The core subroutine for iterative clustering used herein. The clustering\n     * algorithm borrows from CURE https://en.wikipedia.org/wiki/CURE_algorithm,\n     * which used sampling as a tradeoff of representationa accuracy versus\n     * algorithmic efficiency. Note however random sampling can also perform\n     * denoising and reduce space as a filtering mechanism. Note that hierarchical\n     * iterative merging strategies can be proven to not degrade clustering\n     * https://en.wikipedia.org/wiki/Data_stream_clustering with the benefit of\n     * small space. The algorithm herein proceeds in three phases, where the first\n     * phase corresponds from the initial seeding to about twice the maximum number\n     * of clusters one wishes to consider. The second phase corresponds to reducing\n     * that number to the maximum allowable number. The third phase corresponds to\n     * continuing the clustering as long as the conditions are similar to the end of\n     * phase two, thereby enabling us to use a rough estimate for the maximum\n     * allowed. By default, recomputation of the cluster makes sense in phases 2 and\n     * 3 -- however can be enabled for phase 1 as well, thereby enabling the regular\n     * K-Means algorithm to be expressed in the below algorithm as well. 
The\n     * algorithm can also express Minimum Spanning Tree based clustering with\n     * repeated merging of closest pair (which is a capability derived from CURE)\n     *\n     * The primary reason for the number of parameters is the ability to invoke this\n     * function without creating a copy of the points (or equivalent large objects),\n     * and hence the functions as parameters\n     *\n     * @param maxAllowed           number of maximum clusters one is interested in\n     * @param initial              an initial number of sampled centers to start\n     *                             from\n     * @param stopAt               a hard lower bound on the number of clusters\n     * @param refs                 a (possibly sampled) list of references with\n     *                             weight\n     * @param getPoint             a function which retrives the point/object given\n     *                             an index in the refs\n     * @param distance             a distance function\n     * @param clusterInitializer   a function that creates a cluster given an object\n     *                             aand a weight\n     * @param seed                 a random seed\n     * @param parallelEnabled      enabling parallel computation in the first phase\n     *                             when points are assigned to different sampled\n     *                             centers; and the centers are possibly adjusted\n     * @param phase2GlobalReassign a flag that determines if the points would be\n     *                             reassigned when the clusters fall below 1.2 *\n     *                             maxAllowed -- this serves as a denoising.\n     * @param overlapParameter     a parameter that controls the ordering of the\n     *                             merges as well as the stopping condition of the\n     *                             merges\n     * @param previousClustering   a possibly null list of clusters seen previously,\n     *          
                   used as zero weight seeds to smoothen the\n     *                             continuous clustering\n     * @param <R>                  type of object being clustered\n     * @return a list of clusters\n     */\n    public static <R> List<ICluster<R>> iterativeClustering(int maxAllowed, int initial, int stopAt,\n            List<Weighted<Integer>> refs, Function<Integer, R> getPoint, BiFunction<R, R, Double> distance,\n            BiFunction<R, Float, ICluster<R>> clusterInitializer, long seed, boolean parallelEnabled,\n            boolean phase2GlobalReassign, double overlapParameter, List<ICluster<R>> previousClustering) {\n\n        checkArgument(refs.size() > 0, \"empty list, nothing to do\");\n        checkArgument(stopAt > 0, \"has to stop at 1 cluster\");\n        checkArgument(stopAt <= maxAllowed, \"cannot stop before achieving the limit\");\n\n        Random rng = new Random(seed);\n        double sampledSum = refs.stream().map(e -> {\n            checkArgument(Double.isFinite(e.weight), \" weights have to be finite\");\n            checkArgument(e.weight >= 0.0, () -> \"negative weights are not meaningful\" + e.weight);\n            return (double) e.weight;\n        }).reduce(0.0, Double::sum);\n        checkArgument(sampledSum > 0, \" total weight has to be positive\");\n        ArrayList<ICluster<R>> centers = new ArrayList<>();\n        if (refs.size() < 10 * (initial + 5)) {\n            for (Weighted<Integer> point : refs) {\n                centers.add(clusterInitializer.apply(getPoint.apply(point.index), 0f));\n            }\n        } else {\n            for (int k = 0; k < 2 * (initial + 5); k++) {\n                double wt = rng.nextDouble() * sampledSum;\n                Weighted<Integer> picked = prefixPick(refs, wt);\n                centers.add(clusterInitializer.apply(getPoint.apply(picked.index), 0f));\n            }\n        }\n        if (previousClustering != null) {\n            for (ICluster<R> previousCluster : 
previousClustering) {\n                List<Weighted<R>> representatives = previousCluster.getRepresentatives();\n                for (Weighted<R> representative : representatives) {\n                    centers.add(clusterInitializer.apply(representative.index, 0f));\n                }\n            }\n        }\n        assignAndRecompute(refs, getPoint, centers, distance, parallelEnabled);\n        // assignment would change weights, sorting in non-decreasing order\n        centers.sort(Comparator.comparingDouble(ICluster::getWeight));\n        while (centers.get(0).getWeight() == 0) {\n            centers.remove(0);\n        }\n\n        double phase3Distance = 0;\n        double runningPhase3Distance = 0;\n        boolean keepReducingCenters = (centers.size() > maxAllowed);\n\n        while (keepReducingCenters) {\n            double measure = 0;\n            double measureDist = Double.MAX_VALUE;\n            int lower = 0;\n            int firstOfMerge = lower;\n            int secondOfMerge = lower + 1;// will be reset before exiting the loop\n            boolean foundMerge = false;\n            double minDist = Double.MAX_VALUE;\n\n            while (lower < centers.size() - 1 && !foundMerge) {\n                // we will keep searching\n                int minNbr = -1;\n                for (int j = lower + 1; j < centers.size(); j++) {\n                    double dist = centers.get(lower).distance(centers.get(j), distance);\n                    if (dist == 0) {\n                        foundMerge = true;\n                        firstOfMerge = lower;\n                        secondOfMerge = minNbr = j;\n                        minDist = measureDist = 0.0;\n                        break;\n                    } else {\n                        if (minDist > dist) {\n                            minNbr = j;\n                            minDist = dist;\n                        }\n\n                        double temp = (centers.get(lower).extentMeasure() + 
centers.get(j).extentMeasure()\n                                + phase3Distance) / dist;\n                        if (temp > overlapParameter && measure < temp) {\n                            firstOfMerge = lower;\n                            secondOfMerge = j;\n                            measure = temp;\n                            measureDist = dist;\n                        }\n                    }\n                }\n                if (lower == 0 && !foundMerge) {\n                    measureDist = minDist;\n                    // this is set assuming we may be interested in merging the minimum weight\n                    // cluster which corresponds to lower == 0\n                    secondOfMerge = minNbr;\n                }\n                ++lower;\n            }\n\n            int inital = centers.size();\n\n            if (inital > maxAllowed || foundMerge || (inital > stopAt && measure > overlapParameter)) {\n                centers.get(secondOfMerge).absorb(centers.get(firstOfMerge), distance);\n                if (phase2GlobalReassign && centers.size() <= PHASE2_THRESHOLD * maxAllowed + 1) {\n                    centers.remove(firstOfMerge);\n                    assignAndRecompute(refs, getPoint, centers, distance, parallelEnabled);\n                } else {\n                    centers.get(secondOfMerge).recompute(getPoint, false, distance);\n                    centers.remove(firstOfMerge);\n                }\n                centers.sort(Comparator.comparingDouble(ICluster::getWeight));\n                while (centers.get(0).getWeight() == 0.0) {\n                    // this line is reachable via zeroTest() in\n                    // SampleSummaryTest\n                    centers.remove(0);\n                }\n                if (inital < 1.2 * maxAllowed + 1) {\n                    // phase 3 kicks in; but this will execute at most once\n                    // note that measureDist can be 0 as well\n                    runningPhase3Distance = 
max(runningPhase3Distance, measureDist);\n                    if (inital > maxAllowed && centers.size() <= maxAllowed) {\n                        phase3Distance = runningPhase3Distance;\n                    }\n                }\n            } else {\n                keepReducingCenters = false;\n            }\n        }\n        // sort in decreasing weight\n        centers.sort((o1, o2) -> Double.compare(o2.getWeight(), o1.getWeight()));\n        return centers;\n    }\n\n    /**\n     * the following function returns a summary of the input points\n     *\n     * @param points               points with associated weights\n     * @param maxAllowed           the maximum number of clusters/summary points\n     * @param initial              the initial number of clusters/summary points,\n     *                             chosen at random\n     * @param stopAt               a hard lower bound on the number of clusters\n     * @param phase2GlobalReassign a flag that performs global reassignments when\n     *                             the number of clusters is in the range\n     *                             [maxAllowed, ceil(1.2*maxAllowed)]\n     * @param overlapParameter     a control for merging clusters\n     * @param distance             a distance function for the points, that\n     *                             determines the order of the reverse delete\n     *                             however the EM like step uses L1 measure (to be\n     *                             robust to noise)\n     * @param clusterInitializer   a function that creates the cluster type given a\n     *                             single object and a weight\n     * @param seed                 a random seed for controlling the randomness\n     * @param parallelEnabled      flag enabling (limited) parallelism\n     * @param previousClustering   any previous clustering that can be used as zero\n     *                             weight seeds to ensure smoothness\n     * @return a 
clustering of the input points (Note: the median returned is an\n     *         approximate median; exact computation is unlikely to be critical for\n     *         true applications of summarization)\n     */\n    public static <R> List<ICluster<R>> summarize(List<Weighted<R>> points, int maxAllowed, int initial, int stopAt,\n            boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance,\n            BiFunction<R, Float, ICluster<R>> clusterInitializer, long seed, boolean parallelEnabled,\n            List<ICluster<R>> previousClustering) {\n        checkArgument(maxAllowed < 100, \"are you sure you want more elements in the summary?\");\n        checkArgument(maxAllowed <= initial, \"initial parameter should be at least maximum allowed in final result\");\n\n        double totalWeight = points.stream().map(e -> {\n            checkArgument(Double.isFinite(e.weight), \" weights have to be finite\");\n            checkArgument(e.weight >= 0.0, () -> \"negative weights are not meaningful\" + e.weight);\n            return (double) e.weight;\n        }).reduce(0.0, Double::sum);\n        checkArgument(totalWeight > 0, \" total weight has to be positive\");\n        Random rng = new Random(seed);\n        // the following list is explicity copied and sorted for potential efficiency\n        List<Weighted<R>> sampledPoints = createSample(points, rng.nextLong(), 5 * LENGTH_BOUND, 0.005, 1.0);\n\n        ArrayList<Weighted<Integer>> refs = new ArrayList<>();\n        for (int i = 0; i < sampledPoints.size(); i++) {\n            refs.add(new Weighted<>(i, sampledPoints.get(i).weight));\n        }\n\n        Function<Integer, R> getPoint = (i) -> sampledPoints.get(i).index;\n\n        return iterativeClustering(maxAllowed, initial, stopAt, refs, getPoint, distance, clusterInitializer,\n                rng.nextLong(), parallelEnabled, phase2GlobalReassign, overlapParameter, previousClustering);\n    }\n\n    // same as above, specific 
for single centroid clustering of float[]\n    // with an explicit stopping condition as well as a global reassignment option\n    public static List<ICluster<float[]>> singleCentroidSummarize(List<Weighted<float[]>> points, int maxAllowed,\n            int initial, int stopAt, boolean phase2GlobalReassign, BiFunction<float[], float[], Double> distance,\n            long seed, boolean parallelEnabled, List<ICluster<float[]>> previousClustering) {\n        return summarize(points, maxAllowed, initial, stopAt, phase2GlobalReassign, DEFAULT_SEPARATION_RATIO_FOR_MERGE,\n                distance, Center::initialize, seed, parallelEnabled, previousClustering);\n    }\n\n    /**\n     * the following function returns a summary of the input points\n     *\n     * @param points          points with associated weights\n     * @param maxAllowed      the maximum number of clusters/summary points\n     * @param initial         the initial number of clusters/summary points, chosen\n     *                        at random\n     * @param phase1reassign  (this parameter is ignored in the current version, but\n     *                        the signature is unchanged for convenience)\n     * @param distance        a distance function for the points, that determines\n     *                        the order of the reverse delete however the EM like\n     *                        step uses L1 measure (to be robust to noise)\n     * @param seed            a random seed for controlling the randomness\n     * @param parallelEnabled flag enabling (limited) parallelism\n     * @return a summary of the input points (Note: the median returned is an\n     *         approximate median; exact computation is unlikely to be critical for\n     *         true applications of summarization)\n     */\n    public static SampleSummary summarize(List<Weighted<float[]>> points, int maxAllowed, int initial,\n            boolean phase1reassign, BiFunction<float[], float[], Double> distance, long seed, 
boolean parallelEnabled,\n            int numberOfReps, double shrinkage) {\n        checkArgument(maxAllowed < 100, \"are you sure you want more elements in the summary?\");\n        checkArgument(maxAllowed <= initial, \"initial parameter should be at least maximum allowed in final result\");\n\n        double totalWeight = points.stream().map(e -> {\n            checkArgument(Double.isFinite(e.weight), \" weights have to be finite\");\n            checkArgument(e.weight >= 0.0, () -> \"negative weights are not meaningful\" + e.weight);\n            return (double) e.weight;\n        }).reduce(0.0, Double::sum);\n        checkArgument(totalWeight > 0, \" total weight has to be positive\");\n\n        Random rng = new Random(seed);\n        // the following list is explicity copied and sorted for potential efficiency\n        List<Weighted<float[]>> sampledPoints = createSample(points, rng.nextLong(), 5 * LENGTH_BOUND, 0.005, 1.0);\n\n        List<ICluster<float[]>> centers = (numberOfReps == 1)\n                ? 
summarize(sampledPoints, maxAllowed, initial, 1, true, DEFAULT_SEPARATION_RATIO_FOR_MERGE, distance,\n                        Center::initialize, seed, parallelEnabled, null)\n                : multiSummarizeWeighted(sampledPoints, maxAllowed, initial, 1, false,\n                        DEFAULT_SEPARATION_RATIO_FOR_MERGE, distance, seed, parallelEnabled, shrinkage, numberOfReps);\n\n        int num = centers.stream().mapToInt(x -> x.getRepresentatives().size()).sum();\n        float[][] pointList = new float[num][];\n        float[] likelihood = new float[num];\n        float[] measure = new float[num];\n\n        int dimensions = centers.get(0).primaryRepresentative(distance).length;\n        int count = 0;\n        for (int i = 0; i < centers.size(); i++) {\n            for (Weighted<float[]> rep : centers.get(i).getRepresentatives()) {\n                pointList[count] = Arrays.copyOf(rep.index, dimensions);\n                likelihood[count] = (float) (rep.weight / totalWeight);\n                measure[count++] = (float) centers.get(i).averageRadius();\n            }\n        }\n\n        return new SampleSummary(sampledPoints, pointList, likelihood, measure);\n    }\n\n    public static SampleSummary summarize(List<Weighted<float[]>> points, int maxAllowed, int initial,\n            boolean phase1reassign, BiFunction<float[], float[], Double> distance, long seed, boolean parallelEnabled) {\n        return summarize(points, maxAllowed, initial, phase1reassign, distance, seed, parallelEnabled, 1, 0);\n    }\n\n    /**\n     * Same as previous over a flat collection of unweighted float[]\n     *\n     * @param points          points represented by float[][]\n     * @param maxAllowed      maximum number of clusters in output\n     * @param initial         initial number of points to seed; a control parameter\n     *                        that serves both as a denoiser, as well as as a\n     *                        facilitator of coninuity (large numbers would\n 
    *                        correspond to MST like clustering)\n     * @param reassignPerStep unusued in current version\n     * @param distance        distance metric over float []\n     * @param seed            random seed\n     * @param parallelEnabled flag enabling (limited) parallelism\n     * @return a list of centers with weights\n     */\n    public static SampleSummary summarize(float[][] points, int maxAllowed, int initial, boolean reassignPerStep,\n            BiFunction<float[], float[], Double> distance, long seed, Boolean parallelEnabled) {\n        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();\n        for (float[] point : points) {\n            weighted.add(new Weighted<>(point, 1.0f));\n        }\n        return summarize(weighted, maxAllowed, initial, reassignPerStep, distance, seed, parallelEnabled);\n    }\n\n    /**\n     * same as before with common cases filled in, used in analysis of\n     * ConditionalSamples\n     *\n     * @param points          points in ProjectedPoint{}\n     * @param maxAllowed      maximum number of groups/clusters\n     * @param initial         a parameter controlling the initialization\n     * @param reassignPerStep if reassignment is to be performed each step\n     * @param seed            random seed\n     * @return a summarization\n     */\n    public static SampleSummary l2summarize(List<Weighted<float[]>> points, int maxAllowed, int initial,\n            boolean reassignPerStep, long seed) {\n        return summarize(points, maxAllowed, initial, reassignPerStep, Summarizer::L2distance, seed, false);\n    }\n\n    /**\n     * Same as above, with the most common use cases filled in\n     *\n     * @param points     points in float[][], each of weight 1.0\n     * @param maxAllowed maximum number of clusters one is interested in\n     * @param seed       random seed\n     * @return a summarization\n     */\n    public static SampleSummary l2summarize(float[][] points, int maxAllowed, long seed) {\n    
    return summarize(points, maxAllowed, 4 * maxAllowed, false, Summarizer::L2distance, seed, false);\n    }\n\n    /**\n     *\n     * @param points                  points represented by R[]\n     * @param maxAllowed              maximum number of clusters in output\n     * @param initial                 initial number of points to seed; a control\n     *                                parameter that serves both as a denoiser, as\n     *                                well as as a facilitator of coninuity (large\n     *                                numbers would correspond to MST like\n     *                                clustering)\n     * @param phase2GlobalReassign    a boolean determining global reassignment\n     * @param overlapParameter        a parameter controlling order of mergers\n     * @param distance                distance metric over float []\n     * @param seed                    random seed\n     * @param parallelEnabled         flag enabling (limited) parallelism\n     * @param shrinkage               a parameter that morphs from centroidal\n     *                                behavior (=1) to robust Minimum Spanning Tree\n     *                                (=0)\n     * @param numberOfRepresentatives the number of representatives ina multicentrod\n     *                                representation used to cluster potentially\n     *                                non-spherical shapes\n     * @return a list of centers with weights\n     */\n    public static <R> List<ICluster<R>> multiSummarize(List<R> points, int maxAllowed, int initial, int stopAt,\n            boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance, long seed,\n            Boolean parallelEnabled, double shrinkage, int numberOfRepresentatives) {\n\n        ArrayList<Weighted<R>> weighted = new ArrayList<>();\n        for (R point : points) {\n            weighted.add(new Weighted<>(point, 1.0f));\n        }\n        return 
multiSummarizeWeighted(weighted, maxAllowed, initial, stopAt, phase2GlobalReassign, overlapParameter,\n                distance, seed, parallelEnabled, shrinkage, numberOfRepresentatives);\n    }\n\n    public static <R> List<ICluster<R>> multiSummarizeWeighted(List<Weighted<R>> points, int maxAllowed, int initial,\n            int stopAt, boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance,\n            long seed, boolean parallelEnabled, double shrinkage, int numberOfRepresentatives) {\n        BiFunction<R, Float, ICluster<R>> clusterInitializer = (a, b) -> GenericMultiCenter.initialize(a, b, shrinkage,\n                numberOfRepresentatives);\n        return summarize(points, maxAllowed, initial, stopAt, phase2GlobalReassign, overlapParameter, distance,\n                clusterInitializer, seed, parallelEnabled, null);\n    }\n\n    // same as above, different input\n    public static <R> List<ICluster<R>> multiSummarize(R[] points, int maxAllowed, int initial, int stopAt,\n            boolean phase2GlobalReassign, double overlapParameter, BiFunction<R, R, Double> distance, long seed,\n            Boolean parallelEnabled, double shrinkage, int numberOfRepresentatives) {\n\n        ArrayList<Weighted<R>> weighted = new ArrayList<>();\n        for (R point : points) {\n            weighted.add(new Weighted<>(point, 1.0f));\n        }\n        BiFunction<R, Float, ICluster<R>> clusterInitializer = (a, b) -> GenericMultiCenter.initialize(a, b, shrinkage,\n                numberOfRepresentatives);\n        return summarize(weighted, maxAllowed, initial, stopAt, phase2GlobalReassign, overlapParameter, distance,\n                clusterInitializer, seed, parallelEnabled, null);\n    }\n\n    // same as above, with multicenter instead of generic\n    public static List<ICluster<float[]>> multiSummarize(float[][] points, int maxAllowed, double shrinkage,\n            boolean parallelEnabled, int numberOfRepresentatives, long seed) 
{\n\n        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();\n        for (float[] point : points) {\n            weighted.add(new Weighted<>(point, 1.0f));\n        }\n        return multiSummarizeWeighted(weighted, maxAllowed, shrinkage, parallelEnabled, numberOfRepresentatives, seed);\n    }\n\n    public static List<ICluster<float[]>> multiSummarizeWeighted(List<Weighted<float[]>> points, int maxAllowed,\n            double shrinkage, boolean parallelEnabled, int numberOfRepresentatives, long seed) {\n        BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> MultiCenter.initialize(a, b,\n                shrinkage, numberOfRepresentatives);\n        return summarize(points, maxAllowed, 4 * maxAllowed, 1, true, DEFAULT_SEPARATION_RATIO_FOR_MERGE,\n                Summarizer::L2distance, clusterInitializer, seed, parallelEnabled, null);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/AbstractNodeStore.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Stack;\n\nimport com.amazon.randomcutforest.store.IndexIntervalManager;\n\n/**\n * A fixed-size buffer for storing interior tree nodes. An interior node is\n * defined by its location in the tree (parent and child nodes), its random cut,\n * and its bounding box. The NodeStore class uses arrays to store these field\n * values for a collection of nodes. 
An index in the store can be used to look\n * up the field values for a particular node.\n *\n * The internal nodes (handled by this store) corresponds to\n * [0..upperRangeLimit]\n *\n * If we think of an array of Node objects as being row-oriented (where each row\n * is a Node), then this class is analogous to a column-oriented database of\n * Nodes.\n *\n */\npublic abstract class AbstractNodeStore {\n\n    public static int Null = -1;\n\n    public static boolean DEFAULT_STORE_PARENT = false;\n\n    /**\n     * the number of internal nodes; the nodes will range from 0..capacity-1 the\n     * value capacity would correspond to \"not yet set\" the values Y= capacity+1+X\n     * correspond to pointstore index X note that capacity + 1 + X =\n     * number_of_leaves + X\n     */\n    protected final int capacity;\n    protected final float[] cutValue;\n    protected IndexIntervalManager freeNodeManager;\n\n    public AbstractNodeStore(AbstractNodeStore.Builder<?> builder) {\n        this.capacity = builder.capacity;\n        if ((builder.leftIndex == null)) {\n            freeNodeManager = new IndexIntervalManager(capacity);\n        }\n        cutValue = (builder.cutValues != null) ? 
builder.cutValues : new float[capacity];\n    }\n\n    protected abstract int addNode(Stack<int[]> pathToRoot, float[] point, long sendex, int pointIndex, int childIndex,\n            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box);\n\n    public boolean isLeaf(int index) {\n        return index > capacity;\n    }\n\n    public boolean isInternal(int index) {\n        return index < capacity && index >= 0;\n    }\n\n    public abstract void assignInPartialTree(int savedParent, float[] point, int childReference);\n\n    public abstract int getLeftIndex(int index);\n\n    public abstract int getRightIndex(int index);\n\n    public abstract int getParentIndex(int index);\n\n    public abstract void setRoot(int index);\n\n    protected abstract void decreaseMassOfInternalNode(int node);\n\n    protected abstract void increaseMassOfInternalNode(int node);\n\n    protected void manageInternalNodesPartial(Stack<int[]> path) {\n        while (!path.isEmpty()) {\n            int index = path.pop()[0];\n            increaseMassOfInternalNode(index);\n        }\n    }\n\n    public Stack<int[]> getPath(int root, float[] point, boolean verbose) {\n        int node = root;\n        Stack<int[]> answer = new Stack<>();\n        answer.push(new int[] { root, capacity });\n        while (isInternal(node)) {\n            double y = getCutValue(node);\n            if (leftOf(node, point)) {\n                answer.push(new int[] { getLeftIndex(node), getRightIndex(node) });\n                node = getLeftIndex(node);\n            } else { // this would push potential Null, of node == capacity\n                     // that would be used for tree reconstruction\n                answer.push(new int[] { getRightIndex(node), getLeftIndex(node) });\n                node = getRightIndex(node);\n            }\n        }\n        return answer;\n    }\n\n    public abstract void deleteInternalNode(int index);\n\n    public abstract int getMass(int index);\n\n    
protected boolean leftOf(float cutValue, int cutDimension, float[] point) {\n        return point[cutDimension] <= cutValue;\n    }\n\n    public boolean leftOf(int node, float[] point) {\n        int cutDimension = getCutDimension(node);\n        return leftOf(cutValue[node], cutDimension, point);\n    }\n\n    public int getSibling(int node, int parent) {\n        int sibling = getLeftIndex(parent);\n        if (node == sibling) {\n            sibling = getRightIndex(parent);\n        }\n        return sibling;\n    }\n\n    public abstract void spliceEdge(int parent, int node, int newNode);\n\n    public abstract void replaceParentBySibling(int grandParent, int parent, int node);\n\n    public abstract int getCutDimension(int index);\n\n    public double getCutValue(int index) {\n        return cutValue[index];\n    }\n\n    protected boolean toLeft(float[] point, int currentNodeOffset) {\n        return point[getCutDimension(currentNodeOffset)] <= cutValue[currentNodeOffset];\n    }\n\n    public abstract int[] getCutDimension();\n\n    public abstract int[] getRightIndex();\n\n    public abstract int[] getLeftIndex();\n\n    public float[] getCutValues() {\n        return cutValue;\n    }\n\n    public int getCapacity() {\n        return capacity;\n    }\n\n    public int size() {\n        return capacity - freeNodeManager.size();\n    }\n\n    /**\n     * a builder\n     */\n\n    public static class Builder<T extends Builder<T>> {\n        protected int capacity;\n        protected int[] leftIndex;\n        protected int[] rightIndex;\n        protected int[] cutDimension;\n        protected float[] cutValues;\n        protected boolean storeParent = DEFAULT_STORE_PARENT;\n        protected int dimension;\n        protected int root;\n\n        // maximum number of points in the store\n        public T capacity(int capacity) {\n            this.capacity = capacity;\n            return (T) this;\n        }\n\n        public T dimension(int dimension) {\n      
      this.dimension = dimension;\n            return (T) this;\n        }\n\n        public T useRoot(int root) {\n            this.root = root;\n            return (T) this;\n        }\n\n        public T leftIndex(int[] leftIndex) {\n            this.leftIndex = leftIndex;\n            return (T) this;\n        }\n\n        public T rightIndex(int[] rightIndex) {\n            this.rightIndex = rightIndex;\n            return (T) this;\n        }\n\n        public T cutDimension(int[] cutDimension) {\n            this.cutDimension = cutDimension;\n            return (T) this;\n        }\n\n        public T cutValues(float[] cutValues) {\n            this.cutValues = cutValues;\n            return (T) this;\n        }\n\n        public T storeParent(boolean storeParent) {\n            this.storeParent = storeParent;\n            return (T) this;\n        }\n\n        public AbstractNodeStore build() {\n            if (leftIndex == null) {\n                checkArgument(rightIndex == null, \" incorrect option of right indices\");\n                checkArgument(cutValues == null, \"incorrect option of cut values\");\n                checkArgument(cutDimension == null, \" incorrect option of cut dimensions\");\n            } else {\n                checkArgument(rightIndex.length == capacity, \" incorrect length of right indices\");\n                checkArgument(cutValues.length == capacity, \"incorrect length of cut values\");\n                checkArgument(cutDimension.length == capacity, \" incorrect length of cut dimensions\");\n            }\n\n            // capacity is numbner of internal nodes\n            if (capacity < 256 && dimension <= 256) {\n                return new NodeStoreSmall(this);\n            } else if (capacity < Character.MAX_VALUE && dimension <= Character.MAX_VALUE) {\n                return new NodeStoreMedium(this);\n            } else {\n                return new NodeStoreLarge(this);\n            }\n        }\n\n    }\n\n    public 
static Builder builder() {\n        return new Builder();\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/BoundingBox.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkState;\n\nimport java.util.Arrays;\n\n/**\n * A single precision implementation of AbstractBoundingBox which also satisfies\n * the interface for Visitor classes\n */\npublic class BoundingBox implements IBoundingBoxView {\n\n    /**\n     * An array containing the minimum value corresponding to each dimension.\n     */\n    protected final float[] minValues;\n\n    /**\n     * An array containing the maximum value corresponding to each dimensions\n     */\n    protected final float[] maxValues;\n\n    /**\n     * The sum of side lengths defined by this bounding box.\n     */\n    protected double rangeSum;\n\n    public BoundingBox(float[] point) {\n        minValues = maxValues = point;\n        // a copy in not needed because mergedBox would create a copy\n        // addPoint, addBox would also create copies\n        rangeSum = 0.0;\n    }\n\n    /**\n     * Create a new BoundingBox with the given minimum values and maximum values.\n     *\n     * @param minValues The minimum values for each coordinate.\n     * @param maxValues The maximum values for each coordinate\n     */\n    public BoundingBox(final float[] minValues, final float[] maxValues, double sum) {\n        
this.minValues = minValues;\n        this.maxValues = maxValues;\n        rangeSum = sum;\n    }\n\n    public BoundingBox(final float[] first, final float[] second) {\n        checkArgument(first.length == second.length, \" incorrect lengths in box\");\n        minValues = new float[first.length];\n        maxValues = new float[first.length];\n        rangeSum = 0;\n        for (int i = 0; i < minValues.length; ++i) {\n            minValues[i] = Math.min(first[i], second[i]);\n            maxValues[i] = Math.max(first[i], second[i]);\n            rangeSum += maxValues[i] - minValues[i];\n        }\n\n    }\n\n    public BoundingBox copy() {\n        return new BoundingBox(Arrays.copyOf(minValues, minValues.length), Arrays.copyOf(maxValues, maxValues.length),\n                rangeSum);\n    }\n\n    public BoundingBox getMergedBox(IBoundingBoxView otherBox) {\n        float[] minValuesMerged = new float[minValues.length];\n        float[] maxValuesMerged = new float[minValues.length];\n        double sum = 0.0;\n\n        for (int i = 0; i < minValues.length; ++i) {\n            minValuesMerged[i] = Math.min(minValues[i], (float) otherBox.getMinValue(i));\n            maxValuesMerged[i] = Math.max(maxValues[i], (float) otherBox.getMaxValue(i));\n            sum += maxValuesMerged[i] - minValuesMerged[i];\n        }\n        return new BoundingBox(minValuesMerged, maxValuesMerged, sum);\n    }\n\n    public double probabilityOfCut(float[] point) {\n        double range = 0;\n        for (int i = 0; i < point.length; i++) {\n            range += Math.max(minValues[i] - point[i], 0);\n        }\n        for (int i = 0; i < point.length; i++) {\n            range += Math.max(point[i] - maxValues[i], 0);\n        }\n        if (range == 0) {\n            return 0;\n        } else if (rangeSum == 0) {\n            return 1;\n        } else {\n            return range / (range + rangeSum);\n        }\n    }\n\n    public BoundingBox getMergedBox(float[] point) {\n        
checkArgument(point.length == minValues.length, \"incorrect length\");\n        return copy().addPoint(point);\n    }\n\n    public float[] getMaxValues() {\n        return maxValues;\n    }\n\n    public float[] getMinValues() {\n        return minValues;\n    }\n\n    public BoundingBox addPoint(float[] point) {\n        checkArgument(minValues.length == point.length, \"incorrect length\");\n        checkArgument(minValues != maxValues, \"not a mutable box\");\n        rangeSum = 0;\n        for (int i = 0; i < point.length; ++i) {\n            minValues[i] = Math.min(minValues[i], point[i]);\n        }\n        for (int i = 0; i < point.length; ++i) {\n            maxValues[i] = Math.max(maxValues[i], point[i]);\n        }\n        for (int i = 0; i < point.length; ++i) {\n            rangeSum += maxValues[i] - minValues[i];\n        }\n        return this;\n    }\n\n    public BoundingBox addBox(BoundingBox otherBox) {\n        checkState(minValues != maxValues, \"not a mutable box\");\n        rangeSum = 0;\n        for (int i = 0; i < minValues.length; ++i) {\n            minValues[i] = Math.min(minValues[i], otherBox.minValues[i]);\n        }\n        for (int i = 0; i < minValues.length; ++i) {\n            maxValues[i] = Math.max(maxValues[i], otherBox.maxValues[i]);\n        }\n        for (int i = 0; i < minValues.length; ++i) {\n            rangeSum += maxValues[i] - minValues[i];\n        }\n        return this;\n    }\n\n    public int getDimensions() {\n        return minValues.length;\n    }\n\n    /**\n     * @return the sum of side lengths for this BoundingBox.\n     */\n    public double getRangeSum() {\n        return rangeSum;\n    }\n\n    /**\n     * Gets the max value of the specified dimension.\n     *\n     * @param dimension the dimension for which we need the max value\n     * @return the max value of the specified dimension\n     */\n    public double getMaxValue(final int dimension) {\n        return maxValues[dimension];\n    }\n\n    
/**\n     * Gets the min value of the specified dimension.\n     *\n     * @param dimension the dimension for which we need the min value\n     * @return the min value of the specified dimension\n     */\n    public double getMinValue(final int dimension) {\n        return minValues[dimension];\n    }\n\n    /**\n     * Returns true if the given point is contained in this bounding box. This is\n     * equivalent to the point being a member of the set defined by this bounding\n     * box.\n     *\n     * @param point with which we're performing the comparison\n     * @return whether the point is contained by the bounding box\n     */\n    public boolean contains(float[] point) {\n        checkArgument(point.length == minValues.length, \" incorrect lengths\");\n        for (int i = 0; i < minValues.length; i++) {\n            if (minValues[i] > point[i] || maxValues[i] < point[i]) {\n                return false;\n            }\n        }\n\n        return true;\n    }\n\n    public boolean contains(BoundingBox otherBox) {\n        checkArgument(otherBox.minValues.length == minValues.length, \" incorrect lengths\");\n        return contains(otherBox.minValues) && contains(otherBox.maxValues);\n    }\n\n    public double getRange(final int dimension) {\n        return maxValues[dimension] - minValues[dimension];\n    }\n\n    @Override\n    public String toString() {\n        return String.format(\"BoundingBox(%s, %s)\", Arrays.toString(minValues), Arrays.toString(maxValues));\n    }\n\n    /**\n     * Two bounding boxes are considered equal if they have the same dimensions and\n     * all their min values and max values are the same. 
Min and max values are\n     * compared as primitive doubles using ==, so two bounding boxes are not equal\n     * if their min and max values are merely very close.\n     *\n     * @param other An object to test for equality\n     * @return true if other is a bounding box with the same min and max values\n     */\n    @Override\n    public boolean equals(Object other) {\n        if (!(other instanceof BoundingBox)) {\n            return false;\n        }\n\n        BoundingBox otherBox = (BoundingBox) other;\n        return Arrays.equals(minValues, otherBox.minValues) && Arrays.equals(maxValues, otherBox.maxValues);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/Cut.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\n/**\n * A Cut represents a division of space into two half-spaces. Cuts are used to\n * define the tree structure in {@link RandomCutTree}, and they determine the\n * standard tree traversal path defined in {@link RandomCutTree#traverse}.\n */\npublic class Cut {\n\n    private final int dimension;\n    private final double value;\n\n    /**\n     * Create a new Cut with the given dimension and value.\n     *\n     * @param dimension The 0-based index of the dimension that the cut is made in.\n     * @param value     The spatial value of the cut.\n     */\n    public Cut(int dimension, double value) {\n        this.dimension = dimension;\n        this.value = value;\n    }\n\n    /**\n     * For the given point, this method compares the value of that point in the cut\n     * dimension to the cut value. If the point's value in the cut dimension is less\n     * than or equal to the cut value this method returns true, otherwise it returns\n     * false. 
The name of this method is a mnemonic: if we are working in a\n     * one-dimensional space, then this method will return 'true' if the point value\n     * is to the left of the cut value on the standard number line.\n     *\n     * @param point A point that we are testing in relation to the cut\n     * @param cut   A Cut instance.\n     * @return true if the value of the point coordinate corresponding to the cut\n     *         dimension is less than or equal to the cut value, false otherwise.\n     */\n    public static boolean isLeftOf(double[] point, Cut cut) {\n        return point[cut.getDimension()] <= cut.getValue();\n    }\n\n    /**\n     * Return the index of the dimension that this cut was made in.\n     *\n     * @return the 0-based index of the dimension that this cut was made in.\n     */\n    public int getDimension() {\n        return dimension;\n    }\n\n    /**\n     * Return the value of the cut. This value separates space into two half-spaces:\n     * the set of points whose coordinate in the cut dimension is less than the cut\n     * value, and the set of points whose coordinate in the cut dimension is greater\n     * than the cut value.\n     *\n     * @return the value of the cut.\n     */\n    public double getValue() {\n        return value;\n    }\n\n    @Override\n    public String toString() {\n        return String.format(\"Cut(%d, %f)\", dimension, value);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/HyperTree.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.Function;\n\npublic class HyperTree extends RandomCutTree {\n\n    private final Function<IBoundingBoxView, double[]> gVecBuild;\n\n    public Function<IBoundingBoxView, double[]> getgVec() {\n        return gVecBuild;\n    }\n\n    public static Builder builder() {\n        return new Builder();\n    }\n\n    protected HyperTree(HyperTree.Builder builder) {\n        super(builder);\n        this.gVecBuild = builder.gVec;\n    }\n\n    public void makeTree(List<Integer> list, int seed) {\n        // this function allows a public call, which may be useful someday\n        if (list.size() > 0 && list.size() < numberOfLeaves + 1) {\n            int[] leftIndex = new int[numberOfLeaves - 1];\n            int[] rightIndex = new int[numberOfLeaves - 1];\n            Arrays.fill(leftIndex, numberOfLeaves - 1);\n            Arrays.fill(rightIndex, numberOfLeaves - 1);\n            int[] cutDimension = new int[numberOfLeaves - 1];\n            float[] cutValue = new float[numberOfLeaves - 1];\n            root = makeTreeInt(list, seed, 0, 
this.gVecBuild, leftIndex, rightIndex, cutDimension, cutValue);\n            nodeStore = AbstractNodeStore.builder().dimension(dimension).capacity(numberOfLeaves - 1)\n                    .leftIndex(leftIndex).rightIndex(rightIndex).cutDimension(cutDimension).cutValues(cutValue).build();\n            // the cuts are specififed; now build tree\n            for (int i = 0; i < list.size(); i++) {\n                addPointToPartialTree(list.get(i), 0L);\n            }\n        } else {\n            root = Null;\n        }\n    }\n\n    private int makeTreeInt(List<Integer> pointList, int seed, int firstFree,\n            Function<IBoundingBoxView, double[]> vecBuild, int[] left, int[] right, int[] cutDimension,\n            float[] cutValue) {\n\n        if (pointList.size() == 0)\n            return Null;\n\n        BoundingBox thisBox = new BoundingBox(pointStoreView.getNumericVector(pointList.get(0)));\n        for (int i = 1; i < pointList.size(); i++) {\n            thisBox = (BoundingBox) thisBox.getMergedBox(pointStoreView.getNumericVector(pointList.get(i)));\n        }\n        if (thisBox.getRangeSum() <= 0) {\n            return pointList.get(0) + nodeStore.getCapacity() + 1;\n        }\n\n        Random ring = new Random(seed);\n        int leftSeed = ring.nextInt();\n        int rightSeed = ring.nextInt();\n        Cut cut = getCut(thisBox, ring, vecBuild);\n\n        List<Integer> leftList = new ArrayList<>();\n        List<Integer> rightList = new ArrayList<>();\n\n        for (int j = 0; j < pointList.size(); j++) {\n            if (nodeStore.leftOf((float) cut.getValue(), cut.getDimension(),\n                    pointStoreView.getNumericVector(pointList.get(j)))) {\n                leftList.add(pointList.get(j));\n            } else\n                rightList.add(pointList.get(j));\n\n        }\n        int leftIndex = makeTreeInt(leftList, leftSeed, firstFree + 1, vecBuild, left, right, cutDimension, cutValue);\n        int rightIndex = 
makeTreeInt(rightList, rightSeed, firstFree + leftList.size(), vecBuild, left, right,\n                cutDimension, cutValue);\n        left[firstFree] = Math.min(leftIndex, numberOfLeaves - 1);\n        right[firstFree] = Math.min(rightIndex, numberOfLeaves - 1);\n        cutDimension[firstFree] = cut.getDimension();\n        cutValue[firstFree] = (float) cut.getValue();\n        return firstFree;\n    }\n\n    private Cut getCut(IBoundingBoxView bb, Random ring, Function<IBoundingBoxView, double[]> vecSeparation) {\n        Random rng = new Random(ring.nextInt());\n        double cutf = rng.nextDouble();\n        double dimf = rng.nextDouble();\n        int td = -1;\n        double rangeSum = 0;\n        double[] vector = vecSeparation.apply(bb);\n        for (int i = 0; i < bb.getDimensions(); i++) {\n            vector[i] = (float) vector[i];\n            rangeSum += vector[i];\n        }\n\n        double breakPoint = dimf * rangeSum;\n        float cutValue = 0;\n        for (int i = 0; i < bb.getDimensions(); i++) {\n            double range = vector[i];\n            if (range > 0) {\n                if ((breakPoint > 0) && (breakPoint <= range)) {\n                    td = i;\n                    cutValue = (float) (bb.getMinValue(td) + bb.getRange(td) * cutf);\n                    if (cutValue == bb.getMaxValue(td)) {\n                        cutValue = (float) bb.getMinValue(td);\n                    }\n                }\n                breakPoint -= range;\n            }\n        }\n\n        checkArgument(td != -1, \"Pivot selection failed.\");\n        return new Cut(td, cutValue);\n    }\n\n    public static class Builder extends RandomCutTree.Builder<Builder> {\n        private Function<IBoundingBoxView, double[]> gVec;\n\n        public Builder buildGVec(Function<IBoundingBoxView, double[]> gVec) {\n            this.gVec = gVec;\n            return this;\n        }\n\n        public HyperTree build() {\n            return new HyperTree(this);\n    
    }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/IBoundingBoxView.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\npublic interface IBoundingBoxView {\n\n    double getRangeSum();\n\n    int getDimensions();\n\n    double getRange(int i);\n\n    double getMinValue(int i);\n\n    double getMaxValue(int i);\n\n    // duplicates\n    IBoundingBoxView copy();\n\n    // below keeps the older box unchanged\n    IBoundingBoxView getMergedBox(float[] point);\n\n    // merges and keeps the older box unchaged\n    IBoundingBoxView getMergedBox(IBoundingBoxView otherBox);\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/INodeView.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport java.util.HashMap;\n\npublic interface INodeView {\n\n    boolean isLeaf();\n\n    int getMass();\n\n    IBoundingBoxView getBoundingBox();\n\n    IBoundingBoxView getSiblingBoundingBox(float[] point);\n\n    int getCutDimension();\n\n    double getCutValue();\n\n    float[] getLeafPoint();\n\n    default float[] getLiftedLeafPoint() {\n        return getLeafPoint();\n    };\n\n    /**\n     * for a leaf node, return the sequence indices corresponding leaf point. If\n     * this method is invoked on a non-leaf node then it throws an\n     * IllegalStateException.\n     */\n    HashMap<Long, Integer> getSequenceIndexes();\n\n    /**\n     * provides the probability of separation vis-a-vis the bounding box at the node\n     * \n     * @param point input piint being evaluated\n     * @return the probability of separation\n     */\n\n    double probailityOfSeparation(float[] point);\n\n    /**\n     * for a leaf node, return the index in the point store for the leaf point. If\n     * this method is invoked on a non-leaf node then it throws an\n     * IllegalStateException.\n     */\n    int getLeafPointIndex();\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/ITree.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport com.amazon.randomcutforest.config.IDynamicConfig;\nimport com.amazon.randomcutforest.executor.ITraversable;\n\n/**\n * A tree that can potentially interact with a coordinator\n *\n * @param <PointReference> The internal point representation expected by the\n *                         component models in this list.\n * @param <Point>          The explicit data type of points being passed\n */\npublic interface ITree<PointReference, Point> extends ITraversable, IDynamicConfig {\n    int getMass();\n\n    float[] projectToTree(float[] point);\n\n    float[] liftFromTree(float[] result);\n\n    double[] liftFromTree(double[] result);\n\n    int[] projectMissingIndices(int[] list);\n\n    PointReference addPoint(PointReference point, long sequenceIndex);\n\n    void addPointToPartialTree(PointReference point, long sequenceIndex);\n\n    void validateAndReconstruct();\n\n    PointReference deletePoint(PointReference point, long sequenceIndex);\n\n    long getRandomSeed();\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeStoreLarge.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Stack;\n\nimport com.amazon.randomcutforest.store.IndexIntervalManager;\n\n/**\n * A fixed-size buffer for storing interior tree nodes. An interior node is\n * defined by its location in the tree (parent and child nodes), its random cut,\n * and its bounding box. The NodeStore class uses arrays to store these field\n * values for a collection of nodes. 
An index in the store can be used to look\n * up the field values for a particular node.\n *\n * The internal nodes (handled by this store) corresponds to\n * [0..upperRangeLimit]\n *\n * If we think of an array of Node objects as being row-oriented (where each row\n * is a Node), then this class is analogous to a column-oriented database of\n * Nodes.\n *\n */\npublic class NodeStoreLarge extends AbstractNodeStore {\n\n    private final int[] parentIndex;\n    private final int[] leftIndex;\n    private final int[] rightIndex;\n    public final int[] cutDimension;\n    private final int[] mass;\n\n    public NodeStoreLarge(AbstractNodeStore.Builder builder) {\n        super(builder);\n        mass = new int[capacity];\n        Arrays.fill(mass, 0);\n        if (builder.storeParent) {\n            parentIndex = new int[capacity];\n            Arrays.fill(parentIndex, capacity);\n        } else {\n            parentIndex = null;\n        }\n        if (builder.leftIndex == null) {\n            leftIndex = new int[capacity];\n            rightIndex = new int[capacity];\n            cutDimension = new int[capacity];\n            Arrays.fill(leftIndex, capacity);\n            Arrays.fill(rightIndex, capacity);\n        } else {\n            leftIndex = Arrays.copyOf(builder.leftIndex, builder.leftIndex.length);\n            rightIndex = Arrays.copyOf(builder.rightIndex, builder.rightIndex.length);\n            cutDimension = Arrays.copyOf(builder.cutDimension, builder.cutDimension.length);\n            BitSet bits = new BitSet(capacity);\n            if (builder.root != Null) {\n                bits.set(builder.root);\n            }\n            for (int i = 0; i < leftIndex.length; i++) {\n                if (isInternal(leftIndex[i])) {\n                    bits.set(leftIndex[i]);\n                    if (parentIndex != null) {\n                        parentIndex[leftIndex[i]] = i;\n                    }\n                }\n            }\n            for (int i = 0; 
i < rightIndex.length; i++) {\n                if (isInternal(rightIndex[i])) {\n                    bits.set(rightIndex[i]);\n                    if (parentIndex != null) {\n                        parentIndex[rightIndex[i]] = i;\n                    }\n                }\n            }\n            freeNodeManager = new IndexIntervalManager(capacity, capacity, bits);\n        }\n    }\n\n    @Override\n    public int addNode(Stack<int[]> pathToRoot, float[] point, long sequenceIndex, int pointIndex, int childIndex,\n            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box) {\n        int index = freeNodeManager.takeIndex();\n        this.cutValue[index] = cutValue;\n        this.cutDimension[index] = (byte) cutDimension;\n        if (leftOf(cutValue, cutDimension, point)) {\n            this.leftIndex[index] = (pointIndex + capacity + 1);\n            this.rightIndex[index] = childIndex;\n        } else {\n            this.rightIndex[index] = (pointIndex + capacity + 1);\n            this.leftIndex[index] = childIndex;\n        }\n        this.mass[index] = (((childMassIfLeaf > 0) ? childMassIfLeaf : getMass(childIndex)) + 1) % (capacity + 1);\n\n        int parentIndex = (pathToRoot.size() == 0) ? 
Null : pathToRoot.lastElement()[0];\n        if (this.parentIndex != null) {\n            this.parentIndex[index] = parentIndex;\n            if (!isLeaf(childIndex)) {\n                this.parentIndex[childIndex] = (index);\n            }\n        }\n        if (parentIndex != Null) {\n            spliceEdge(parentIndex, childIndex, index);\n        }\n        return index;\n    }\n\n    public int getLeftIndex(int index) {\n        return leftIndex[index];\n    }\n\n    public int getRightIndex(int index) {\n        return rightIndex[index];\n    }\n\n    public void setRoot(int index) {\n        if (!isLeaf(index) && parentIndex != null) {\n            parentIndex[index] = capacity;\n        }\n    }\n\n    @Override\n    protected void decreaseMassOfInternalNode(int node) {\n        mass[node] = (mass[node] + capacity) % (capacity + 1);\n    }\n\n    @Override\n    protected void increaseMassOfInternalNode(int node) {\n        mass[node] = (mass[node] + 1) % (capacity + 1);\n    }\n\n    public void deleteInternalNode(int index) {\n        leftIndex[index] = capacity;\n        rightIndex[index] = capacity;\n        if (parentIndex != null) {\n            parentIndex[index] = capacity;\n        }\n        freeNodeManager.releaseIndex(index);\n    }\n\n    public int getMass(int index) {\n        return mass[index] != 0 ? 
mass[index] : (capacity + 1);\n    }\n\n    @Override\n    public void assignInPartialTree(int node, float[] point, int childReference) {\n        if (leftOf(node, point)) {\n            leftIndex[node] = childReference;\n        } else {\n            rightIndex[node] = childReference;\n        }\n    }\n\n    public void spliceEdge(int parent, int node, int newNode) {\n        assert (!isLeaf(newNode));\n        if (node == leftIndex[parent]) {\n            leftIndex[parent] = newNode;\n        } else {\n            rightIndex[parent] = newNode;\n        }\n        if (parentIndex != null && isInternal(node)) {\n            parentIndex[node] = newNode;\n        }\n    }\n\n    public void replaceParentBySibling(int grandParent, int parent, int node) {\n        int sibling = getSibling(node, parent);\n        if (parent == leftIndex[grandParent]) {\n            leftIndex[grandParent] = sibling;\n        } else {\n            rightIndex[grandParent] = sibling;\n        }\n        if (parentIndex != null && isInternal(sibling)) {\n            parentIndex[sibling] = grandParent;\n        }\n    }\n\n    public int getCutDimension(int index) {\n        return cutDimension[index];\n    }\n\n    public int[] getCutDimension() {\n        return Arrays.copyOf(cutDimension, cutDimension.length);\n    }\n\n    public int[] getLeftIndex() {\n        return Arrays.copyOf(leftIndex, leftIndex.length);\n    }\n\n    public int[] getRightIndex() {\n        return Arrays.copyOf(rightIndex, rightIndex.length);\n    }\n\n    public int getParentIndex(int index) {\n        checkArgument(parentIndex != null, \"incorrect call\");\n        return parentIndex[index];\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeStoreMedium.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toCharArray;\nimport static com.amazon.randomcutforest.CommonUtils.toIntArray;\n\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Stack;\n\nimport com.amazon.randomcutforest.store.IndexIntervalManager;\n\n/**\n * A fixed-size buffer for storing interior tree nodes. An interior node is\n * defined by its location in the tree (parent and child nodes), its random cut,\n * and its bounding box. The NodeStore class uses arrays to store these field\n * values for a collection of nodes. An index in the store can be used to look\n * up the field values for a particular node.\n *\n * The internal nodes (handled by this store) corresponds to [0..capacity]. The\n * mass of the nodes is cyclic, i.e., mass % (capacity + 1) -- therefore, in\n * presence of duplicates there would be nodes which are free, and they would\n * have mass 0 == (capacity + 1). 
But those nodes would not be reachable by the\n * code below.\n *\n */\npublic class NodeStoreMedium extends AbstractNodeStore {\n\n    private final char[] parentIndex;\n    private final int[] leftIndex;\n    private final int[] rightIndex;\n    public final char[] cutDimension;\n    private final char[] mass;\n\n    public NodeStoreMedium(AbstractNodeStore.Builder builder) {\n        super(builder);\n        mass = new char[capacity];\n        Arrays.fill(mass, (char) 0);\n        if (builder.storeParent) {\n            parentIndex = new char[capacity];\n            Arrays.fill(parentIndex, (char) capacity);\n        } else {\n            parentIndex = null;\n        }\n        if (builder.leftIndex == null) {\n            leftIndex = new int[capacity];\n            rightIndex = new int[capacity];\n            cutDimension = new char[capacity];\n            Arrays.fill(leftIndex, capacity);\n            Arrays.fill(rightIndex, capacity);\n        } else {\n            leftIndex = Arrays.copyOf(builder.leftIndex, builder.leftIndex.length);\n            rightIndex = Arrays.copyOf(builder.rightIndex, builder.rightIndex.length);\n            cutDimension = toCharArray(builder.cutDimension);\n            BitSet bits = new BitSet(capacity);\n            if (builder.root != Null) {\n                bits.set(builder.root);\n            }\n            for (int i = 0; i < leftIndex.length; i++) {\n                if (isInternal(leftIndex[i])) {\n                    bits.set(leftIndex[i]);\n                    if (parentIndex != null) {\n                        parentIndex[leftIndex[i]] = (char) i;\n                    }\n                }\n            }\n            for (int i = 0; i < rightIndex.length; i++) {\n                if (isInternal(rightIndex[i])) {\n                    bits.set(rightIndex[i]);\n                    if (parentIndex != null) {\n                        parentIndex[rightIndex[i]] = (char) i;\n                    }\n                }\n            
}\n            freeNodeManager = new IndexIntervalManager(capacity, capacity, bits);\n            // need to set up parents using the root\n        }\n    }\n\n    @Override\n    public int addNode(Stack<int[]> pathToRoot, float[] point, long sequenceIndex, int pointIndex, int childIndex,\n            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box) {\n        int index = freeNodeManager.takeIndex();\n        this.cutValue[index] = cutValue;\n        this.cutDimension[index] = (char) cutDimension;\n        if (leftOf(cutValue, cutDimension, point)) {\n            this.leftIndex[index] = (pointIndex + capacity + 1);\n            this.rightIndex[index] = childIndex;\n        } else {\n            this.rightIndex[index] = (pointIndex + capacity + 1);\n            this.leftIndex[index] = childIndex;\n        }\n        this.mass[index] = (char) ((((childMassIfLeaf > 0) ? childMassIfLeaf : getMass(childIndex)) + 1)\n                % (capacity + 1));\n        int parentIndex = (pathToRoot.size() == 0) ? 
Null : pathToRoot.lastElement()[0];\n        if (this.parentIndex != null) {\n            this.parentIndex[index] = (char) parentIndex;\n            if (!isLeaf(childIndex)) {\n                this.parentIndex[childIndex] = (char) (index);\n            }\n        }\n        if (parentIndex != Null) {\n            spliceEdge(parentIndex, childIndex, index);\n        }\n        return index;\n    }\n\n    @Override\n    public void assignInPartialTree(int node, float[] point, int childReference) {\n        if (leftOf(node, point)) {\n            leftIndex[node] = childReference;\n        } else {\n            rightIndex[node] = childReference;\n        }\n    }\n\n    public int getLeftIndex(int index) {\n        return leftIndex[index];\n    }\n\n    public int getRightIndex(int index) {\n        return rightIndex[index];\n    }\n\n    public int getParentIndex(int index) {\n        checkArgument(parentIndex != null, \"incorrect call\");\n        return parentIndex[index];\n    }\n\n    public void setRoot(int index) {\n        if (!isLeaf(index) && parentIndex != null) {\n            parentIndex[index] = (char) capacity;\n        }\n    }\n\n    @Override\n    protected void decreaseMassOfInternalNode(int node) {\n        mass[node] = (char) ((mass[node] + capacity) % (capacity + 1)); // this cannot get to 0\n    }\n\n    @Override\n    protected void increaseMassOfInternalNode(int node) {\n        mass[node] = (char) ((mass[node] + 1) % (capacity + 1));\n        // mass of root == 0; note capacity = number_of_leaves - 1\n    }\n\n    public void deleteInternalNode(int index) {\n        leftIndex[index] = capacity;\n        rightIndex[index] = capacity;\n        if (parentIndex != null) {\n            parentIndex[index] = (char) capacity;\n        }\n        freeNodeManager.releaseIndex(index);\n    }\n\n    public int getMass(int index) {\n        return mass[index] != 0 ? 
mass[index] : (capacity + 1);\n    }\n\n    public void spliceEdge(int parent, int node, int newNode) {\n        assert (!isLeaf(newNode));\n        if (node == leftIndex[parent]) {\n            leftIndex[parent] = newNode;\n        } else {\n            rightIndex[parent] = newNode;\n        }\n        if (parentIndex != null && isInternal(node)) {\n            parentIndex[node] = (char) newNode;\n        }\n    }\n\n    public void replaceParentBySibling(int grandParent, int parent, int node) {\n        int sibling = getSibling(node, parent);\n        if (parent == leftIndex[grandParent]) {\n            leftIndex[grandParent] = sibling;\n        } else {\n            rightIndex[grandParent] = sibling;\n        }\n        if (parentIndex != null && isInternal(sibling)) {\n            parentIndex[sibling] = (char) grandParent;\n        }\n    }\n\n    public int getCutDimension(int index) {\n        return cutDimension[index];\n    }\n\n    public int[] getCutDimension() {\n        return toIntArray(cutDimension);\n    }\n\n    public int[] getLeftIndex() {\n        return Arrays.copyOf(leftIndex, leftIndex.length);\n    }\n\n    public int[] getRightIndex() {\n        return Arrays.copyOf(rightIndex, rightIndex.length);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeStoreSmall.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toByteArray;\nimport static com.amazon.randomcutforest.CommonUtils.toCharArray;\nimport static com.amazon.randomcutforest.CommonUtils.toIntArray;\n\nimport java.util.Arrays;\nimport java.util.BitSet;\nimport java.util.Stack;\n\nimport com.amazon.randomcutforest.store.IndexIntervalManager;\n\n/**\n * A fixed-size buffer for storing interior tree nodes. An interior node is\n * defined by its location in the tree (parent and child nodes), its random cut,\n * and its bounding box. The NodeStore class uses arrays to store these field\n * values for a collection of nodes. 
An index in the store can be used to look\n * up the field values for a particular node.\n *\n * The internal nodes (handled by this store) corresponds to\n * [0..upperRangeLimit]\n *\n * If we think of an array of Node objects as being row-oriented (where each row\n * is a Node), then this class is analogous to a column-oriented database of\n * Nodes.\n */\npublic class NodeStoreSmall extends AbstractNodeStore {\n\n    private final byte[] parentIndex;\n    private final char[] leftIndex;\n    private final char[] rightIndex;\n    public final byte[] cutDimension;\n    private final byte[] mass;\n\n    public NodeStoreSmall(AbstractNodeStore.Builder builder) {\n        super(builder);\n        mass = new byte[capacity];\n        Arrays.fill(mass, (byte) 0);\n        if (builder.storeParent) {\n            parentIndex = new byte[capacity];\n            Arrays.fill(parentIndex, (byte) capacity);\n        } else {\n            parentIndex = null;\n        }\n        if (builder.leftIndex == null) {\n            leftIndex = new char[capacity];\n            rightIndex = new char[capacity];\n            cutDimension = new byte[capacity];\n            Arrays.fill(leftIndex, (char) capacity);\n            Arrays.fill(rightIndex, (char) capacity);\n        } else {\n            checkArgument(builder.leftIndex.length == capacity, \" incorrect length\");\n            checkArgument(builder.rightIndex.length == capacity, \" incorrect length\");\n\n            leftIndex = toCharArray(builder.leftIndex);\n            rightIndex = toCharArray(builder.rightIndex);\n            cutDimension = toByteArray(builder.cutDimension);\n            BitSet bits = new BitSet(capacity);\n            if (builder.root != Null) {\n                bits.set(builder.root);\n            }\n            for (int i = 0; i < leftIndex.length; i++) {\n                if (isInternal(leftIndex[i])) {\n                    bits.set(leftIndex[i]);\n                    if (parentIndex != null) {\n               
         parentIndex[leftIndex[i]] = (byte) i;\n                    }\n                }\n            }\n            for (int i = 0; i < rightIndex.length; i++) {\n                if (isInternal(rightIndex[i])) {\n                    bits.set(rightIndex[i]);\n                    if (parentIndex != null) {\n                        parentIndex[rightIndex[i]] = (byte) i;\n                    }\n                }\n            }\n            freeNodeManager = new IndexIntervalManager(capacity, capacity, bits);\n            // need to set up parents using the root\n        }\n    }\n\n    @Override\n    public int addNode(Stack<int[]> pathToRoot, float[] point, long sequenceIndex, int pointIndex, int childIndex,\n            int childMassIfLeaf, int cutDimension, float cutValue, BoundingBox box) {\n        int index = freeNodeManager.takeIndex();\n        this.cutValue[index] = cutValue;\n        this.cutDimension[index] = (byte) cutDimension;\n        if (leftOf(cutValue, cutDimension, point)) {\n            this.leftIndex[index] = (char) (pointIndex + capacity + 1);\n            this.rightIndex[index] = (char) childIndex;\n        } else {\n            this.rightIndex[index] = (char) (pointIndex + capacity + 1);\n            this.leftIndex[index] = (char) childIndex;\n        }\n        this.mass[index] = (byte) ((((childMassIfLeaf > 0) ? childMassIfLeaf : getMass(childIndex)) + 1)\n                % (capacity + 1));\n        int parentIndex = (pathToRoot.size() == 0) ? 
Null : pathToRoot.lastElement()[0];\n        if (this.parentIndex != null) {\n            this.parentIndex[index] = (byte) parentIndex;\n            if (!isLeaf(childIndex)) {\n                this.parentIndex[childIndex] = (byte) (index);\n            }\n        }\n        if (parentIndex != Null) {\n            spliceEdge(parentIndex, childIndex, index);\n        }\n        return index;\n    }\n\n    @Override\n    public void assignInPartialTree(int node, float[] point, int childReference) {\n        if (leftOf(node, point)) {\n            leftIndex[node] = (char) childReference;\n        } else {\n            rightIndex[node] = (char) childReference;\n        }\n    }\n\n    public int getLeftIndex(int index) {\n        return leftIndex[index];\n    }\n\n    public int getRightIndex(int index) {\n        return rightIndex[index];\n    }\n\n    public int getParentIndex(int index) {\n        checkArgument(parentIndex != null, \"incorrect call\");\n        return parentIndex[index];\n    }\n\n    public void setRoot(int index) {\n        if (!isLeaf(index) && parentIndex != null) {\n            parentIndex[index] = (byte) capacity;\n        }\n    }\n\n    @Override\n    protected void decreaseMassOfInternalNode(int node) {\n        mass[node] = (byte) (((mass[node] & 0xff) + capacity) % (capacity + 1)); // this cannot get to 0\n    }\n\n    @Override\n    protected void increaseMassOfInternalNode(int node) {\n        mass[node] = (byte) (((mass[node] & 0xff) + 1) % (capacity + 1));\n        // mass of root == 0; note capacity = number_of_leaves - 1\n    }\n\n    public void deleteInternalNode(int index) {\n        leftIndex[index] = (char) capacity;\n        rightIndex[index] = (char) capacity;\n        if (parentIndex != null) {\n            parentIndex[index] = (byte) capacity;\n        }\n        freeNodeManager.releaseIndex(index);\n    }\n\n    public int getMass(int index) {\n        return mass[index] != 0 ? 
(mass[index] & 0xff) : (capacity + 1);\n    }\n\n    public void spliceEdge(int parent, int node, int newNode) {\n        assert (!isLeaf(newNode));\n        if (node == leftIndex[parent]) {\n            leftIndex[parent] = (char) newNode;\n        } else {\n            rightIndex[parent] = (char) newNode;\n        }\n        if (parentIndex != null && isInternal(node)) {\n            parentIndex[node] = (byte) newNode;\n        }\n    }\n\n    public void replaceParentBySibling(int grandParent, int parent, int node) {\n        int sibling = getSibling(node, parent);\n        if (parent == leftIndex[grandParent]) {\n            leftIndex[grandParent] = (char) sibling;\n        } else {\n            rightIndex[grandParent] = (char) sibling;\n        }\n        if (parentIndex != null && isInternal(sibling)) {\n            parentIndex[sibling] = (byte) grandParent;\n        }\n    }\n\n    public int getCutDimension(int index) {\n        return cutDimension[index] & 0xff;\n    }\n\n    public int[] getCutDimension() {\n        return toIntArray(cutDimension);\n    }\n\n    public int[] getLeftIndex() {\n        return toIntArray(leftIndex);\n    }\n\n    public int[] getRightIndex() {\n        return toIntArray(rightIndex);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/NodeView.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkState;\n\nimport java.util.HashMap;\n\nimport com.amazon.randomcutforest.store.IPointStoreView;\n\npublic class NodeView implements INodeView {\n\n    public static double SWITCH_FRACTION = 0.499;\n\n    RandomCutTree tree;\n    int currentNodeOffset;\n    float[] leafPoint;\n    BoundingBox currentBox;\n\n    public NodeView(RandomCutTree tree, IPointStoreView<float[]> pointStoreView, int root) {\n        this.currentNodeOffset = root;\n        this.tree = tree;\n    }\n\n    public int getMass() {\n        return tree.getMass(currentNodeOffset);\n    }\n\n    public IBoundingBoxView getBoundingBox() {\n        if (currentBox == null) {\n            return tree.getBox(currentNodeOffset);\n        }\n        return currentBox;\n    }\n\n    public IBoundingBoxView getSiblingBoundingBox(float[] point) {\n        return (toLeft(point)) ? 
tree.getBox(tree.nodeStore.getRightIndex(currentNodeOffset))\n                : tree.getBox(tree.nodeStore.getLeftIndex(currentNodeOffset));\n    }\n\n    public int getCutDimension() {\n        return tree.nodeStore.getCutDimension(currentNodeOffset);\n    }\n\n    @Override\n    public double getCutValue() {\n        return tree.nodeStore.getCutValue(currentNodeOffset);\n    }\n\n    public float[] getLeafPoint() {\n        return leafPoint;\n    }\n\n    public HashMap<Long, Integer> getSequenceIndexes() {\n        checkState(isLeaf(), \"can only be invoked for a leaf\");\n        if (tree.storeSequenceIndexesEnabled) {\n            return tree.getSequenceMap(tree.getPointIndex(currentNodeOffset));\n        } else {\n            return new HashMap<>();\n        }\n    }\n\n    @Override\n    public double probailityOfSeparation(float[] point) {\n        return tree.probabilityOfCut(currentNodeOffset, point, currentBox);\n    }\n\n    @Override\n    public int getLeafPointIndex() {\n        return tree.getPointIndex(currentNodeOffset);\n    }\n\n    public boolean isLeaf() {\n        return tree.nodeStore.isLeaf(currentNodeOffset);\n    }\n\n    protected void setCurrentNode(int newNode, int index, boolean setBox) {\n        currentNodeOffset = newNode;\n        leafPoint = tree.pointStoreView.getNumericVector(index);\n        if (setBox && tree.boundingBoxCacheFraction < SWITCH_FRACTION) {\n            currentBox = new BoundingBox(leafPoint, leafPoint);\n        }\n    }\n\n    protected void setCurrentNodeOnly(int newNode) {\n        currentNodeOffset = newNode;\n    }\n\n    public void updateToParent(int parent, int currentSibling, boolean updateBox) {\n        currentNodeOffset = parent;\n        if (updateBox && tree.boundingBoxCacheFraction < SWITCH_FRACTION) {\n            tree.growNodeBox(currentBox, tree.pointStoreView, parent, currentSibling);\n        }\n    }\n\n    // this function exists for matching the behavior of RCF2.0 and will be replaced\n    
// this function explicitly uses the encoding of the new nodestore\n    protected boolean toLeft(float[] point) {\n        return point[tree.nodeStore.getCutDimension(currentNodeOffset)] <= tree.nodeStore\n                .getCutValue(currentNodeOffset);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/tree/RandomCutTree.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.CommonUtils.checkState;\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.DEFAULT_STORE_PARENT;\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;\nimport static java.lang.Math.max;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Optional;\nimport java.util.Random;\nimport java.util.Stack;\n\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.MultiVisitor;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.Visitor;\nimport com.amazon.randomcutforest.config.Config;\nimport com.amazon.randomcutforest.store.IPointStoreView;\n\n/**\n * A Compact Random Cut Tree is a tree data structure whose leaves represent\n * points inserted into the tree and whose interior nodes represent regions of\n * space defined by Bounding Boxes and Cuts. 
New nodes and leaves are added to\n * the tree by making random cuts.\n *\n * The offsets are encoded as follows: an offset greater or equal maxSize\n * corresponds to a leaf node of offset (offset - maxSize) otherwise the offset\n * corresponds to an internal node\n *\n * The main use of this class is to be updated with points sampled from a\n * stream, and to define traversal methods. Users can then implement a\n * {@link Visitor} which can be submitted to a traversal method in order to\n * compute a statistic from the tree.\n */\npublic class RandomCutTree implements ITree<Integer, float[]> {\n\n    /**\n     * The index value used to represent the absence of a node. For example, when\n     * the tree is created the root node index will be NULL. After a point is added\n     * and a root node is created, the root node's parent will be NULL, and so on.\n     */\n\n    private Random testRandom;\n    protected boolean storeSequenceIndexesEnabled;\n    protected boolean centerOfMassEnabled;\n    private long randomSeed;\n    protected int root;\n    protected IPointStoreView<float[]> pointStoreView;\n    protected int numberOfLeaves;\n    protected AbstractNodeStore nodeStore;\n    protected double boundingBoxCacheFraction;\n    protected int outputAfter;\n    protected int dimension;\n    protected final HashMap<Integer, Integer> leafMass;\n    protected double[] rangeSumData;\n    protected float[] boundingBoxData;\n    protected float[] pointSum;\n    protected HashMap<Integer, List<Long>> sequenceMap;\n\n    protected RandomCutTree(Builder<?> builder) {\n        pointStoreView = builder.pointStoreView;\n        numberOfLeaves = builder.capacity;\n        randomSeed = builder.randomSeed;\n        testRandom = builder.random;\n        outputAfter = builder.outputAfter.orElse(max(1, numberOfLeaves / 4));\n        dimension = (builder.dimension != 0) ? builder.dimension : pointStoreView.getDimensions();\n        nodeStore = (builder.nodeStore != null) ? 
builder.nodeStore\n                : AbstractNodeStore.builder().capacity(numberOfLeaves - 1).storeParent(builder.storeParent)\n                        .dimension(dimension).build();\n        this.boundingBoxCacheFraction = builder.boundingBoxCacheFraction;\n        this.storeSequenceIndexesEnabled = builder.storeSequenceIndexesEnabled;\n        this.centerOfMassEnabled = builder.centerOfMassEnabled;\n        this.root = builder.root;\n        leafMass = new HashMap<>();\n        int cache_limit = (int) Math.floor(boundingBoxCacheFraction * (numberOfLeaves - 1));\n        rangeSumData = new double[cache_limit];\n        boundingBoxData = new float[2 * dimension * cache_limit];\n        if (this.centerOfMassEnabled) {\n            pointSum = new float[(numberOfLeaves - 1) * dimension];\n        }\n        if (this.storeSequenceIndexesEnabled) {\n            sequenceMap = new HashMap<>();\n        }\n    }\n\n    @Override\n    public <T> void setConfig(String name, T value, Class<T> clazz) {\n        if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {\n            checkArgument(Double.class.isAssignableFrom(clazz),\n                    () -> String.format(\"Setting '%s' must be a double value\", name));\n            setBoundingBoxCacheFraction((Double) value);\n        } else {\n            throw new IllegalArgumentException(\"Unsupported configuration setting: \" + name);\n        }\n    }\n\n    @Override\n    public <T> T getConfig(String name, Class<T> clazz) {\n        checkNotNull(clazz, \"clazz must not be null\");\n        if (Config.BOUNDING_BOX_CACHE_FRACTION.equals(name)) {\n            checkArgument(clazz.isAssignableFrom(Double.class),\n                    () -> String.format(\"Setting '%s' must be a double value\", name));\n            return clazz.cast(boundingBoxCacheFraction);\n        } else {\n            throw new IllegalArgumentException(\"Unsupported configuration setting: \" + name);\n        }\n    }\n\n    // dynamically change the 
fraction of the new nodes which caches their bounding\n    // boxes\n    // 0 would mean less space usage, but slower throughput\n    // 1 would imply larger space but better throughput\n    public void setBoundingBoxCacheFraction(double fraction) {\n        checkArgument(0 <= fraction && fraction <= 1, \"incorrect parameter\");\n        boundingBoxCacheFraction = fraction;\n        resizeCache(fraction);\n    }\n\n    /**\n     * Return a new {@link Cut}, which is chosen uniformly over the space of\n     * possible cuts for a bounding box and its union with a point. The cut must\n     * exist unless the union box is a single point. There are floating point issues\n     * -- even though the original values are in float and the calculations are in\n     * double, which can show up with large number of dimensions (each triggering an\n     * addition/subtraction).\n     *\n     * @param factor A random cut\n     * @param point  the point whose union is taken with the box\n     * @param box    A bounding box that we want to find a random cut for.\n     * @return A new Cut corresponding to a random cut in the bounding box.\n     */\n    protected Cut randomCut(double factor, float[] point, BoundingBox box) {\n        double range = 0.0;\n\n        for (int i = 0; i < point.length; i++) {\n            float minValue = (float) box.getMinValue(i);\n            float maxValue = (float) box.getMaxValue(i);\n            if (point[i] < minValue) {\n                minValue = point[i];\n            } else if (point[i] > maxValue) {\n                maxValue = point[i];\n            }\n            range += maxValue - minValue;\n        }\n\n        checkArgument(range > 0, () -> \" the union is a single point \" + Arrays.toString(point)\n                + \"or the box is inappropriate, box\" + box.toString() + \"factor =\" + factor);\n\n        double breakPoint = factor * range;\n\n        for (int i = 0; i < box.getDimensions(); i++) {\n            float minValue = (float) 
box.getMinValue(i);\n            float maxValue = (float) box.getMaxValue(i);\n            if (point[i] < minValue) {\n                minValue = point[i];\n            } else if (point[i] > maxValue) {\n                maxValue = point[i];\n            }\n            double gap = maxValue - minValue;\n            if (breakPoint <= gap && gap > 0) {\n                float cutValue = (float) (minValue + breakPoint);\n\n                // Random cuts have to take a value in the half-open interval [minValue,\n                // maxValue) to ensure that a\n                // Node has a valid left child and right child.\n                if (cutValue >= maxValue) {\n                    cutValue = Math.nextAfter((float) maxValue, minValue);\n                }\n\n                return new Cut(i, cutValue);\n            }\n            breakPoint -= gap;\n        }\n\n        // if we are here then factor is likely almost 1 and we have floating point\n        // issues\n        // we will randomize between the first and the last non-zero ranges and choose\n        // the\n        // same cutValue as using nextAfter above -- we will use the factor as a seed\n        // and\n        // not be optimizing this sequel (either in execution or code) to ensure easier\n        // debugging\n        // this should be an anomaly - no pun intended.\n\n        Random rng = new Random((long) (factor * Long.MAX_VALUE / 2));\n        if (rng.nextDouble() < 0.5) {\n            for (int i = 0; i < box.getDimensions(); i++) {\n                float minValue = (float) box.getMinValue(i);\n                float maxValue = (float) box.getMaxValue(i);\n                if (point[i] < minValue) {\n                    minValue = point[i];\n                } else if (point[i] > maxValue) {\n                    maxValue = point[i];\n                }\n                if (maxValue > minValue) {\n                    double cutValue = Math.nextAfter((float) maxValue, minValue);\n                    
return new Cut(i, cutValue);\n                }\n            }\n        } else {\n            for (int i = box.getDimensions() - 1; i >= 0; i--) {\n                float minValue = (float) box.getMinValue(i);\n                float maxValue = (float) box.getMaxValue(i);\n                if (point[i] < minValue) {\n                    minValue = point[i];\n                } else if (point[i] > maxValue) {\n                    maxValue = point[i];\n                }\n                if (maxValue > minValue) {\n                    double cutValue = Math.nextAfter((float) maxValue, minValue);\n                    return new Cut(i, cutValue);\n                }\n            }\n        }\n\n        throw new IllegalStateException(\"The break point did not lie inside the expected range; factor \" + factor\n                + \", point \" + Arrays.toString(point) + \" box \" + box.toString());\n\n    }\n\n    /**\n     * the following function adds a point to the tree\n     * \n     * @param pointIndex    the number corresponding to the point\n     * @param sequenceIndex sequence index of the point\n     * @return the value of the point index where the point was added; this is\n     *         pointIndex if there are no duplicates; otherwise it is the value of\n     *         the point being duplicated.\n     */\n    public Integer addPoint(Integer pointIndex, long sequenceIndex) {\n\n        if (root == Null) {\n            root = convertToLeaf(pointIndex);\n            addLeaf(pointIndex, sequenceIndex);\n            return pointIndex;\n        } else {\n\n            float[] point = projectToTree(pointStoreView.getNumericVector(pointIndex));\n            checkArgument(point.length == dimension, () -> \" mismatch in dimensions for \" + pointIndex);\n            Stack<int[]> pathToRoot = nodeStore.getPath(root, point, false);\n            int[] first = pathToRoot.pop();\n            int leafNode = first[0];\n            int savedParent = (pathToRoot.size() == 0) ? 
Null : pathToRoot.lastElement()[0];\n            int leafSavedSibling = first[1];\n            int sibling = leafSavedSibling;\n            int leafPointIndex = getPointIndex(leafNode);\n            float[] oldPoint = projectToTree(pointStoreView.getNumericVector(leafPointIndex));\n            checkArgument(oldPoint.length == dimension, () -> \" mismatch in dimensions for \" + pointIndex);\n\n            Stack<int[]> parentPath = new Stack<>();\n\n            if (Arrays.equals(point, oldPoint)) {\n                increaseLeafMass(leafNode);\n                manageAncestorsAdd(pathToRoot, point);\n                addLeaf(leafPointIndex, sequenceIndex);\n                return leafPointIndex;\n            } else {\n                int node = leafNode;\n                int savedNode = node;\n                int parent = savedParent;\n                float savedCutValue = (float) 0.0;\n                BoundingBox currentBox = new BoundingBox(oldPoint, oldPoint);\n                BoundingBox savedBox = currentBox.copy();\n                int savedDim = Integer.MAX_VALUE;\n                Random rng;\n                if (testRandom == null) {\n                    rng = new Random(randomSeed);\n                    randomSeed = rng.nextLong();\n                } else {\n                    rng = testRandom;\n                }\n                while (true) {\n                    double factor = rng.nextDouble();\n                    Cut cut = randomCut(factor, point, currentBox);\n                    int dim = cut.getDimension();\n                    float value = (float) cut.getValue();\n\n                    boolean separation = ((point[dim] <= value && value < currentBox.getMinValue(dim)\n                            || point[dim] > value && value >= currentBox.getMaxValue(dim)));\n\n                    if (separation) {\n                        savedCutValue = value;\n                        savedDim = dim;\n                        savedParent = parent;\n                 
       savedNode = node;\n                        savedBox = currentBox.copy();\n                        parentPath.clear();\n                    } else {\n                        parentPath.push(new int[] { node, sibling });\n                    }\n\n                    if (currentBox.contains(point) || parent == Null) {\n                        break;\n                    } else {\n                        growNodeBox(currentBox, pointStoreView, parent, sibling);\n                        int[] next = pathToRoot.pop();\n                        node = next[0];\n                        sibling = next[1];\n                        if (pathToRoot.size() != 0) {\n                            parent = pathToRoot.lastElement()[0];\n                        } else {\n                            parent = Null;\n                        }\n                    }\n                }\n                if (savedParent != Null) {\n                    while (!parentPath.isEmpty()) {\n                        pathToRoot.push(parentPath.pop());\n                    }\n                }\n\n                int childMassIfLeaf = isLeaf(savedNode) ? 
getLeafMass(savedNode) : 0;\n                int mergedNode = nodeStore.addNode(pathToRoot, point, sequenceIndex, pointIndex, savedNode,\n                        childMassIfLeaf, savedDim, savedCutValue, savedBox);\n                addLeaf(pointIndex, sequenceIndex);\n                addBox(mergedNode, point, savedBox);\n                manageAncestorsAdd(pathToRoot, point);\n                if (pointSum != null) {\n                    recomputePointSum(mergedNode);\n                }\n                if (savedParent == Null) {\n                    root = mergedNode;\n                }\n            }\n            return pointIndex;\n        }\n    }\n\n    protected void manageAncestorsAdd(Stack<int[]> path, float[] point) {\n        while (!path.isEmpty()) {\n            int index = path.pop()[0];\n            nodeStore.increaseMassOfInternalNode(index);\n            if (pointSum != null) {\n                recomputePointSum(index);\n            }\n            if (boundingBoxCacheFraction > 0.0) {\n                checkContainsAndRebuildBox(index, point, pointStoreView);\n                addPointInPlace(index, point);\n            }\n        }\n    }\n\n    /**\n     * the following is the same as in addPoint() except this function is used to\n     * rebuild the tree structure. 
This function does not create auxiliary arrays,\n     * which should be performed using validateAndReconstruct()\n     * \n     * @param pointIndex    index of point (in point store)\n     * @param sequenceIndex sequence index (stored in sampler)\n     */\n    public void addPointToPartialTree(Integer pointIndex, long sequenceIndex) {\n\n        checkArgument(root != Null, \" a null root is not a partial tree\");\n        float[] point = projectToTree(pointStoreView.getNumericVector(pointIndex));\n        checkArgument(point.length == dimension, () -> \" incorrect projection at index \" + pointIndex);\n\n        Stack<int[]> pathToRoot = nodeStore.getPath(root, point, false);\n        int[] first = pathToRoot.pop();\n        int leafNode = first[0];\n        int savedParent = (pathToRoot.size() == 0) ? Null : pathToRoot.lastElement()[0];\n        if (!isLeaf(leafNode)) {\n            if (savedParent == Null) {\n                root = convertToLeaf(pointIndex);\n            } else {\n                nodeStore.assignInPartialTree(savedParent, point, convertToLeaf(pointIndex));\n                nodeStore.manageInternalNodesPartial(pathToRoot);\n                addLeaf(pointIndex, sequenceIndex);\n            }\n            return;\n        }\n        int leafPointIndex = getPointIndex(leafNode);\n        float[] oldPoint = projectToTree(pointStoreView.getNumericVector(leafPointIndex));\n\n        checkArgument(oldPoint.length == dimension && Arrays.equals(point, oldPoint),\n                () -> \"incorrect state on adding \" + pointIndex);\n        increaseLeafMass(leafNode);\n        nodeStore.manageInternalNodesPartial(pathToRoot);\n        addLeaf(leafPointIndex, sequenceIndex);\n        return;\n    }\n\n    public Integer deletePoint(Integer pointIndex, long sequenceIndex) {\n\n        checkArgument(root != Null, \" deleting from an empty tree\");\n        float[] point = projectToTree(pointStoreView.getNumericVector(pointIndex));\n        
checkArgument(point.length == dimension, () -> \" incorrect projection at index \" + pointIndex);\n        Stack<int[]> pathToRoot = nodeStore.getPath(root, point, false);\n        int[] first = pathToRoot.pop();\n        int leafSavedSibling = first[1];\n        int leafNode = first[0];\n        int leafPointIndex = getPointIndex(leafNode);\n\n        checkArgument(leafPointIndex == pointIndex,\n                () -> \" deleting wrong node \" + leafPointIndex + \" instead of \" + pointIndex);\n\n        removeLeaf(leafPointIndex, sequenceIndex);\n\n        if (decreaseLeafMass(leafNode) == 0) {\n            if (pathToRoot.size() == 0) {\n                root = Null;\n            } else {\n                int parent = pathToRoot.pop()[0];\n                if (pathToRoot.size() == 0) {\n                    root = leafSavedSibling;\n                } else {\n                    int grandParent = pathToRoot.lastElement()[0];\n                    nodeStore.replaceParentBySibling(grandParent, parent, leafNode);\n                    manageAncestorsDelete(pathToRoot, point);\n                }\n                nodeStore.deleteInternalNode(parent);\n                if (pointSum != null) {\n                    invalidatePointSum(parent);\n                }\n                int idx = translate(parent);\n                if (idx != Integer.MAX_VALUE) {\n                    rangeSumData[idx] = 0.0;\n                }\n            }\n        } else {\n            manageAncestorsDelete(pathToRoot, point);\n        }\n        return leafPointIndex;\n    }\n\n    protected void manageAncestorsDelete(Stack<int[]> path, float[] point) {\n        boolean resolved = false;\n        while (!path.isEmpty()) {\n            int index = path.pop()[0];\n            nodeStore.decreaseMassOfInternalNode(index);\n            if (pointSum != null) {\n                recomputePointSum(index);\n            }\n            if (boundingBoxCacheFraction > 0.0 && !resolved) {\n                resolved 
= checkContainsAndRebuildBox(index, point, pointStoreView);\n            }\n        }\n    }\n\n    //// leaf, nonleaf representations\n\n    public boolean isLeaf(int index) {\n        // note that numberOfLeaves - 1 corresponds to an unspecified leaf in partial tree\n        // 0 .. numberOfLeaves - 2 corresponds to internal nodes\n        return index >= numberOfLeaves;\n    }\n\n    public boolean isInternal(int index) {\n        // note that numberOfLeaves - 1 corresponds to an unspecified leaf in partial tree\n        // 0 .. numberOfLeaves - 2 corresponds to internal nodes\n        return index < numberOfLeaves - 1 && index >= 0;\n    }\n\n    public int convertToLeaf(int pointIndex) {\n        return pointIndex + numberOfLeaves;\n    }\n\n    public int getPointIndex(int index) {\n        checkArgument(index >= numberOfLeaves, () -> \" does not have a point associated \" + index);\n        return index - numberOfLeaves;\n    }\n\n    public int getLeftChild(int index) {\n        checkArgument(isInternal(index), () -> \"incorrect call to get left Index \" + index);\n        return nodeStore.getLeftIndex(index);\n    }\n\n    public int getRightChild(int index) {\n        checkArgument(isInternal(index), () -> \"incorrect call to get right child \" + index);\n        return nodeStore.getRightIndex(index);\n    }\n\n    public int getCutDimension(int index) {\n        checkArgument(isInternal(index), () -> \"incorrect call to get cut dimension \" + index);\n        return nodeStore.getCutDimension(index);\n    }\n\n    public double getCutValue(int index) {\n        checkArgument(isInternal(index), () -> \"incorrect call to get cut value \" + index);\n        return nodeStore.getCutValue(index);\n    }\n\n    ///// mass assignments; separating leafs and internal nodes\n\n    protected int getMass(int index) {\n        return (isLeaf(index)) ? 
getLeafMass(index) : nodeStore.getMass(index);\n    }\n\n    protected int getLeafMass(int index) {\n        int y = (index - numberOfLeaves);\n        Integer value = leafMass.get(y);\n        return (value != null) ? value + 1 : 1;\n    }\n\n    protected void increaseLeafMass(int index) {\n        int y = (index - numberOfLeaves);\n        leafMass.merge(y, 1, Integer::sum);\n    }\n\n    protected int decreaseLeafMass(int index) {\n        int y = (index - numberOfLeaves);\n        Integer value = leafMass.remove(y);\n        if (value != null) {\n            if (value > 1) {\n                leafMass.put(y, (value - 1));\n                return value;\n            } else {\n                return 1;\n            }\n        } else {\n            return 0;\n        }\n    }\n\n    @Override\n    public int getMass() {\n        return root == Null ? 0 : isLeaf(root) ? getLeafMass(root) : nodeStore.getMass(root);\n    }\n\n    /////// Bounding box\n\n    public void resizeCache(double fraction) {\n        if (fraction == 0) {\n            rangeSumData = null;\n            boundingBoxData = null;\n        } else {\n            int limit = (int) Math.floor(fraction * (numberOfLeaves - 1));\n            rangeSumData = (rangeSumData == null) ? new double[limit] : Arrays.copyOf(rangeSumData, limit);\n            boundingBoxData = (boundingBoxData == null) ? 
new float[limit * 2 * dimension]\n                    : Arrays.copyOf(boundingBoxData, limit * 2 * dimension);\n        }\n        boundingBoxCacheFraction = fraction;\n    }\n\n    protected int translate(int index) {\n        if (rangeSumData == null || rangeSumData.length <= index) {\n            return Integer.MAX_VALUE;\n        } else {\n            return index;\n        }\n    }\n\n    void copyBoxToData(int idx, BoundingBox box) {\n        int base = 2 * idx * dimension;\n        int mid = base + dimension;\n        System.arraycopy(box.getMinValues(), 0, boundingBoxData, base, dimension);\n        System.arraycopy(box.getMaxValues(), 0, boundingBoxData, mid, dimension);\n        rangeSumData[idx] = box.getRangeSum();\n    }\n\n    void addPointInPlace(int index, float[] point) {\n        int idx = translate(index);\n        if (idx != Integer.MAX_VALUE) {\n            int base = 2 * idx * dimension;\n            int mid = base + dimension;\n            double rangeSum = 0;\n            for (int i = 0; i < dimension; i++) {\n                boundingBoxData[base + i] = Math.min(boundingBoxData[base + i], point[i]);\n            }\n            for (int i = 0; i < dimension; i++) {\n                boundingBoxData[mid + i] = max(boundingBoxData[mid + i], point[i]);\n            }\n            for (int i = 0; i < dimension; i++) {\n                rangeSum += boundingBoxData[mid + i] - boundingBoxData[base + i];\n            }\n            rangeSumData[idx] = rangeSum;\n        }\n    }\n\n    public BoundingBox getBox(int index) {\n        if (isLeaf(index)) {\n            float[] point = projectToTree(pointStoreView.getNumericVector(getPointIndex(index)));\n            checkArgument(point.length == dimension, () -> \"failure in projection at index \" + index);\n            return new BoundingBox(point, point);\n        } else {\n            checkState(isInternal(index), \" incomplete state\");\n            int idx = translate(index);\n            if (idx != 
Integer.MAX_VALUE) {\n                if (rangeSumData[idx] != 0) {\n                    // return non-trivial boxes\n                    return getBoxFromData(idx);\n                } else {\n                    BoundingBox box = reconstructBox(index, pointStoreView);\n                    copyBoxToData(idx, box);\n                    return box;\n                }\n            }\n            return reconstructBox(index, pointStoreView);\n        }\n    }\n\n    BoundingBox reconstructBox(int index, IPointStoreView<float[]> pointStoreView) {\n        BoundingBox mutatedBoundingBox = getBox(nodeStore.getLeftIndex(index));\n        growNodeBox(mutatedBoundingBox, pointStoreView, index, nodeStore.getRightIndex(index));\n        return mutatedBoundingBox;\n    }\n\n    boolean checkStrictlyContains(int index, float[] point) {\n        int idx = translate(index);\n        if (idx != Integer.MAX_VALUE) {\n            int base = 2 * idx * dimension;\n            int mid = base + dimension;\n            boolean isInside = true;\n            for (int i = 0; i < dimension && isInside; i++) {\n                if (point[i] >= boundingBoxData[mid + i] || boundingBoxData[base + i] >= point[i]) {\n                    isInside = false;\n                }\n            }\n            return isInside;\n        }\n        return false;\n    }\n\n    boolean checkContainsAndRebuildBox(int index, float[] point, IPointStoreView<float[]> pointStoreView) {\n        int idx = translate(index);\n        if (idx != Integer.MAX_VALUE) {\n            if (!checkStrictlyContains(index, point)) {\n                BoundingBox mutatedBoundingBox = reconstructBox(index, pointStoreView);\n                copyBoxToData(idx, mutatedBoundingBox);\n                return false;\n            }\n            return true;\n        }\n        return false;\n    }\n\n    BoundingBox getBoxFromData(int idx) {\n        int base = 2 * idx * dimension;\n        int mid = base + dimension;\n\n        return new 
BoundingBox(Arrays.copyOfRange(boundingBoxData, base, base + dimension),\n                Arrays.copyOfRange(boundingBoxData, mid, mid + dimension));\n    }\n\n    void addBox(int index, float[] point, BoundingBox box) {\n        int idx = translate(index);\n        if (idx != Integer.MAX_VALUE) { // always add irrespective of rangesum\n            copyBoxToData(idx, box);\n            addPointInPlace(index, point);\n        }\n    }\n\n    void growNodeBox(BoundingBox box, IPointStoreView<float[]> pointStoreView, int node, int sibling) {\n        if (isLeaf(sibling)) {\n            float[] point = projectToTree(pointStoreView.getNumericVector(getPointIndex(sibling)));\n            checkArgument(point.length == dimension, () -> \" incorrect projection at index \" + sibling);\n            box.addPoint(point);\n        } else {\n            if (!isInternal(sibling)) {\n                throw new IllegalStateException(\" incomplete state \" + sibling);\n            }\n            int siblingIdx = translate(sibling);\n            if (siblingIdx != Integer.MAX_VALUE) {\n                if (rangeSumData[siblingIdx] != 0) {\n                    box.addBox(getBoxFromData(siblingIdx));\n                } else {\n                    BoundingBox newBox = getBox(siblingIdx);\n                    copyBoxToData(siblingIdx, newBox);\n                    box.addBox(newBox);\n                }\n                return;\n            }\n            growNodeBox(box, pointStoreView, sibling, nodeStore.getLeftIndex(sibling));\n            growNodeBox(box, pointStoreView, sibling, nodeStore.getRightIndex(sibling));\n            return;\n        }\n    }\n\n    public double probabilityOfCut(int node, float[] point, BoundingBox otherBox) {\n        int nodeIdx = translate(node);\n        if (nodeIdx != Integer.MAX_VALUE && rangeSumData[nodeIdx] != 0) {\n            int base = 2 * nodeIdx * dimension;\n            int mid = base + dimension;\n            double minsum = 0;\n            
double maxsum = 0;\n            for (int i = 0; i < dimension; i++) {\n                minsum += max(boundingBoxData[base + i] - point[i], 0);\n            }\n            for (int i = 0; i < dimension; i++) {\n                maxsum += max(point[i] - boundingBoxData[mid + i], 0);\n            }\n            double sum = maxsum + minsum;\n\n            if (sum == 0.0) {\n                return 0.0;\n            }\n            return sum / (rangeSumData[nodeIdx] + sum);\n        } else if (otherBox != null) {\n            return otherBox.probabilityOfCut(point);\n        } else {\n            BoundingBox box = getBox(node);\n            return box.probabilityOfCut(point);\n        }\n    }\n\n    /// additional information at nodes\n\n    public float[] getPointSum(int index) {\n        checkArgument(centerOfMassEnabled, \" enable center of mass\");\n        if (isLeaf(index)) {\n            float[] point = projectToTree(pointStoreView.getNumericVector(getPointIndex(index)));\n            checkArgument(point.length == dimension, () -> \" incorrect projection\");\n            int mass = getMass(index);\n            for (int i = 0; i < point.length; i++) {\n                point[i] *= mass;\n            }\n            return point;\n        } else {\n            return Arrays.copyOfRange(pointSum, index * dimension, (index + 1) * dimension);\n        }\n    }\n\n    public void invalidatePointSum(int index) {\n        for (int i = 0; i < dimension; i++) {\n            pointSum[index * dimension + i] = 0;\n        }\n    }\n\n    public void recomputePointSum(int index) {\n        float[] left = getPointSum(nodeStore.getLeftIndex(index));\n        float[] right = getPointSum(nodeStore.getRightIndex(index));\n        for (int i = 0; i < dimension; i++) {\n            pointSum[index * dimension + i] = left[i] + right[i];\n        }\n    }\n\n    public HashMap<Long, Integer> getSequenceMap(int index) {\n        HashMap<Long, Integer> hashMap = new HashMap<>();\n        
List<Long> list = getSequenceList(index);\n        for (Long e : list) {\n            hashMap.merge(e, 1, Integer::sum);\n        }\n        return hashMap;\n    }\n\n    public List<Long> getSequenceList(int index) {\n        return sequenceMap.get(index);\n    }\n\n    protected void addLeaf(int pointIndex, long sequenceIndex) {\n        if (storeSequenceIndexesEnabled) {\n            List<Long> leafList = sequenceMap.remove(pointIndex);\n            if (leafList == null) {\n                leafList = new ArrayList<>(1);\n            }\n            leafList.add(sequenceIndex);\n            sequenceMap.put(pointIndex, leafList);\n        }\n    }\n\n    public void removeLeaf(int leafPointIndex, long sequenceIndex) {\n        if (storeSequenceIndexesEnabled) {\n            List<Long> leafList = sequenceMap.remove(leafPointIndex);\n            checkArgument(leafList != null, \" leaf index not found in tree\");\n            checkArgument(leafList.remove(sequenceIndex), \" sequence index not found in leaf\");\n            if (!leafList.isEmpty()) {\n                sequenceMap.put(leafPointIndex, leafList);\n            }\n        }\n    }\n\n    //// validations\n\n    public void validateAndReconstruct() {\n        if (root != Null) {\n            validateAndReconstruct(root);\n        }\n    }\n\n    /**\n     * This function is supposed to validate the integrity of the tree and rebuild\n     * internal data structures. 
At this moment the only internal structure is the\n     * pointsum.\n     * \n     * @param index the node of a tree\n     * @return a bounding box of the points\n     */\n    public BoundingBox validateAndReconstruct(int index) {\n        if (isLeaf(index)) {\n            return getBox(index);\n        } else {\n            checkState(isInternal(index), \"illegal state\");\n            BoundingBox leftBox = validateAndReconstruct(getLeftChild(index));\n            BoundingBox rightBox = validateAndReconstruct(getRightChild(index));\n            if (leftBox.maxValues[getCutDimension(index)] > getCutValue(index)\n                    || rightBox.minValues[getCutDimension(index)] <= getCutValue(index)) {\n                throw new IllegalStateException(\" incorrect bounding state at index \" + index + \" cut value \"\n                        + getCutValue(index) + \"cut dimension \" + getCutDimension(index) + \" left Box \"\n                        + leftBox.toString() + \" right box \" + rightBox.toString());\n            }\n            if (centerOfMassEnabled) {\n                recomputePointSum(index);\n            }\n            rightBox.addBox(leftBox);\n            int idx = translate(index);\n            if (idx != Integer.MAX_VALUE) { // always add irrespective of rangesum\n                copyBoxToData(idx, rightBox);\n            }\n            return rightBox;\n        }\n    }\n\n    //// traversals\n\n    /**\n     * Starting from the root, traverse the canonical path to a leaf node and visit\n     * the nodes along the path. The canonical path is determined by the input\n     * point: at each interior node, we select the child node by comparing the\n     * node's {@link Cut} to the corresponding coordinate value in the input point.\n     * The method recursively traverses to the leaf node first and then invokes the\n     * visitor on each node in reverse order. 
That is, if the path to the leaf node\n     * determined by the input point is root, node1, node2, ..., node(N-1), nodeN,\n     * leaf; then we will first invoke visitor::acceptLeaf on the leaf node, and\n     * then we will invoke visitor::accept on the remaining nodes in the following\n     * order: nodeN, node(N-1), ..., node2, node1, and root.\n     *\n     * @param point          A point which determines the traversal path from the\n     *                       root to a leaf node.\n     * @param visitorFactory A visitor that will be invoked for each node on the\n     *                       path.\n     * @param <R>            The return type of the Visitor.\n     * @return the value of {@link Visitor#getResult()}} after the traversal.\n     */\n    @Override\n    public <R> R traverse(float[] point, IVisitorFactory<R> visitorFactory) {\n        checkArgument(root != Null, \"this tree doesn't contain any nodes\");\n        checkNotNull(point, \"point must not be null\");\n        checkNotNull(visitorFactory, \"visitor must not be null\");\n        Visitor<R> visitor = visitorFactory.newVisitor(this, point);\n        NodeView currentNodeView = new NodeView(this, pointStoreView, root);\n        traversePathToLeafAndVisitNodes(point, visitor, currentNodeView, root, 0);\n        return visitorFactory.liftResult(this, visitor.getResult());\n    }\n\n    protected <R> void traversePathToLeafAndVisitNodes(float[] point, Visitor<R> visitor, NodeView currentNodeView,\n            int node, int depthOfNode) {\n        if (isLeaf(node)) {\n            currentNodeView.setCurrentNode(node, getPointIndex(node), true);\n            visitor.acceptLeaf(currentNodeView, depthOfNode);\n        } else {\n            checkState(isInternal(node), \" incomplete state \");\n            if (nodeStore.toLeft(point, node)) {\n                traversePathToLeafAndVisitNodes(point, visitor, currentNodeView, nodeStore.getLeftIndex(node),\n                        depthOfNode + 1);\n         
       currentNodeView.updateToParent(node, nodeStore.getRightIndex(node), !visitor.isConverged());\n            } else {\n                traversePathToLeafAndVisitNodes(point, visitor, currentNodeView, nodeStore.getRightIndex(node),\n                        depthOfNode + 1);\n                currentNodeView.updateToParent(node, nodeStore.getLeftIndex(node), !visitor.isConverged());\n            }\n            visitor.accept(currentNodeView, depthOfNode);\n        }\n    }\n\n    /**\n     * This is a traversal method which follows the standard traversal path (defined\n     * in {@link #traverse(float[], IVisitorFactory)}) but at Node in checks to see\n     * whether the visitor should split. If a split is triggered, then independent\n     * copies of the visitor are sent down each branch of the tree and then merged\n     * before propagating the result.\n     *\n     * @param point          A point which determines the traversal path from the\n     *                       root to a leaf node.\n     * @param visitorFactory A visitor that will be invoked for each node on the\n     *                       path.\n     * @param <R>            The return type of the Visitor.\n     * @return the value of {@link Visitor#getResult()}} after the traversal.\n     */\n\n    @Override\n    public <R> R traverseMulti(float[] point, IMultiVisitorFactory<R> visitorFactory) {\n        checkArgument(root != Null, \"this tree doesn't contain any nodes\");\n        checkNotNull(point, \"point must not be null\");\n        checkNotNull(visitorFactory, \"visitor must not be null\");\n        MultiVisitor<R> visitor = visitorFactory.newVisitor(this, point);\n        NodeView currentNodeView = new NodeView(this, pointStoreView, root);\n        traverseTreeMulti(point, visitor, currentNodeView, root, 0);\n        return visitorFactory.liftResult(this, visitor.getResult());\n    }\n\n    protected <R> void traverseTreeMulti(float[] point, MultiVisitor<R> visitor, NodeView currentNodeView, 
int node,\n            int depthOfNode) {\n        if (isLeaf(node)) {\n            currentNodeView.setCurrentNode(node, getPointIndex(node), false);\n            visitor.acceptLeaf(currentNodeView, depthOfNode);\n        } else {\n            checkState(isInternal(node), \" incomplete state\");\n            currentNodeView.setCurrentNodeOnly(node);\n            if (visitor.trigger(currentNodeView)) {\n                traverseTreeMulti(point, visitor, currentNodeView, nodeStore.getLeftIndex(node), depthOfNode + 1);\n                MultiVisitor<R> newVisitor = visitor.newPartialCopy();\n                currentNodeView.setCurrentNodeOnly(nodeStore.getRightIndex(node));\n                traverseTreeMulti(point, newVisitor, currentNodeView, nodeStore.getRightIndex(node), depthOfNode + 1);\n                currentNodeView.updateToParent(node, nodeStore.getLeftIndex(node), false);\n                visitor.combine(newVisitor);\n            } else if (nodeStore.toLeft(point, node)) {\n                traverseTreeMulti(point, visitor, currentNodeView, nodeStore.getLeftIndex(node), depthOfNode + 1);\n                currentNodeView.updateToParent(node, nodeStore.getRightIndex(node), false);\n            } else {\n                traverseTreeMulti(point, visitor, currentNodeView, nodeStore.getRightIndex(node), depthOfNode + 1);\n                currentNodeView.updateToParent(node, nodeStore.getLeftIndex(node), false);\n            }\n            visitor.accept(currentNodeView, depthOfNode);\n        }\n    }\n\n    public int getNumberOfLeaves() {\n        return numberOfLeaves;\n    }\n\n    public boolean isCenterOfMassEnabled() {\n        return centerOfMassEnabled;\n    }\n\n    public boolean isStoreSequenceIndexesEnabled() {\n        return storeSequenceIndexesEnabled;\n    }\n\n    public double getBoundingBoxCacheFraction() {\n        return boundingBoxCacheFraction;\n    }\n\n    public int getDimension() {\n        return dimension;\n    }\n\n    /**\n     *\n     
* @return the root of the tree\n     */\n\n    public Integer getRoot() {\n        return (int) root;\n    }\n\n    /**\n     * returns the number of samples that needs to be seen before returning\n     * meaningful inference\n     */\n    public int getOutputAfter() {\n        return outputAfter;\n    }\n\n    @Override\n    public boolean isOutputReady() {\n        return getMass() >= outputAfter;\n    }\n\n    public float[] projectToTree(float[] point) {\n        return Arrays.copyOf(point, point.length);\n    }\n\n    public float[] liftFromTree(float[] result) {\n        return Arrays.copyOf(result, result.length);\n    }\n\n    public double[] liftFromTree(double[] result) {\n        return Arrays.copyOf(result, result.length);\n    }\n\n    public int[] projectMissingIndices(int[] list) {\n        return Arrays.copyOf(list, list.length);\n    }\n\n    public long getRandomSeed() {\n        return randomSeed;\n    }\n\n    public AbstractNodeStore getNodeStore() {\n        return nodeStore;\n    }\n\n    public static class Builder<T extends Builder<T>> {\n        protected boolean storeSequenceIndexesEnabled = RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n        protected boolean centerOfMassEnabled = RandomCutForest.DEFAULT_CENTER_OF_MASS_ENABLED;\n        protected double boundingBoxCacheFraction = RandomCutForest.DEFAULT_BOUNDING_BOX_CACHE_FRACTION;\n        protected long randomSeed = new Random().nextLong();\n        protected Random random = null;\n        protected int capacity = RandomCutForest.DEFAULT_SAMPLE_SIZE;\n        protected Optional<Integer> outputAfter = Optional.empty();\n        protected int dimension;\n        protected IPointStoreView<float[]> pointStoreView;\n        protected AbstractNodeStore nodeStore;\n        protected int root = Null;\n        protected boolean storeParent = DEFAULT_STORE_PARENT;\n\n        public T capacity(int capacity) {\n            this.capacity = capacity;\n            return (T) this;\n      
  }\n\n        public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {\n            this.boundingBoxCacheFraction = boundingBoxCacheFraction;\n            return (T) this;\n        }\n\n        public T pointStoreView(IPointStoreView<float[]> pointStoreView) {\n            this.pointStoreView = pointStoreView;\n            return (T) this;\n        }\n\n        public T nodeStore(AbstractNodeStore nodeStore) {\n            this.nodeStore = nodeStore;\n            return (T) this;\n        }\n\n        public T randomSeed(long randomSeed) {\n            this.randomSeed = randomSeed;\n            return (T) this;\n        }\n\n        public T random(Random random) {\n            this.random = random;\n            return (T) this;\n        }\n\n        public T outputAfter(int outputAfter) {\n            this.outputAfter = Optional.of(outputAfter);\n            return (T) this;\n        }\n\n        public T dimension(int dimension) {\n            this.dimension = dimension;\n            return (T) this;\n        }\n\n        public T setRoot(int root) {\n            this.root = root;\n            return (T) this;\n        }\n\n        public T storeParent(boolean storeParent) {\n            this.storeParent = storeParent;\n            return (T) this;\n        }\n\n        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n            return (T) this;\n        }\n\n        public T centerOfMassEnabled(boolean centerOfMassEnabled) {\n            this.centerOfMassEnabled = centerOfMassEnabled;\n            return (T) this;\n        }\n\n        public RandomCutTree build() {\n            return new RandomCutTree(this);\n        }\n    }\n\n    public static Builder builder() {\n        return new Builder();\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/util/ArrayPacking.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static java.lang.Math.min;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\n\npublic class ArrayPacking {\n\n    /**\n     * For a given base value, return the smallest int value {@code p} so that\n     * {@code base^(p + 1) >= Integer.MAX_VALUE}. If\n     * {@code base >= Integer.MAX_VALUE}, return 1.\n     * \n     * @param base Compute the approximate log of {@code Integer.MAX_VALUE} in this\n     *             base.\n     * @return the largest int value {@code p} so that\n     *         {@code base^p >= Integer.MAX_VALUE} or 1 if\n     *         {@code base >= Integer.MAX_VALUE}.\n     */\n    public static int logMax(long base) {\n        checkArgument(base > 1, \"Absolute value of base must be greater than 1\");\n\n        int pack = 0;\n        long num = base;\n        while (num < Integer.MAX_VALUE) {\n            num = num * base;\n            ++pack;\n        }\n        return Math.max(pack, 1); // pack can be 0 for max - min being more than Integer.MaxValue\n    }\n\n    /**\n     * Pack an array of ints. 
If {@code compress} is true, then this method will\n     * apply arithmetic compression to the inputs, otherwise it returns a copy of\n     * the input.\n     *\n     * @param inputArray An array of ints to pack.\n     * @param compress   A flag indicating whether to apply arithmetic compression.\n     * @return an array of packed ints.\n     */\n    public static int[] pack(int[] inputArray, boolean compress) {\n        return pack(inputArray, inputArray.length, compress);\n    }\n\n    /**\n     * Pack an array of ints. If {@code compress} is true, then this method will\n     * apply arithmetic compression to the inputs, otherwise it returns a copy of\n     * the input.\n     *\n     * @param inputArray An array of ints to pack.\n     * @param length     The length of the output array. Only the first\n     *                   {@code length} values in {@code inputArray} will be packed.\n     * @param compress   A flag indicating whether to apply arithmetic compression.\n     * @return an array of packed ints.\n     */\n    public static int[] pack(int[] inputArray, int length, boolean compress) {\n        checkNotNull(inputArray, \"inputArray must not be null\");\n        checkArgument(0 <= length && length <= inputArray.length,\n                \"length must be between 0 and inputArray.length (inclusive)\");\n\n        if (!compress || length < 3) {\n            return Arrays.copyOf(inputArray, length);\n        }\n\n        int min = inputArray[0];\n        int max = inputArray[0];\n        for (int i = 1; i < length; i++) {\n            min = min(min, inputArray[i]);\n            max = Math.max(max, inputArray[i]);\n        }\n        long base = (long) max - min + 1;\n        if (base == 1) {\n            return new int[] { min, max, length };\n        } else {\n            int packNum = logMax(base);\n\n            int[] output = new int[3 + (int) Math.ceil(1.0 * length / packNum)];\n            output[0] = min;\n            output[1] = max;\n            
output[2] = length;\n            int len = 0;\n            int used = 0;\n            while (len < length) {\n                long code = 0;\n                int reach = min(len + packNum - 1, length - 1);\n                for (int i = reach; i >= len; i--) {\n                    code = base * code + (inputArray[i] - min);\n                }\n                output[3 + used++] = (int) code;\n                len += packNum;\n            }\n            // uncomment for debug; should be always true\n            // checkArgument(used + 3 == output.length, \"incorrect state\");\n            return output;\n        }\n    }\n\n    /**\n     * Pack an array of shorts. If {@code compress} is true, then this method will\n     * apply arithmetic compression to the inputs, otherwise it returns a copy of\n     * the input.\n     *\n     * @param inputArray An array of ints to pack.\n     * @param compress   A flag indicating whether to apply arithmetic compression.\n     * @return an array of packed ints.\n     */\n    public static int[] pack(short[] inputArray, boolean compress) {\n        return pack(inputArray, inputArray.length, compress);\n    }\n\n    /**\n     * Pack an array of shorts. If {@code compress} is true, then this method will\n     * apply arithmetic compression to the inputs, otherwise it returns a copy of\n     * the input.\n     *\n     * @param inputArray An array of ints to pack.\n     * @param length     The length of the output array. 
Only the first\n     *                   {@code length} values in {@code inputArray} will be packed.\n     * @param compress   A flag indicating whether to apply arithmetic compression.\n     * @return an array of packed ints.\n     */\n    public static int[] pack(short[] inputArray, int length, boolean compress) {\n        checkNotNull(inputArray, \"inputArray must not be null\");\n        checkArgument(0 <= length && length <= inputArray.length,\n                \"length must be between 0 and inputArray.length (inclusive)\");\n\n        if (!compress || length < 3) {\n            int[] ret = new int[length];\n            for (int i = 0; i < length; i++) {\n                ret[i] = inputArray[i];\n            }\n            return ret;\n        }\n\n        int min = inputArray[0];\n        int max = inputArray[0];\n        for (int i = 1; i < length; i++) {\n            min = min(min, inputArray[i]);\n            max = Math.max(max, inputArray[i]);\n        }\n        long base = (long) max - min + 1;\n        if (base == 1) {\n            return new int[] { min, max, length };\n        } else {\n            int packNum = logMax(base);\n\n            int[] output = new int[3 + (int) Math.ceil(1.0 * length / packNum)];\n            output[0] = min;\n            output[1] = max;\n            output[2] = length;\n            int len = 0;\n            int used = 0;\n            while (len < length) {\n                long code = 0;\n                int reach = min(len + packNum - 1, length - 1);\n                for (int i = reach; i >= len; i--) {\n                    code = base * code + (inputArray[i] - min);\n                }\n                output[3 + used++] = (int) code;\n                len += packNum;\n            }\n            // uncomment for debug; should be always true\n            // checkArgument(used + 3 == output.length, \"incorrect state\");\n            return output;\n        }\n    }\n\n    /**\n     * Unpack an array previously created by 
{@link #pack(int[], int, boolean)}.\n     * \n     * @param packedArray An array previously created by\n     *                    {@link #pack(int[], int, boolean)}.\n     * @param decompress  A flag indicating whether the packed array was created\n     *                    with arithmetic compression enabled.\n     * @return the array of unpacked ints.\n     */\n    public static int[] unpackInts(int[] packedArray, boolean decompress) {\n        checkNotNull(packedArray, \" array unpacking invoked on null arrays\");\n\n        if (!decompress) {\n            return Arrays.copyOf(packedArray, packedArray.length);\n        }\n\n        return (packedArray.length < 3) ? unpackInts(packedArray, packedArray.length, decompress)\n                : unpackInts(packedArray, packedArray[2], decompress);\n    }\n\n    /**\n     * Unpack an array previously created by {@link #pack(int[], int, boolean)}.\n     * \n     * @param packedArray An array previously created by\n     *                    {@link #pack(int[], int, boolean)}.\n     * @param length      The desired length of the output array. 
If this number is\n     *                    different from the length of the array that was originally\n     *                    packed, then the result will be truncated or padded with\n     *                    zeros as needed.\n     * @param decompress  A flag indicating whether the packed array was created\n     *                    with arithmetic compression enabled.\n     * @return the array of unpacked ints.\n     */\n    public static int[] unpackInts(int[] packedArray, int length, boolean decompress) {\n        checkNotNull(packedArray, \" array unpacking invoked on null arrays\");\n        checkArgument(length >= 0, \"incorrect length parameter\");\n\n        if (packedArray.length < 3 || !decompress) {\n            return Arrays.copyOf(packedArray, length);\n        }\n        int min = packedArray[0];\n        int max = packedArray[1];\n        int[] output = new int[length];\n        if (min == max) {\n            if (packedArray[2] >= length) {\n                Arrays.fill(output, min);\n            } else {\n                for (int i = 0; i < packedArray[2]; i++) {\n                    output[i] = min;\n                }\n            }\n        } else {\n            long base = ((long) max - min + 1);\n            int packNum = logMax(base);\n            int count = 0;\n            for (int i = 3; i < packedArray.length; i++) {\n                long code = packedArray[i];\n                for (int j = 0; j < packNum && count < min(packedArray[2], length); j++) {\n                    output[count++] = (int) (min + code % base);\n                    code = (int) (code / base);\n                }\n            }\n        }\n        return output;\n    }\n\n    private static short[] copyToShort(int[] array, int length) {\n        short[] ret = new short[length];\n        for (int i = 0; i < Math.min(length, array.length); i++) {\n            ret[i] = (short) array[i];\n        }\n        return ret;\n    }\n\n    /**\n     * Unpack an array 
previously created by {@link #pack(short[], int, boolean)}.\n     *\n     * @param packedArray An array previously created by\n     *                    {@link #pack(short[], int, boolean)}.\n     * @param decompress  A flag indicating whether the packed array was created\n     *                    with arithmetic compression enabled.\n     * @return the array of unpacked shorts.\n     */\n    public static short[] unpackShorts(int[] packedArray, boolean decompress) {\n        checkNotNull(packedArray, \" array unpacking invoked on null arrays\");\n\n        if (!decompress) {\n            return copyToShort(packedArray, packedArray.length);\n        }\n\n        return (packedArray.length < 3) ? unpackShorts(packedArray, packedArray.length, decompress)\n                : unpackShorts(packedArray, packedArray[2], decompress);\n    }\n\n    /**\n     * Unpack an array previously created by {@link #pack(short[], int, boolean)}.\n     *\n     * @param packedArray An array previously created by\n     *                    {@link #pack(short[], int, boolean)}.\n     * @param length      The desired length of the output array. 
If this number is\n     *                    different from the length of the array that was originally\n     *                    packed, then the result will be truncated or padded with\n     *                    zeros as needed.\n     * @param decompress  A flag indicating whether the packed array was created\n     *                    with arithmetic compression enabled.\n     * @return the array of unpacked ints.\n     */\n    public static short[] unpackShorts(int[] packedArray, int length, boolean decompress) {\n        checkNotNull(packedArray, \" array unpacking invoked on null arrays\");\n        checkArgument(length >= 0, \"incorrect length parameter\");\n\n        if (packedArray.length < 3 || !decompress) {\n            return copyToShort(packedArray, length);\n        }\n        int min = packedArray[0];\n        int max = packedArray[1];\n        short[] output = new short[length];\n        if (min == max) {\n            if (packedArray[2] >= length) {\n                Arrays.fill(output, (short) min);\n            } else {\n                for (int i = 0; i < packedArray[2]; i++) {\n                    output[i] = (short) min;\n                }\n            }\n        } else {\n            long base = ((long) max - min + 1);\n            int packNum = logMax(base);\n            int count = 0;\n            for (int i = 3; i < packedArray.length; i++) {\n                long code = packedArray[i];\n                for (int j = 0; j < packNum && count < min(packedArray[2], length); j++) {\n                    output[count++] = (short) (min + code % base);\n                    code = (int) (code / base);\n                }\n            }\n        }\n        return output;\n    }\n\n    /**\n     * Pack an array of doubles into an array of bytes.\n     * \n     * @param array An array of doubles.\n     * @return An array of bytes representing the original array of doubles.\n     */\n    public static byte[] pack(double[] array) {\n        
checkNotNull(array, \"array must not be null\");\n        return pack(array, array.length);\n    }\n\n    /**\n     * Pack an array of doubles into an array of bytes.\n     * \n     * @param array  An array of doubles.\n     * @param length The number of doubles in the input array to pack into the\n     *               resulting byte array.\n     * @return An array of bytes representing the original array of doubles.\n     */\n    public static byte[] pack(double[] array, int length) {\n        checkNotNull(array, \"array must not be null\");\n        checkArgument(0 <= length, \"incorrect length parameter\");\n        checkArgument(length <= array.length, \"length must be between 0 and inputArray.length (inclusive)\");\n\n        ByteBuffer buf = ByteBuffer.allocate(length * Double.BYTES);\n        for (int i = 0; i < length; i++) {\n            buf.putDouble(array[i]);\n        }\n\n        return buf.array();\n    }\n\n    /**\n     * Pack an array of floats into an array of bytes.\n     * \n     * @param array An array of floats.\n     * @return An array of bytes representing the original array of floats.\n     */\n    public static byte[] pack(float[] array) {\n        checkNotNull(array, \"array must not be null\");\n        return pack(array, array.length);\n    }\n\n    /**\n     * Pack an array of floats into an array of bytes.\n     * \n     * @param array  An array of floats.\n     * @param length The number of doubles in the input array to pack into the\n     *               resulting byte array.\n     * @return An array of bytes representing the original array of floats.\n     */\n    public static byte[] pack(float[] array, int length) {\n        checkArgument(0 <= length, \"incorrect length parameter\");\n        checkArgument(length <= array.length, \"length must be between 0 and inputArray.length (inclusive)\");\n\n        ByteBuffer buf = ByteBuffer.allocate(length * Float.BYTES);\n        for (int i = 0; i < length; i++) {\n            
buf.putFloat(array[i]);\n        }\n\n        return buf.array();\n    }\n\n    /**\n     * Unpack an array of bytes as an array of doubles.\n     * \n     * @param bytes An array of bytes.\n     * @return an array of doubles obtained by marshalling consecutive bytes in the\n     *         input array into doubles.\n     */\n    public static double[] unpackDoubles(byte[] bytes) {\n        checkNotNull(bytes, \"bytes must not be null\");\n        return unpackDoubles(bytes, bytes.length / Double.BYTES);\n    }\n\n    /**\n     * Unpack an array of bytes as an array of doubles.\n     * \n     * @param bytes  An array of bytes.\n     * @param length The desired length of the resulting double array. The input\n     *               will be truncated or padded with zeros as needed.\n     * @return an array of doubles obtained by marshalling consecutive bytes in the\n     *         input array into doubles.\n     */\n    public static double[] unpackDoubles(byte[] bytes, int length) {\n        checkNotNull(bytes, \"bytes must not be null\");\n        checkArgument(length >= 0, \"length must be greater than or equal to 0\");\n        checkArgument(bytes.length % Double.BYTES == 0, \"bytes.length must be divisible by Double.BYTES\");\n\n        ByteBuffer buf = ByteBuffer.wrap(bytes);\n        double[] result = new double[length];\n        int m = Math.min(length, bytes.length / Double.BYTES);\n\n        for (int i = 0; i < m; i++) {\n            result[i] = buf.getDouble();\n        }\n\n        return result;\n    }\n\n    /**\n     * Unpack an array of bytes as an array of floats.\n     * \n     * @param bytes An array of bytes.\n     * @return an array of floats obtained by marshalling consecutive bytes in the\n     *         input array into floats.\n     */\n    public static float[] unpackFloats(byte[] bytes) {\n        checkNotNull(bytes, \"bytes must not be null\");\n        return unpackFloats(bytes, bytes.length / Float.BYTES);\n    }\n\n    /**\n     * Unpack 
an array of bytes as an array of floats.\n     * \n     * @param bytes  An array of bytes.\n     * @param length The desired length of the resulting float array. The input will\n     *               be truncated or padded with zeros as needed.\n     * @return an array of doubles obtained by marshalling consecutive bytes in the\n     *         input array into floats.\n     */\n    public static float[] unpackFloats(byte[] bytes, int length) {\n        checkNotNull(bytes, \"bytes must not be null\");\n        checkArgument(length >= 0, \"length must be greater than or equal to 0\");\n        checkArgument(bytes.length % Float.BYTES == 0, \"bytes.length must be divisible by Float.BYTES\");\n\n        ByteBuffer buf = ByteBuffer.wrap(bytes);\n        float[] result = new float[length];\n        int m = Math.min(length, bytes.length / Float.BYTES);\n\n        for (int i = 0; i < m; i++) {\n            result[i] = buf.getFloat();\n        }\n\n        return result;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/util/ArrayUtils.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport java.util.Arrays;\n\n/**\n * A utility class for data arrays.\n */\npublic class ArrayUtils {\n\n    /**\n     * Returns a clean deep copy of the point. Current clean-ups include changing\n     * negative zero -0.0 to positive zero 0.0.\n     *\n     * @param point The original data point.\n     * @return a clean deep copy of the original point.\n     */\n    public static double[] cleanCopy(double[] point) {\n        double[] pointCopy = Arrays.copyOf(point, point.length);\n        for (int i = 0; i < point.length; i++) {\n            if (pointCopy[i] == 0.0) {\n                pointCopy[i] = 0.0;\n            }\n        }\n        return pointCopy;\n    }\n\n    public static float[] cleanCopy(float[] point) {\n        float[] pointCopy = Arrays.copyOf(point, point.length);\n        for (int i = 0; i < point.length; i++) {\n            if (pointCopy[i] == 0.0) {\n                pointCopy[i] = 0.0f;\n            }\n        }\n        return pointCopy;\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/util/ShingleBuilder.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\n\n/**\n * A utility class for creating shingled points, which are also referred to as\n * shingles. A shingle consists of multiple points appended together. If\n * individual points have n dimensions, and we include k points in a shingle,\n * then the shingle will have size n * m.\n *\n * There are two strategies for shingling: sliding and cyclic. In a sliding\n * shingle, new points are appended to the end of the shingle, and old points\n * are removed from the front. For example, if we have a shingle size of 4 which\n * currently contains the points a, b, c, and d, then we can represent the\n * shingle as abcd. The following schematic shows how the shingle is updated as\n * we add new points e and f.\n * \n * <pre>\n *     abcd =&gt; bcde\n *     bcde =&gt; cdef\n * </pre>\n *\n * With cycling shingling, when a new point is added to a shingle it overwrites\n * the oldest point in the shingle. 
Using the same setup as above, a cyclic\n * shingle would be updated as follows:\n * \n * <pre>\n *     abcd =&gt; ebcd\n *     ebcd =&gt; efcd\n * </pre>\n */\npublic class ShingleBuilder {\n\n    /**\n     * Number of dimensions of each point in the shingle.\n     */\n    private final int dimensions;\n\n    /**\n     * Number of points in the shingle.\n     */\n    private final int shingleSize;\n\n    /**\n     * A buffer containing points recently added to the shingle.\n     */\n    private final double[][] recentPoints;\n\n    /**\n     * A flag indicating whether we should use a cyclic shift or a linear shift when\n     * creating shingles.\n     */\n    private final boolean cyclic;\n\n    /**\n     * The index where the next point will be copied to. This is equal to the index\n     * of the oldest point currently in the shingle.\n     */\n    private int shingleIndex;\n\n    /**\n     * A flag indicating whether the shingle has been completely filled once.\n     */\n    private boolean full;\n\n    /**\n     * Create a new ShingleBuilder with the given dimensions and shingle size.\n     * \n     * @param dimensions  The number of dimensions in the input points.\n     * @param shingleSize The number of points to store in a shingle.\n     * @param cyclic      If true, the shingle will use cyclic updates. If false, it\n     *                    will use sliding updates.\n     */\n    public ShingleBuilder(int dimensions, int shingleSize, boolean cyclic) {\n        checkArgument(dimensions > 0, \"dimensions must be greater than 0\");\n        checkArgument(shingleSize > 0, \"shingleSize must be greater than 0\");\n\n        this.dimensions = dimensions;\n        this.shingleSize = shingleSize;\n        this.cyclic = cyclic;\n        recentPoints = new double[shingleSize][dimensions];\n\n        shingleIndex = 0;\n        full = false;\n    }\n\n    /**\n     * Create a ShingleBuilder with the given dimensions and shingleSize. 
The\n     * resulting builder uses sliding updates.\n     * \n     * @param dimensions  The number of dimensions in the input points.\n     * @param shingleSize The number of points to store in a shingle.\n     */\n    public ShingleBuilder(int dimensions, int shingleSize) {\n        this(dimensions, shingleSize, false);\n    }\n\n    /**\n     * @return true if the shingle has been completely filled once, false otherwise.\n     */\n    public boolean isFull() {\n        return full;\n    }\n\n    /**\n     * @return the number of dimensions in input points.\n     */\n    public int getInputPointSize() {\n        return dimensions;\n    }\n\n    /**\n     * @return the number of dimensions in a shingled point.\n     */\n    public int getShingledPointSize() {\n        return dimensions * shingleSize;\n    }\n\n    /**\n     * @return true if this ShingleBuilder uses cyclic updates, false otherwise.\n     */\n    public boolean isCyclic() {\n        return cyclic;\n    }\n\n    /**\n     * Return the index where the next input point will be stored in the internal\n     * shingle buffer. If the ShingleBuilder uses cyclic updates, this value\n     * indicates the current point in the cycle.\n     *\n     * @return the index where the next input point will be stored in the internal\n     *         shingle buffer.\n     */\n    public int getShingleIndex() {\n        return shingleIndex;\n    }\n\n    /**\n     * Add a new point to this shingle. 
The point values are copied.\n     * \n     * @param point The new point to be added to the shingle.\n     */\n    public void addPoint(double[] point) {\n        checkNotNull(point, \"point must not be null\");\n        checkArgument(point.length == dimensions, String.format(\"point.length must equal %d\", dimensions));\n        System.arraycopy(point, 0, recentPoints[shingleIndex], 0, dimensions);\n\n        shingleIndex = (shingleIndex + 1) % shingleSize;\n        if (!full && shingleIndex == 0) {\n            full = true;\n        }\n    }\n\n    /**\n     * @return the current shingled point.\n     */\n    public double[] getShingle() {\n        double[] shingle = new double[shingleSize * dimensions];\n        getShingle(shingle);\n        return shingle;\n    }\n\n    /**\n     * Write the current shingled point into the supplied buffer.\n     * \n     * @param shingle A buffer where the shingled point will be written.\n     */\n    public void getShingle(double[] shingle) {\n        checkNotNull(shingle, \"shingle must not be null\");\n        checkArgument(shingle.length == dimensions * shingleSize, \"shingle.length must be dimensions * shingleSize\");\n\n        int beginIndex = cyclic ? 0 : shingleIndex;\n\n        for (int i = 0; i < shingleSize; i++) {\n            System.arraycopy(recentPoints[(beginIndex + i) % shingleSize], 0, shingle, i * dimensions, dimensions);\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/main/java/com/amazon/randomcutforest/util/Weighted.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Random;\n\n/**\n * a container class that manages weights\n * \n * @param <Q>\n */\npublic class Weighted<Q> {\n\n    public Q index;\n    public float weight;\n\n    public Weighted(Q object, float weight) {\n        this.index = object;\n        this.weight = weight;\n    }\n\n    /**\n     * a generic MonteCarlo sampler that creates an Arraylist of WeightedIndexes\n     * \n     * @param input               input list of weighted objects\n     * @param seed                random seed for reproducibility\n     * @param forceSampleFraction add the items which have weight over this fraction\n     * @param scale               scale that multiplies the weights of the remainder.\n     *                            Note that elements that are sampled are rescaled\n     *                            to ensure that the total weight (after\n     *                            removal of heavy items) remains the same in\n     *                            expectation\n     * @param <Q>                 a generic index type, typically float[] in the\n     *                            current usage\n     * @return a randomly sampled arraylist (which can be the same list) of 
length\n     *         about LengthBound\n     */\n    public static <Q> List<Weighted<Q>> createSample(List<Weighted<Q>> input, long seed, int lengthBound,\n            double forceSampleFraction, double scale) {\n\n        if (input.size() < lengthBound) {\n            return input;\n        }\n\n        ArrayList<Weighted<Q>> samples = new ArrayList<>();\n        Random rng = new Random(seed);\n        double totalWeight = input.stream().map(x -> (double) x.weight).reduce(Double::sum).get();\n        double remainder = totalWeight;\n\n        if (forceSampleFraction > 0) {\n            remainder = input.stream().map(e -> {\n                if (e.weight > totalWeight * forceSampleFraction) {\n                    samples.add(new Weighted<>(e.index, e.weight));\n                    return 0.0;\n                } else {\n                    return (double) e.weight;\n                }\n            }).reduce(Double::sum).get();\n        }\n        float factor = (float) (lengthBound * 1.0 / input.size());\n        float newScale = (float) (scale * (remainder / totalWeight) / factor);\n        input.stream().forEach(e -> {\n            if ((e.weight <= totalWeight * forceSampleFraction) && (rng.nextDouble() < factor)) {\n                samples.add(new Weighted<>(e.index, e.weight * newScale));\n            }\n        });\n\n        return samples;\n    }\n\n    /**\n     * a utility routine to pick the element such that the prefix sum including\n     * that element exceeds a weight (or is the last element)\n     * \n     * @param points a list of weighted objects\n     * @param wt     a parameter determining the cumulative weight\n     * @return the position of the item satisfying the prefix condition or the last\n     *         element\n     */\n\n    public static <Q> Weighted<Q> prefixPick(List<Weighted<Q>> points, double wt) {\n        checkArgument(points.size() > 0, \"cannot pick from an empty list\");\n        double running = wt;\n        Weighted<Q> saved = 
points.get(0);\n        for (Weighted<Q> point : points) {\n            if (running - point.weight <= 0.0) {\n                return point;\n            }\n            running -= point.weight;\n            saved = point;\n        }\n        return saved;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/AttributionExamplesFunctionalTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\n@Tag(\"functional\")\npublic class AttributionExamplesFunctionalTest {\n\n    private static int numberOfTrees;\n    private static int sampleSize;\n    private static int dimensions;\n    private static int randomSeed;\n    private static RandomCutForest parallelExecutionForest;\n    private static RandomCutForest singleThreadedForest;\n    private static RandomCutForest forestSpy;\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n\n    @Test\n    public void RRCFattributionTest() {\n\n        // starts with the same setup as rrcfTest; data corresponds to two small\n        // clusters at x=+/-5.0\n        // queries q_1=(0,0,0, ..., 0)\n        // inserts updates (0,1,0, ..., 0) a few times\n        // queries 
q_2=(0,1,0, ..., 0)\n        // attribution of q_2 is now affected by q_1 (which is still an anomaly)\n\n        int newDimensions = 30;\n        randomSeed = 101;\n        sampleSize = 256;\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(sampleSize)\n                .dimensions(newDimensions).randomSeed(randomSeed).compact(true).boundingBoxCacheFraction(0.0).build();\n\n        dataSize = 2000 + 5;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 0.0;\n        anomalySigma = 1.0;\n        transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, 100);\n\n        for (int i = 0; i < 2000; i++) {\n            // shrink, shift at random\n            for (int j = 0; j < newDimensions; j++)\n                data[i][j] *= 0.01;\n            if (prg.nextDouble() < 0.5)\n                data[i][0] += 5.0;\n            else\n                data[i][0] -= 5.0;\n            newForest.update(data[i]);\n        }\n\n        double[] queryOne = new double[newDimensions];\n        double[] queryTwo = new double[newDimensions];\n        queryTwo[1] = 1;\n        double originalScoreTwo = newForest.getAnomalyScore(queryTwo);\n        DiVector originalAttrTwo = newForest.getAnomalyAttribution(queryTwo);\n\n        assertTrue(originalScoreTwo > 3.0);\n        assertEquals(originalScoreTwo, originalAttrTwo.getHighLowSum(), 1E-5);\n\n        assertTrue(originalAttrTwo.high[0] > 1.0); // due to -5 cluster\n        assertTrue(originalAttrTwo.low[0] > 1.0); // due to +5 cluster\n        assertTrue(originalAttrTwo.high[1] > 1); // due to +1 in query\n  
      assertTrue(originalAttrTwo.getHighLowSum(0) > 1.1 * originalAttrTwo.getHighLowSum(1));\n\n        // we insert queryOne a few times to make sure it is sampled\n        for (int i = 2000; i < 2000 + 5; i++) {\n            double score = newForest.getAnomalyScore(queryOne);\n            double score2 = newForest.getAnomalyScore(queryTwo);\n            DiVector attr2 = newForest.getAnomalyAttribution(queryTwo);\n\n            // verify\n            assertTrue(score > 2.0);\n            assertTrue(score2 > 2.0);\n            assertEquals(attr2.getHighLowSum(), score2, 1E-5);\n\n            for (int j = 0; j < newDimensions; j++)\n                data[i][j] *= 0.01;\n            newForest.update(data[i]);\n            // 5 different anomalous points\n        }\n\n        double midScoreTwo = newForest.getAnomalyScore(queryTwo);\n        DiVector midAttrTwo = newForest.getAnomalyAttribution(queryTwo);\n\n        assertTrue(midScoreTwo > 2.4);\n        assertEquals(midScoreTwo, midAttrTwo.getHighLowSum(), 1E-5);\n\n        assertTrue(midAttrTwo.high[0] < 1); // due to -5 cluster !!!\n        assertTrue(midAttrTwo.low[0] < 1); // due to +5 cluster !!!\n        assertTrue(midAttrTwo.high[1] > 1); // due to +1 in query\n        assertTrue(midAttrTwo.getHighLowSum(0) < 1.1 * midAttrTwo.high[1]);\n        // reversal of the dominant dimension\n        // still an anomaly; but the attribution is masked by points\n\n        // a few more updates, which are identical\n        for (int i = 2005; i < 2010; i++) {\n            newForest.update(queryOne);\n        }\n\n        double finalScoreTwo = newForest.getAnomalyScore(queryTwo);\n        DiVector finalAttrTwo = newForest.getAnomalyAttribution(queryTwo);\n        assertTrue(finalScoreTwo > 2.4);\n        assertEquals(finalScoreTwo, finalAttrTwo.getHighLowSum(), 1E-5);\n        assertTrue(finalAttrTwo.high[0] < 0.5); // due to -5 cluster !!!\n        assertTrue(finalAttrTwo.low[0] < 0.5); // due to +5 cluster !!!\n        
assertTrue(finalAttrTwo.high[1] > 1); // due to +1 in query\n        assertTrue(2.5 * finalAttrTwo.getHighLowSum(0) < finalAttrTwo.high[1]);\n        // the drop in high[0] and low[0] is steep and the attribution has shifted\n\n    }\n\n    @Test\n    public void attributionUnMaskingTest() {\n\n        // starts with the same setup as rrcfTest; data corresponds to two small\n        // clusters at x=+/-5.0\n        // queries q_1=(0,0,0, ..., 0)\n        // inserts updates (0,1,0, ..., 0) a few times\n        // queries q_2=(0,1,0, ..., 0)\n        // attribution of q_2 is now affected by q_1 (which is still an anomaly)\n\n        int newDimensions = 30;\n        randomSeed = 179;\n        sampleSize = 256;\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(sampleSize)\n                .dimensions(newDimensions).randomSeed(randomSeed).compact(true)\n                .boundingBoxCacheFraction(new Random().nextDouble()).timeDecay(1e-5).build();\n\n        dataSize = 2000 + 5;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 0.0;\n        anomalySigma = 1.5;\n        transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, 100);\n\n        for (int i = 0; i < 2000; i++) {\n            // shrink, shift at random\n            for (int j = 0; j < newDimensions; j++)\n                data[i][j] *= 0.01;\n            if (prg.nextDouble() < 0.5)\n                data[i][0] += 5.0;\n            else\n                data[i][0] -= 5.0;\n            newForest.update(data[i]);\n        }\n\n        float[] queryOne = new float[30];\n        
float[] queryTwo = new float[30];\n        queryTwo[1] = 1;\n        double originalScoreTwo = newForest.getAnomalyScore(queryTwo);\n        // testing approximation with precision 0 (no approximation)\n        DiVector originalAttrTwo = newForest.getApproximateDynamicAttribution(queryTwo, 0, true, 0,\n                CommonUtils::defaultScoreSeenFunction, CommonUtils::defaultScoreUnseenFunction,\n                CommonUtils::defaultDampFunction);\n\n        originalAttrTwo.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n        assertTrue(originalScoreTwo > 3.0);\n\n        assertEquals(originalScoreTwo, originalAttrTwo.getHighLowSum(), 1E-5);\n\n        assertTrue(originalAttrTwo.high[0] > 0.75); // due to -5 cluster\n        assertTrue(originalAttrTwo.low[0] > 0.75); // due to +5 cluster\n        assertTrue(originalAttrTwo.high[1] > 1); // due to +1 in query\n        assertTrue(originalAttrTwo.getHighLowSum(0) > originalAttrTwo.getHighLowSum(1));\n\n        double apx = newForest.getApproximateDynamicScore(queryTwo, 0.1, true, 0, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        assertEquals(originalScoreTwo, CommonUtils.defaultScalarNormalizerFunction(apx, sampleSize), 0.2);\n        assertEquals(apx,\n                newForest\n                        .getApproximateDynamicAttribution(queryTwo, 0.1, true, 0, CommonUtils::defaultScoreSeenFunction,\n                                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction)\n                        .getHighLowSum(),\n                1e-5);\n\n        // we insert queryOne a few times to make sure it is sampled\n        for (int i = 2000; i < 2000 + 5; i++) {\n            double score = newForest.getAnomalyScore(queryOne);\n            double score2 = newForest.getAnomalyScore(queryTwo);\n            DiVector attr2 = 
newForest.getDynamicAttribution(queryTwo, 0, CommonUtils::defaultScoreSeenFunction,\n                    CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n            attr2.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n\n            double score3 = newForest.getDynamicScore(queryTwo, 1, CommonUtils::defaultScoreSeenFunction,\n                    CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n            score3 = CommonUtils.defaultScalarNormalizerFunction(score3, sampleSize);\n            DiVector attr3 = newForest.getDynamicAttribution(queryTwo, 1, CommonUtils::defaultScoreSeenFunction,\n                    CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n            attr3.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n\n            // verify\n            assertTrue(score > 2.0);\n            assertTrue(score2 > 2.0);\n            assertTrue(score3 > 2.0);\n            assertEquals(attr2.getHighLowSum(), score2, 1E-5);\n            assertEquals(attr3.getHighLowSum(), score3, 1E-5);\n\n            for (int j = 0; j < newDimensions; j++)\n                data[i][j] *= 0.01;\n            newForest.update(data[i]);\n            // 5 different anomalous points\n        }\n\n        double midScoreTwo = newForest.getAnomalyScore(queryTwo);\n        DiVector midAttrTwo = newForest.getDynamicAttribution(queryTwo, 0, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        midAttrTwo.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n\n        assertTrue(midScoreTwo > 2.5);\n        assertEquals(midScoreTwo, midAttrTwo.getHighLowSum(), 1E-5);\n\n        assertTrue(midAttrTwo.high[1] > 1); // due to +1 in query\n        assertTrue(midAttrTwo.getHighLowSum(0) < 1.2 * 
midAttrTwo.high[1]);\n        // reversal of the dominant dimension\n        // still an anomaly; but the attribution is masked by points\n\n        double midUnmaskedScore = newForest.getDynamicScore(queryTwo, 1, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        midUnmaskedScore = CommonUtils.defaultScalarNormalizerFunction(midUnmaskedScore, sampleSize);\n        DiVector midUnmaskedAttr = newForest.getDynamicAttribution(queryTwo, 1, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        midUnmaskedAttr.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n\n        assertTrue(midUnmaskedScore > 3.0);\n        assertEquals(midUnmaskedScore, midUnmaskedAttr.getHighLowSum(), 1E-5);\n\n        assertTrue(midUnmaskedAttr.high[1] > 1); // due to +1 in query\n        assertTrue(midUnmaskedAttr.getHighLowSum(0) > midUnmaskedAttr.getHighLowSum(1));\n        // contribution from dimension 0 is still dominant\n        // the attributions in dimension 0 are reduced, but do not\n        // or become as small as quickly as in the other case\n\n        // a few more updates, which are identical\n        for (int i = 2005; i < 2010; i++) {\n            newForest.update(queryOne);\n        }\n\n        double finalScoreTwo = newForest.getAnomalyScore(queryTwo);\n        DiVector finalAttrTwo = newForest.getDynamicAttribution(queryTwo, 0, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        finalAttrTwo.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n        assertTrue(finalScoreTwo > 2.5);\n        assertEquals(finalScoreTwo, finalAttrTwo.getHighLowSum(), 1E-5);\n\n        assertTrue(finalAttrTwo.high[1] > 1); // due to +1 in query\n        
assertTrue(2 * finalAttrTwo.getHighLowSum(0) < finalAttrTwo.high[1]);\n        // the drop in high[0] and low[0] is steep and the attribution has shifted\n\n        // different thresholds\n        double finalUnmaskedScore = newForest.getDynamicScore(queryTwo, 5, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        finalUnmaskedScore = CommonUtils.defaultScalarNormalizerFunction(finalUnmaskedScore, sampleSize);\n        DiVector finalUnmaskedAttr = newForest.getDynamicAttribution(queryTwo, 5, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        finalUnmaskedAttr.componentwiseTransform(x -> CommonUtils.defaultScalarNormalizerFunction(x, sampleSize));\n\n        assertTrue(finalUnmaskedScore > 3.0);\n        assertEquals(finalUnmaskedScore, finalUnmaskedAttr.getHighLowSum(), 1E-5);\n\n        assertTrue(finalUnmaskedAttr.high[1] > 1); // due to +1 in query\n        assertTrue(finalUnmaskedAttr.getHighLowSum(0) > 0.8 * finalUnmaskedAttr.getHighLowSum(1));\n\n        // the attributions in dimension 0 continue to be reduced, but do not vanish\n        // or become small as in the other case; the gap is not a factor of 4\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/CPUTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.Arrays;\nimport java.util.concurrent.ForkJoinPool;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n/**\n * The following \"test\" is intended to provide an approximate estimate of the\n * improvement from parallelization. At the outset, we remark that running the\n * test from inside an IDE/environment may reflect more of the environment.\n * Issues such as warming are not reflected in this test.\n *\n * Users who wish to obtain more calibrated estimates should use a benchmark --\n * preferably using their own \"typical\" data and their end to end setup.\n * Performance of RCF is data dependent. 
Such users may be invoking RCF\n * functions differently from a standard \"impute, score, update\" process\n * recommended for streaming time series data.\n *\n * Moreover, in the context of a large number of models, the rate at which the\n * models require updates is also a factor and not controlled herein.\n *\n * The two tests should produce near identical sum of scores, and (root) mean\n * squared error of the impute up to machine precision (since the order of the\n * arithmetic operations would vary).\n *\n * To summarize the lessons, it appears that parallelism almost always helps\n * (up to resource limitations). If a user is considering a single model -- say\n * from a console or dashboard, they should consider having parallel threads\n * enabled. For large number of models, it may be worthwhile to also investigate\n * different ways of achieving parallelism and not just attempt to change the\n * executor framework.\n *\n */\n\n@Tag(\"functional\")\npublic class CPUTest {\n\n    int numberOfTrees = 30;\n    int DATA_SIZE = 10000;\n    int numberOfForests = 6;\n    int numberOfAttributes = 5;\n    int shingleSize = 30;\n    int sampleSize = 256;\n    // set numberOfThreads = 1 to turn off parallelism\n    int numberOfThreads = 3;\n    // change boundingBoxCacheFraction to see different memory consumption\n    // this would be germane for large number of models cache/memory contention\n    double boundingBoxCacheFraction = 1.0;\n    int dimensions = shingleSize * numberOfAttributes;\n\n    @Test\n    public void profileTestSync() {\n        double[] mse = new double[numberOfForests];\n        int[] mseCount = new int[numberOfForests];\n        double[] score = new double[numberOfForests];\n\n        double[][] data = ShingledMultiDimDataWithKeys.getMultiDimData(DATA_SIZE, 60, 100, 5, 0,\n                numberOfAttributes).data;\n\n        RandomCutForest[] forests = new RandomCutForest[numberOfForests];\n        for (int k = 0; k < numberOfForests; k++) {\n  
          forests[k] = RandomCutForest.builder().numberOfTrees(numberOfTrees).dimensions(dimensions)\n                    .shingleSize(shingleSize).boundingBoxCacheFraction(boundingBoxCacheFraction).randomSeed(99 + k)\n                    .outputAfter(10).parallelExecutionEnabled(true).threadPoolSize(numberOfThreads)\n                    .internalShinglingEnabled(true).initialAcceptFraction(0.1).sampleSize(sampleSize).build();\n        }\n\n        for (int j = 0; j < data.length; j++) {\n            for (int k = 0; k < numberOfForests; k++) {\n                score[k] += forests[k].getAnomalyScore(data[j]);\n                if (j % 10 == 0 && j > 0) {\n                    double[] result = forests[k].extrapolate(1);\n                    double sum = 0;\n                    for (int i = 0; i < result.length; i++) {\n                        double t = result[i] - data[j][i];\n                        sum += t * t;\n                    }\n                    sum = Math.sqrt(sum);\n                    mse[k] += sum;\n                    mseCount[k]++;\n                }\n                forests[k].update(data[j]);\n            }\n        }\n        for (int k = 0; k < numberOfForests; k++) {\n            System.out.println(\" Forest \" + k);\n            System.out.println(\" MSE \" + mse[k] / mseCount[k]);\n            System.out.println(\" scoresum \" + score[k] / data.length);\n        }\n    }\n\n    @Test\n    public void profileTestASync() {\n        double[] mse = new double[numberOfForests];\n        int[] mseCount = new int[numberOfForests];\n        double[] score = new double[numberOfForests];\n\n        double[][] data = ShingledMultiDimDataWithKeys.getMultiDimData(DATA_SIZE, 60, 100, 5, 0,\n                numberOfAttributes).data;\n\n        RandomCutForest[] forests = new RandomCutForest[numberOfForests];\n        for (int k = 0; k < numberOfForests; k++) {\n            forests[k] = 
RandomCutForest.builder().numberOfTrees(numberOfTrees).dimensions(dimensions)\n                    .shingleSize(shingleSize).boundingBoxCacheFraction(boundingBoxCacheFraction).randomSeed(99 + k)\n                    .outputAfter(10).parallelExecutionEnabled(false).internalShinglingEnabled(true)\n                    .initialAcceptFraction(0.1).sampleSize(sampleSize).build();\n        }\n\n        ForkJoinPool forkJoinPool = new ForkJoinPool(numberOfThreads);\n        int[] indices = new int[numberOfForests];\n        for (int k = 0; k < numberOfForests; k++) {\n            indices[k] = k;\n        }\n\n        for (int j = 0; j < data.length; j++) {\n            int finalJ = j;\n            forkJoinPool.submit(() -> Arrays.stream(indices).parallel().forEach(k -> {\n                score[k] += forests[k].getAnomalyScore(data[finalJ]);\n                if (finalJ % 10 == 0 && finalJ > 0) {\n                    double[] result = forests[k].extrapolate(1);\n                    double sum = 0;\n                    for (int i = 0; i < result.length; i++) {\n                        double t = result[i] - data[finalJ][i];\n                        sum += t * t;\n                    }\n                    sum = Math.sqrt(sum);\n                    mse[k] += sum;\n                    mseCount[k]++;\n                }\n                forests[k].update(data[finalJ]);\n            })).join();\n        }\n        for (int k = 0; k < numberOfForests; k++) {\n            System.out.println(\" Forest \" + k);\n            System.out.println(\" MSE \" + mse[k] / mseCount[k]);\n            System.out.println(\" scoresum \" + score[k] / data.length);\n        }\n    }\n\n}"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/ConditionalFieldTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class ConditionalFieldTest {\n\n    private static int numberOfTrees;\n    private static int sampleSize;\n    private static int dimensions;\n    private static int randomSeed;\n    private static RandomCutForest parallelExecutionForest;\n    private static RandomCutForest singleThreadedForest;\n    private static RandomCutForest forestSpy;\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n\n    @Test\n    public void SimpleTest() {\n\n        int newDimensions = 30;\n        randomSeed = 101;\n        sampleSize = 256;\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(sampleSize)\n                .dimensions(newDimensions).randomSeed(randomSeed).boundingBoxCacheFraction(0.0).build();\n\n     
   dataSize = 2000 + 5;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 0.0;\n        anomalySigma = 1.0;\n        transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, 100);\n\n        for (int i = 0; i < 2000; i++) {\n            // shrink, shift at random\n            for (int j = 0; j < newDimensions; j++)\n                data[i][j] *= 0.01;\n            if (prg.nextDouble() < 0.5)\n                data[i][0] += 5.0;\n            else\n                data[i][0] -= 5.0;\n            newForest.update(data[i]);\n        }\n\n        float[] queryOne = new float[newDimensions];\n        float[] queryTwo = new float[newDimensions];\n        queryTwo[1] = 1;\n        SampleSummary summary = newForest.getConditionalFieldSummary(queryOne, new int[] { 0 }, 1, 0, true, false, 1,\n                1);\n\n        assert (summary.summaryPoints.length == 2);\n        assert (summary.relativeWeight.length == 2);\n        assert (Math.abs(summary.summaryPoints[0][0] - 5.0) < 0.01\n                || Math.abs(summary.summaryPoints[0][0] + 5.0) < 0.01);\n        assert (Math.abs(summary.summaryPoints[1][0] - 5.0) < 0.01\n                || Math.abs(summary.summaryPoints[1][0] + 5.0) < 0.01);\n        assert (summary.relativeWeight[0] > 0.25);\n        assert (summary.relativeWeight[1] > 0.25);\n        SampleSummary projectedSummaryOne = newForest.getConditionalFieldSummary(queryOne, new int[] { 0 }, 1, 0, false,\n                true, 1, 1);\n        assertTrue(projectedSummaryOne.summaryPoints == null);\n        assertTrue(projectedSummaryOne.mean.length == 1);\n        
SampleSummary projectedSummaryTwo = newForest.getConditionalFieldSummary(queryOne, new int[] { 0 }, 1, 0, true,\n                true, 0, 1);\n        assertTrue(projectedSummaryTwo.summaryPoints != null);\n        assertTrue(projectedSummaryTwo.mean.length == 1);\n        SampleSummary projectedSummaryThree = newForest.getConditionalFieldSummary(queryOne, new int[] { 0 }, 1, 0,\n                false, false, 1, 3);\n        assertTrue(projectedSummaryThree.summaryPoints == null);\n        assertTrue(projectedSummaryThree.mean.length == newDimensions / 3);\n        SampleSummary projectedSummaryFour = newForest.getConditionalFieldSummary(queryOne, new int[] { 0 }, 1, 0, true,\n                false, 1, 4);\n        assertTrue(projectedSummaryFour.summaryPoints != null);\n        assertTrue(projectedSummaryFour.mean.length == newDimensions / 4);\n\n        summary = newForest.getConditionalFieldSummary(queryTwo, new int[] { 0 }, 1, 0, true, false, 1, 1);\n\n        assert (summary.summaryPoints.length == 2);\n        assert (summary.relativeWeight.length == 2);\n        assertEquals(summary.summaryPoints[0][1], 1, 1e-6);\n        assertEquals(summary.summaryPoints[1][1], 1, 1e-6);\n        assert (Math.abs(summary.summaryPoints[0][0] - 5.0) < 0.01\n                || Math.abs(summary.summaryPoints[0][0] + 5.0) < 0.01);\n        assert (Math.abs(summary.summaryPoints[1][0] - 5.0) < 0.01\n                || Math.abs(summary.summaryPoints[1][0] + 5.0) < 0.01);\n        assert (summary.relativeWeight[0] > 0.25);\n        assert (summary.relativeWeight[1] > 0.25);\n\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/DynamicPointSetFunctionalTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.generateFan;\nimport static java.lang.Math.PI;\nimport static java.lang.Math.cos;\nimport static java.lang.Math.sin;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.List;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.Neighbor;\n\n@Tag(\"functional\")\npublic class DynamicPointSetFunctionalTest {\n\n    private static int numberOfTrees;\n    private static int sampleSize;\n    private static int dimensions;\n    private static int randomSeed;\n    private static RandomCutForest parallelExecutionForest;\n    private static RandomCutForest singleThreadedForest;\n    private static RandomCutForest forestSpy;\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n\n    static double[] rotateClockWise(double[] point, double theta) {\n        double[] result = new double[2];\n 
       result[0] = cos(theta) * point[0] + sin(theta) * point[1];\n        result[1] = -sin(theta) * point[0] + cos(theta) * point[1];\n        return result;\n    }\n\n    @Test\n    public void movingDensity() {\n        int newDimensions = 2;\n        randomSeed = 123;\n\n        RandomCutForest newForest = RandomCutForest.builder().dimensions(newDimensions).randomSeed(randomSeed)\n                .timeDecay(1.0 / 800).centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).build();\n\n        double[][] data = generateFan(1000, 3);\n\n        double[] queryPoint = new double[] { 0.7, 0 };\n        for (int degree = 0; degree < 360; degree += 2) {\n            for (int j = 0; j < data.length; j++) {\n                newForest.update(rotateClockWise(data[j], 2 * PI * degree / 360));\n            }\n            DensityOutput density = newForest.getSimpleDensity(queryPoint);\n            double value = density.getDensity(0.001, 2);\n            if ((degree <= 60) || ((degree >= 120) && (degree <= 180)) || ((degree >= 240) && (degree <= 300)))\n                assertTrue(value < 0.8); // the fan is above at 90,210,330\n\n            if (((degree >= 75) && (degree <= 105)) || ((degree >= 195) && (degree <= 225))\n                    || ((degree >= 315) && (degree <= 345)))\n                assertTrue(value > 0.5);\n            // fan is close by\n            // intentionally 0.5 is below 0.8 for a robust test\n\n            // Testing for directionality\n            // There can be unclear directionality when the\n            // blades are right above\n\n            double bladeAboveInY = density.getDirectionalDensity(0.001, 2).low[1];\n            double bladeBelowInY = density.getDirectionalDensity(0.001, 2).high[1];\n            double bladesToTheLeft = density.getDirectionalDensity(0.001, 2).high[0];\n            double bladesToTheRight = density.getDirectionalDensity(0.001, 2).low[0];\n\n            assertEquals(value, bladeAboveInY + bladeBelowInY + 
bladesToTheLeft + bladesToTheRight, 1E-6);\n\n            // the tests below have a freedom of 10% of the total value\n            if (((degree >= 75) && (degree <= 85)) || ((degree >= 195) && (degree <= 205))\n                    || ((degree >= 315) && (degree <= 325))) {\n                assertTrue(bladeAboveInY + 0.1 * value > bladeBelowInY);\n                assertTrue(bladeAboveInY + 0.1 * value > bladesToTheRight);\n            }\n\n            if (((degree >= 95) && (degree <= 105)) || ((degree >= 215) && (degree <= 225))\n                    || ((degree >= 335) && (degree <= 345))) {\n                assertTrue(bladeBelowInY + 0.1 * value > bladeAboveInY);\n                assertTrue(bladeBelowInY + 0.1 * value > bladesToTheRight);\n            }\n\n            if (((degree >= 60) && (degree <= 75)) || ((degree >= 180) && (degree <= 195))\n                    || ((degree >= 300) && (degree <= 315))) {\n                assertTrue(bladeAboveInY + 0.1 * value > bladesToTheLeft);\n                assertTrue(bladeAboveInY + 0.1 * value > bladesToTheRight);\n            }\n\n            if (((degree >= 105) && (degree <= 120)) || ((degree >= 225) && (degree <= 240)) || (degree >= 345)) {\n                assertTrue(bladeBelowInY + 0.1 * value > bladesToTheLeft);\n                assertTrue(bladeBelowInY + 0.1 * value > bladesToTheRight);\n            }\n\n            // fans are farthest to the left at 30,150 and 270\n            if (((degree >= 15) && (degree <= 45)) || ((degree >= 135) && (degree <= 165))\n                    || ((degree >= 255) && (degree <= 285))) {\n                assertTrue(bladesToTheLeft + 0.1 * value > bladeAboveInY + bladeBelowInY + bladesToTheRight);\n                assertTrue(bladeAboveInY + bladeBelowInY + 0.1 * value > bladesToTheRight);\n            }\n\n        }\n\n    }\n\n    @Test\n    public void movingNeighbors() {\n        int newDimensions = 2;\n        randomSeed = 123;\n\n        RandomCutForest newForest = 
RandomCutForest.builder().dimensions(newDimensions).randomSeed(randomSeed)\n                .timeDecay(1.0 / 800).centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).build();\n\n        double[][] data = generateFan(1000, 3);\n\n        double[] queryPoint = new double[] { 0.7, 0 };\n        for (int degree = 0; degree < 360; degree += 2) {\n            for (int j = 0; j < data.length; j++) {\n                newForest.update(rotateClockWise(data[j], 2 * PI * degree / 360));\n            }\n            List<Neighbor> ans = newForest.getNearNeighborsInSample(queryPoint, 1);\n            List<Neighbor> closeNeighBors = newForest.getNearNeighborsInSample(queryPoint, 0.1);\n            Neighbor best = null;\n            if (ans != null) {\n                best = ans.get(0);\n                for (int j = 1; j < ans.size(); j++) {\n                    assert (ans.get(j).distance >= best.distance);\n                }\n            }\n\n            // fan is away at 30, 150 and 270\n            if (((degree > 15) && (degree < 45)) || ((degree >= 135) && (degree <= 165))\n                    || ((degree >= 255) && (degree <= 285))) {\n                assertTrue(closeNeighBors.size() == 0); // no close neighbor\n                assertTrue(best.distance > 0.3);\n            }\n\n            // fan is overhead at 90, 210 and 330\n            if (((degree > 75) && (degree < 105)) || ((degree >= 195) && (degree <= 225))\n                    || ((degree >= 315) && (degree <= 345))) {\n                assertTrue(closeNeighBors.size() > 0);\n                assertEquals(closeNeighBors.get(0).distance, best.distance, 1E-10);\n            }\n\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/ForecastTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys.generateShingledData;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n@Tag(\"functional\")\npublic class ForecastTest {\n\n    @Test\n    public void basic() {\n\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        System.out.println(seed);\n\n        int length = 4 * sampleSize;\n        int outputAfter = 128;\n\n        RandomCutForest forest = new RandomCutForest.Builder<>().compact(true).dimensions(dimensions)\n                .precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize)\n                .outputAfter(outputAfter).build();\n\n        // as the ratio of amplitude (signal) to noise is changed, the estimation range\n        // in forecast\n        // (or 
any other inference) should increase\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,\n                baseDimensions);\n\n        System.out.println(dataWithKeys.changes.length + \" anomalies injected \");\n        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);\n\n        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);\n\n        int horizon = 20;\n        double[] error = new double[horizon];\n        double[] lowerError = new double[horizon];\n        double[] upperError = new double[horizon];\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            // forecast first; change centrality to achieve a control over the sampling\n            // setting centrality = 0 would correspond to random sampling from the leaves\n            // reached by\n            // impute visitor\n            RangeVector forecast = forest.extrapolateFromShingle(forest.lastShingledPoint(), horizon, 1, 1.0);\n            assert (forecast.values.length == horizon);\n            for (int i = 0; i < horizon; i++) {\n                // check ranges\n                assert (forecast.values[i] >= forecast.lower[i]);\n                assert (forecast.values[i] <= forecast.upper[i]);\n                // compute errors\n                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {\n                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];\n                    error[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];\n                    lowerError[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];\n                    upperError[i] += t * t;\n                }\n            }\n            forest.update(dataWithKeys.data[j]);\n        }\n\n        System.out.println(\"RMSE \");\n        for (int i = 0; i < 
horizon; i++) {\n            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Lower \");\n        for (int i = 0; i < horizon; i++) {\n            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Upper \");\n        for (int i = 0; i < horizon; i++) {\n            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/MultiCenterTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static java.lang.Math.min;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.MethodSource;\n\nimport com.amazon.randomcutforest.summarization.GenericMultiCenter;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.MultiCenter;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.amazon.randomcutforest.util.Weighted;\n\n@Tag(\"functional\")\npublic class MultiCenterTest {\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    
private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n\n    @Test\n    public void constructorTest() {\n        assertThrows(IllegalArgumentException.class, () -> MultiCenter.initialize(new float[4], 0, -1.0, 1));\n        assertThrows(IllegalArgumentException.class, () -> MultiCenter.initialize(new float[4], 0, 2.0, 1));\n        assertThrows(IllegalArgumentException.class, () -> MultiCenter.initialize(new float[4], 0, 1.0, 0));\n        assertThrows(IllegalArgumentException.class, () -> MultiCenter.initialize(new float[4], 0, 1.0, 1000));\n        assertThrows(IllegalArgumentException.class, () -> GenericMultiCenter.initialize(new float[4], 0, -1.0, 1));\n        assertThrows(IllegalArgumentException.class, () -> GenericMultiCenter.initialize(new float[4], 0, 2.0, 1));\n        assertThrows(IllegalArgumentException.class, () -> GenericMultiCenter.initialize(new float[4], 0, 1.0, 0));\n        assertThrows(IllegalArgumentException.class, () -> GenericMultiCenter.initialize(new float[4], 0, 1.0, 1000));\n    }\n\n    @Test\n    public void initializationTest() {\n        GenericMultiCenter genericMultiCenter = GenericMultiCenter.initialize(new float[4], 0, 0.5, 1);\n        MultiCenter multiCenter = MultiCenter.initialize(new float[4], 0, 0.5, 1);\n        List<Weighted<Integer>> a = new ArrayList<>();\n        assertEquals(multiCenter.getAssignedPoints().getClass(), a.getClass());\n        assertEquals(genericMultiCenter.getAssignedPoints(), Collections.emptyList());\n        assertEquals(genericMultiCenter.averageRadius(), 0);\n        assertEquals(genericMultiCenter.extentMeasure(), 0);\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"generateArguments\")\n    public void SummaryTest(BiFunction<float[], float[], Double> distance) {\n\n        int over = 0;\n        int under = 0;\n\n        for (int numTrials = 0; numTrials < 10; numTrials++) {\n            long seed 
= new Random().nextLong();\n            Random random = new Random(seed);\n            int newDimensions = random.nextInt(10) + 3;\n            dataSize = 200000;\n\n            float[][] points = getData(dataSize, newDimensions, random.nextInt(), distance);\n\n            List<ICluster<float[]>> summary = Summarizer.multiSummarize(points, 5 * newDimensions, 10 * newDimensions,\n                    1, false, 0.8, distance, random.nextInt(), false, random.nextDouble(), 1);\n            System.out.println(\"trial \" + numTrials + \" : \" + summary.size() + \" clusters for \" + newDimensions\n                    + \" dimensions, seed : \" + seed);\n            if (summary.size() < 2 * newDimensions) {\n                ++under;\n            } else if (summary.size() > 2 * newDimensions) {\n                ++over;\n            }\n        }\n        assert (under <= 1);\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"generateArguments\")\n    public void MultiSummaryTestGeneric(BiFunction<float[], float[], Double> distance) {\n\n        int over = 0;\n        int under = 0;\n\n        for (int numTrials = 0; numTrials < 10; numTrials++) {\n            long seed = new Random().nextLong();\n            Random random = new Random(seed);\n            int newDimensions = random.nextInt(10) + 3;\n            dataSize = 200000;\n\n            float[][] points = getData(dataSize, newDimensions, random.nextInt(), distance);\n\n            List<ICluster<float[]>> summary = Summarizer.multiSummarize(points, 5 * newDimensions, 10 * newDimensions,\n                    1, false, 0.8, distance, random.nextInt(), false, random.nextDouble(), 5);\n            System.out.println(\"trial \" + numTrials + \" : \" + summary.size() + \" clusters for \" + newDimensions\n                    + \" dimensions, seed : \" + seed);\n            if (summary.size() < 2 * newDimensions) {\n                ++under;\n            } else if (summary.size() > 2 * newDimensions) {\n                
++over;\n            }\n        }\n        assert (under <= 1);\n    }\n\n    @Test\n    public void MultiSummaryTest() {\n\n        int over = 0;\n        int under = 0;\n\n        for (int numTrials = 0; numTrials < 10; numTrials++) {\n            long seed = new Random().nextLong();\n            Random random = new Random(seed);\n            int newDimensions = random.nextInt(10) + 3;\n            dataSize = 200000;\n\n            float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n\n            List<ICluster<float[]>> summary = Summarizer.multiSummarize(points, 5 * newDimensions, 0.9, true, 1, seed);\n            System.out.println(\"trial \" + numTrials + \" : \" + summary.size() + \" clusters for \" + newDimensions\n                    + \" dimensions, seed : \" + seed);\n            if (summary.size() < 2 * newDimensions) {\n                ++under;\n            } else if (summary.size() > 2 * newDimensions) {\n                ++over;\n            }\n        }\n        assert (under <= 1);\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"generateArguments\")\n    public void ParallelTest(BiFunction<float[], float[], Double> distance) {\n\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        dataSize = 200000;\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), distance);\n        System.out.println(\"checking parallelEnabled seed : \" + seed);\n        int nextSeed = random.nextInt();\n        // these can differ for shinkage != 0 due to floating point issues\n        List<ICluster<float[]>> summary1 = Summarizer.multiSummarize(points, 5 * newDimensions, 10 * newDimensions, 1,\n                false, 0.8, distance, nextSeed, false, 0, 5);\n        ArrayList<float[]> list = new ArrayList<>();\n        for (float[] point : points) {\n            list.add(point);\n        }\n        
List<ICluster<float[]>> summary2 = Summarizer.multiSummarize(list, 5 * newDimensions, 10 * newDimensions, 1,\n                false, 0.8, distance, nextSeed, true, 0, 5);\n\n        assertEquals(summary2.size(), summary1.size(), \" incorrect number of clusters\");\n        for (int i = 0; i < summary2.size(); i++) {\n            assertEquals(summary1.get(i).getWeight(), summary2.get(i).getWeight(), 1e-6);\n            assertEquals(summary1.get(i).extentMeasure(), summary2.get(i).extentMeasure(), 1e-6);\n            List<Weighted<float[]>> reps1 = summary1.get(i).getRepresentatives();\n            List<Weighted<float[]>> reps2 = summary2.get(i).getRepresentatives();\n            assertEquals(reps1.size(), reps2.size());\n            for (int j = 0; j < reps1.size(); j++) {\n                assertEquals(reps1.get(j).weight, reps2.get(j).weight, 1e-6);\n                assertArrayEquals(reps1.get(j).index, reps2.get(j).index, 1e-6f);\n            }\n        }\n\n    }\n\n    @Test\n    public void StringTest() {\n\n        long seed = new Random().nextLong();\n        System.out.println(\"checking String summarization seed : \" + seed);\n        Random random = new Random(seed);\n        int size = 100;\n        int numberOfStrings = 20000;\n\n        String[] points = new String[numberOfStrings];\n        for (int i = 0; i < numberOfStrings; i++) {\n            if (random.nextDouble() < 0.5) {\n                points[i] = getABString(size, 0.8, random);\n            } else {\n                points[i] = getABString(size, 0.2, random);\n            }\n        }\n\n        int nextSeed = random.nextInt();\n\n        List<ICluster<String>> summary = Summarizer.multiSummarize(points, 5, 10, 1, false, 0.8,\n                MultiCenterTest::toyDistance, nextSeed, false, 0.1, 5);\n        System.out.println();\n        assertEquals(summary.size(), 2);\n    }\n\n    public static double toyDistance(String a, String b) {\n        if (a.length() > b.length()) {\n            
return toyDistance(b, a);\n        }\n        double[][] dist = new double[2][b.length() + 1];\n        for (int j = 0; j < b.length() + 1; j++) {\n            dist[0][j] = j;\n        }\n\n        for (int i = 1; i < a.length() + 1; i++) {\n            dist[1][0] = i;\n            for (int j = 1; j < b.length() + 1; j++) {\n                double t = dist[0][j - 1] + ((a.charAt(i - 1) == b.charAt(j - 1)) ? 0 : 1);\n                dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);\n            }\n            for (int j = 0; j < b.length() + 1; j++) {\n                dist[0][j] = dist[1][j];\n            }\n        }\n        return dist[1][b.length()];\n    }\n\n    public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 0.0;\n        anomalySigma = 1.0;\n        transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);\n        float[][] floatData = new float[dataSize][];\n\n        float[] allZero = new float[newDimensions];\n        float[] sigma = new float[newDimensions];\n        Arrays.fill(sigma, 1f);\n        double scale = distance.apply(allZero, sigma);\n\n        for (int i = 0; i < dataSize; i++) {\n            // shrink, shift at random\n            int nextD = prg.nextInt(newDimensions);\n            for (int j = 0; j < newDimensions; j++) {\n                data[i][j] *= 1.0 / (3.0);\n                // standard deviation adds up across dimension; taking square root\n                // and using s 3 sigma ball\n                if 
(j == nextD) {\n                    if (prg.nextDouble() < 0.5)\n                        data[i][j] += 2.0 * scale;\n                    else\n                        data[i][j] -= 2.0 * scale;\n                }\n            }\n            floatData[i] = toFloatArray(data[i]);\n        }\n\n        return floatData;\n    }\n\n    public String getABString(int size, double probabilityOfA, Random random) {\n        StringBuilder stringBuilder = new StringBuilder();\n        int newSize = size + random.nextInt(size / 5);\n        for (int i = 0; i < newSize; i++) {\n            if (random.nextDouble() < probabilityOfA) {\n                stringBuilder.append(\"-\");\n            } else {\n                stringBuilder.append(\"_\");\n            }\n        }\n        return stringBuilder.toString();\n    }\n\n    private static Stream<Arguments> generateArguments() {\n        return Stream.of(Arguments.of((BiFunction<float[], float[], Double>) Summarizer::L1distance),\n                Arguments.of((BiFunction<float[], float[], Double>) Summarizer::L2distance));\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/PredictiveRandomCutForestTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.config.ForestMode.STANDARD;\nimport static com.amazon.randomcutforest.config.ForestMode.TIME_AUGMENTED;\nimport static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;\nimport static com.amazon.randomcutforest.config.ImputationMethod.NEXT;\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.config.ImputationMethod.ZERO;\nimport static com.amazon.randomcutforest.config.TransformMethod.NONE;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport 
com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.state.PredictiveRandomCutForestMapper;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData.NormalDistribution;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class PredictiveRandomCutForestTest {\n\n    @Test\n    public void testConfig() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        // have to enable internal shingling or keep it unspecified\n        assertDoesNotThrow(\n                () -> PredictiveRandomCutForest.builder().sampleSize(sampleSize).inputDimensions(baseDimensions)\n                        .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).shingleSize(shingleSize).build());\n\n        PredictiveRandomCutForest forest = PredictiveRandomCutForest.builder().sampleSize(sampleSize)\n                .inputDimensions(baseDimensions).randomSeed(seed).startNormalization(1)\n                .forestMode(ForestMode.TIME_AUGMENTED).shingleSize(shingleSize).build();\n        assertNotNull(((Preprocessor) forest.getPreprocessor()).getInitialTimeStamps());\n        assertEquals(forest.getForest().getDimensions(), (baseDimensions + 1) * shingleSize);\n        assertThrows(IllegalArgumentException.class,\n                () -> PredictiveRandomCutForest.builder().inputDimensions(baseDimensions).randomSeed(seed)\n                        .forestMode(STANDARD).weights(new double[] { -1.0, 0.0 
}).shingleSize(shingleSize).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> PredictiveRandomCutForest.builder().inputDimensions(baseDimensions).randomSeed(seed)\n                        .forestMode(STANDARD).startNormalization(-10).shingleSize(shingleSize).threadPoolSize(1)\n                        .build());\n        assertThrows(IllegalArgumentException.class,\n                () -> PredictiveRandomCutForest.builder().inputDimensions(baseDimensions).randomSeed(seed)\n                        .forestMode(STANDARD).outputAfter(1).startNormalization(shingleSize + 10)\n                        .shingleSize(shingleSize).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> PredictiveRandomCutForest.builder().inputDimensions(baseDimensions).randomSeed(seed)\n                        .forestMode(STANDARD).shingleSize(shingleSize).transformMethod(NORMALIZE)\n                        .startNormalization(111).stopNormalization(100).build());\n    }\n\n    public void simpleExample(int dataSize, TransformMethod method, ForestMode mode, double error) {\n        int shingleSize = 1;\n        int numberOfTrees = 100;\n        int sampleSize = 256;\n\n        // 5 dimensions, three are known and 4,5 th unknown (and stochastic)\n        int baseDimensions = 5;\n\n        PredictiveRandomCutForest forest = new PredictiveRandomCutForest.Builder<>().inputDimensions(baseDimensions)\n                .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .forestMode(mode).startNormalization(32).transformMethod(method).build();\n\n        long seed = 17;\n\n        NormalDistribution normal = new NormalDistribution(new Random(seed));\n        double total = 0;\n        double extTotal = 0;\n        Random random = new Random(seed + 10);\n        for (int i = 0; i < dataSize; i++) {\n            float[] record = generateRecordKey(random);\n            
checkArgument(record[3] == 0, \" should not be filled\");\n            checkArgument(record[4] == 0, \" should not be filled\");\n\n            SampleSummary answer = forest.predict(record, 0, new int[] { 3, 4 });\n            assertEquals(answer.summaryPoints.length, answer.measure.length);\n            fillInValues(record, random, normal);\n            forest.update(record, 0);\n            double tag = Double.MAX_VALUE;\n            double ext = Double.MAX_VALUE;\n            for (int y = 0; y < answer.summaryPoints.length; y++) {\n                double t = Summarizer.L2distance(record, answer.summaryPoints[y]);\n                double u = Summarizer.L2distance(new float[5], answer.measure[y]);\n                if (t < tag) {\n                    tag = t;\n                    ext = u;\n                }\n            }\n\n            if (i > forest.forest.getOutputAfter()) {\n                total += tag;\n                extTotal += ext;\n            }\n        }\n\n        assertTrue(5 * error > total / (dataSize - forest.getForest().getOutputAfter()));\n        assertTrue(5 * error > extTotal / (dataSize - forest.getForest().getOutputAfter()));\n\n        PredictiveRandomCutForestMapper mapper = new PredictiveRandomCutForestMapper();\n        PredictiveRandomCutForest second = mapper.toModel(mapper.toState(forest));\n        assertArrayEquals(second.preprocessor.getLastShingledPoint(), forest.preprocessor.getLastShingledPoint(),\n                1e-10f);\n    }\n\n    @Test\n    public void configTest() {\n        simpleExample(1000, NORMALIZE, STANDARD, 2);\n        simpleExample(1000, NORMALIZE, TIME_AUGMENTED, 2);\n        simpleExample(1000, NONE, STANDARD, 2);\n        simpleExample(1000, NONE, TIME_AUGMENTED, 2);\n    }\n\n    float[] generateRecordKey(Random random) {\n        float[] record = new float[5];\n        double firstToss = random.nextDouble();\n        double secondToss = random.nextDouble();\n        double thirdToss = 
random.nextDouble();\n        if (firstToss < 0.8) {\n            record[0] = 1.0f;\n            if (secondToss < 0.8) {\n                record[1] = 19;\n            } else {\n                record[1] = 25;\n            }\n            record[2] = (float) thirdToss * 10;\n        } else {\n            record[0] = 0.0f;\n            if (secondToss < 0.3) {\n                record[1] = 16;\n                record[2] = 12;\n            } else {\n                record[1] = 20;\n                record[2] = 4;\n            }\n        }\n        return record;\n    }\n\n    void fillInValues(float[] record, Random random, NormalDistribution normal) {\n        if (record[0] < 0.5) {\n            double next = random.nextDouble();\n            record[3] = (float) ((next < 0.5) ? normal.nextDouble(20, 5) : normal.nextDouble(40, 5));\n            record[4] = (float) normal.nextDouble(30, 3);\n        } else {\n            if (record[1] < 20) {\n                record[3] = (float) normal.nextDouble(30, 10);\n                record[4] = (float) normal.nextDouble(10, 3);\n            } else {\n                if (record[2] < 6) {\n                    double next = random.nextDouble();\n                    record[3] = (float) ((next < 0.3) ? normal.nextDouble(20, 5) : normal.nextDouble(40, 3));\n                    record[4] = (float) normal.nextDouble(50, 1);\n                } else {\n                    double next = random.nextDouble();\n                    record[3] = (float) normal.nextDouble(30, 1);\n                    record[4] = (float) ((next < 0.7) ? 
normal.nextDouble(10, 3) : normal.nextDouble(30, 5));\n                }\n            }\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(ImputationMethod.class)\n    void testImpute(ImputationMethod method) {\n        int baseDimensions = 1;\n\n        // long seed = new Random().nextLong();\n\n        // shingle size 1 ie not useful for impute\n        assertThrows(IllegalArgumentException.class, () -> {\n            PredictiveRandomCutForest forest = PredictiveRandomCutForest.builder().inputDimensions(baseDimensions)\n                    .randomSeed(0).forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(method).shingleSize(1)\n                    .build();\n        });\n\n        int newShingleSize = 4;\n\n        PredictiveRandomCutForest forest = PredictiveRandomCutForest.builder().inputDimensions(baseDimensions)\n                .randomSeed(42).forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(method)\n                .transformMethod(NORMALIZE).storeSequenceIndexesEnabled(true).shingleSize(newShingleSize)\n                .useImputedFraction(0.76).fillValues(new double[] { 0 }).build();\n\n        float[] fixedData = new float[] { 1.0f };\n        float[] newData = new float[] { 10.0f };\n        float[] negativeData = new float[] { -10.0f };\n        Random random = new Random(0);\n        int count = 0;\n        for (int i = 0; i < 200 + new Random().nextInt(100); i++) {\n            long timeStamp = (long) count * 113 + random.nextInt(10);\n            float[] test = (random.nextDouble() < 0.5) ? 
newData : negativeData;\n            double scoreA = forest.getExpectedInverseDepthScore(test, timeStamp);\n            assertTrue(scoreA == 0.0 || scoreA > 2.0);\n            double scoreB = forest.getExpectedInverseDepthAttribution(test, timeStamp).getHighLowSum();\n            assertEquals(scoreA, scoreB, 1e-6);\n            double scoreC = forest.getRCFDistanceAttribution(test, timeStamp).getHighLowSum();\n            assertTrue(scoreC == 0.0 || scoreC > 8.0);\n            if (i != 20 && random.nextDouble() < 0.9) {\n                // few drops -- and definitely one during normalization\n                forest.update(fixedData, timeStamp);\n            } else {\n                // note that the large should be imputed away\n                forest.update(test, timeStamp, new int[] { 0 });\n            }\n            ++count;\n        }\n\n        long timestamp = (long) count * 113 + 1000;\n        double score = forest.getExpectedInverseDepthScore(newData, timestamp);\n        assertEquals(score, forest.getExpectedInverseDepthAttribution(newData, timestamp).getHighLowSum(), 1e-6);\n        assertTrue(score > 1.0);\n        if (method != NEXT && method != ZERO && method != FIXED_VALUES) {\n            if (method == RCF) {\n                SampleSummary summary = forest.predict(newData, timestamp, new int[] { 0 });\n                assertArrayEquals(summary.summaryPoints[0], fixedData, 1e-6f);\n            }\n        }\n        assertEquals(forest.getForest().getTotalUpdates(), count);\n        // the next gap is 1226 + 113 which is about 11 times 113\n        long newstamp = (long) count * 113 + 1226;\n        assertEquals(11, forest.preprocessor.numberOfImputes(newstamp));\n        forest.update(newData, newstamp);\n\n        // time has to increase for streamingImpute\n        assertThrows(IllegalArgumentException.class, () -> {\n            forest.update(newData, newstamp - 1);\n        });\n    }\n\n    @ParameterizedTest\n    
@EnumSource(TransformMethod.class)\n    public void timeAugmentedTest(TransformMethod transformMethod) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n\n        int numTrials = 1; // test is exact equality, reducing the number of trials\n        int numberOfTrees = 30; // and using fewer trees to speed up test\n        int length = 10 * sampleSize;\n        int dataSize = 2 * length;\n        for (int i = 0; i < numTrials; i++) {\n            long seed = new Random().nextLong();\n            System.out.println(\"seed = \" + seed);\n\n            PredictiveRandomCutForest first = PredictiveRandomCutForest.builder().inputDimensions(baseDimensions)\n                    .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                    .forestMode(ForestMode.STANDARD).transformMethod(transformMethod).outputAfter(32)\n                    .initialAcceptFraction(0.125).build();\n            PredictiveRandomCutForest second = PredictiveRandomCutForest.builder().inputDimensions(baseDimensions)\n                    .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                    .forestMode(ForestMode.TIME_AUGMENTED).weightTime(0).transformMethod(transformMethod)\n                    .outputAfter(32).initialAcceptFraction(0.125).build();\n\n            Random noise = new Random(0);\n\n            // change the last argument seed for a different run\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1,\n                    50, 100, 5, seed, baseDimensions);\n\n            int count = 0;\n            for (int j = 0; j < length; j++) {\n\n                long timestamp = 100 * count + noise.nextInt(10) - 5;\n                assertEquals(first.getExpectedInverseDepthScore(toFloatArray(dataWithKeys.data[j]), 
timestamp),\n                        second.getExpectedInverseDepthScore(toFloatArray(dataWithKeys.data[j]), timestamp));\n                first.update(toFloatArray(dataWithKeys.data[j]), timestamp);\n                second.update(toFloatArray(dataWithKeys.data[j]), timestamp);\n                // grade will not be the same because dimension changes\n                ++count;\n            }\n\n            PredictiveRandomCutForestMapper mapper = new PredictiveRandomCutForestMapper();\n            PredictiveRandomCutForest third = mapper.toModel(mapper.toState(second));\n            for (int j = length; j < 2 * length; j++) {\n\n                // can be a different gap\n                long timestamp = 150 * count + noise.nextInt(10) - 5;\n                assertEquals(first.getExpectedInverseDepthScore(toFloatArray(dataWithKeys.data[j]), timestamp),\n                        second.getExpectedInverseDepthScore(toFloatArray(dataWithKeys.data[j]), timestamp));\n                assertEquals(first.getExpectedInverseDepthScore(toFloatArray(dataWithKeys.data[j]), timestamp),\n                        third.getExpectedInverseDepthScore(toFloatArray(dataWithKeys.data[j]), timestamp));\n                first.update(toFloatArray(dataWithKeys.data[j]), timestamp);\n                second.update(toFloatArray(dataWithKeys.data[j]), timestamp);\n                third.update(toFloatArray(dataWithKeys.data[j]), timestamp);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/RandomCutForestBuilderTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.validateInternalState;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.store.PointStore;\n\npublic class RandomCutForestBuilderTest {\n\n    private int numberOfTrees;\n    private int sampleSize;\n    private int outputAfter;\n    private int dimensions;\n    private double lambda;\n    private long randomSeed;\n    private int threadPoolSize;\n    private RandomCutForest forest;\n\n    public static final int DEFAULT_OUTPUT_AFTER_FRACTION = 4;\n\n    @BeforeEach\n    public void setUp() {\n\n        numberOfTrees = 99;\n        sampleSize = 201;\n        outputAfter = 201 / 5;\n        dimensions = 2;\n        lambda = 0.12;\n        randomSeed = 12345;\n        threadPoolSize = 9;\n\n        forest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize).outputAfter(outputAfter)\n                
.dimensions(dimensions).timeDecay(lambda).randomSeed(randomSeed).storeSequenceIndexesEnabled(true)\n                .centerOfMassEnabled(true).parallelExecutionEnabled(true).threadPoolSize(threadPoolSize).build();\n    }\n\n    @Test\n    public void testForestBuilderWithCustomArguments() {\n        assertEquals(numberOfTrees, forest.getNumberOfTrees());\n        assertEquals(sampleSize, forest.getSampleSize());\n        assertEquals(outputAfter, forest.getOutputAfter());\n        assertEquals(dimensions, forest.getDimensions());\n        assertEquals(lambda, forest.getTimeDecay());\n        assertTrue(forest.isStoreSequenceIndexesEnabled());\n        assertTrue(forest.isCenterOfMassEnabled());\n        assertTrue(forest.isParallelExecutionEnabled());\n        assertEquals(threadPoolSize, forest.getThreadPoolSize());\n    }\n\n    @Test\n    public void testDefaultForestWithDimensionArgument() {\n        RandomCutForest f = RandomCutForest.defaultForest(10);\n        assertEquals(10, f.getDimensions());\n        assertEquals(256, f.getSampleSize());\n        assertEquals(256 / DEFAULT_OUTPUT_AFTER_FRACTION, f.getOutputAfter());\n        assertFalse(f.isStoreSequenceIndexesEnabled());\n        assertFalse(f.isCenterOfMassEnabled());\n        assertFalse(f.isParallelExecutionEnabled());\n        assertEquals(0, f.getThreadPoolSize());\n    }\n\n    @Test\n    public void testDefaultForestWithDimensionAndRandomSeedArguments() {\n        RandomCutForest f = RandomCutForest.defaultForest(11, 123);\n        assertEquals(11, f.getDimensions());\n        assertEquals(256, f.getSampleSize());\n        assertEquals(256 / DEFAULT_OUTPUT_AFTER_FRACTION, f.getOutputAfter());\n        assertFalse(f.isStoreSequenceIndexesEnabled());\n        assertFalse(f.isCenterOfMassEnabled());\n        assertFalse(f.isParallelExecutionEnabled());\n        assertEquals(0, f.getThreadPoolSize());\n    }\n\n    @Test\n    public void testDefaultForestWithCustomOutputAfterArgument() {\n        
RandomCutForest f = RandomCutForest.defaultForest(10);\n        assertEquals(10, f.getDimensions());\n        assertEquals(256, f.getSampleSize());\n        assertEquals(256 / DEFAULT_OUTPUT_AFTER_FRACTION, f.getOutputAfter());\n        assertFalse(f.isStoreSequenceIndexesEnabled());\n        assertFalse(f.isCenterOfMassEnabled());\n        assertFalse(f.isParallelExecutionEnabled());\n        assertEquals(0, f.getThreadPoolSize());\n    }\n\n    @Test\n    public void testForestBuilderWithDefaultParallelExecutionThreadPoolSize() {\n        RandomCutForest forest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .outputAfter(outputAfter).dimensions(dimensions).timeDecay(lambda).randomSeed(randomSeed)\n                .storeSequenceIndexesEnabled(true).centerOfMassEnabled(true).parallelExecutionEnabled(true).build();\n        assertEquals(numberOfTrees, forest.getNumberOfTrees());\n        assertEquals(sampleSize, forest.getSampleSize());\n        assertEquals(outputAfter, forest.getOutputAfter());\n        assertEquals(dimensions, forest.getDimensions());\n        assertEquals(lambda, forest.getTimeDecay());\n        assertTrue(forest.isStoreSequenceIndexesEnabled());\n        assertTrue(forest.isCenterOfMassEnabled());\n        assertTrue(forest.isParallelExecutionEnabled());\n        assertEquals(Runtime.getRuntime().availableProcessors() - 1, forest.getThreadPoolSize());\n    }\n\n    @Test\n    public void testForestBuilderWithDefaultLambdaValue() {\n        RandomCutForest forest = RandomCutForest.builder().dimensions(4).sampleSize(sampleSize).build();\n        assertEquals(1.0 / (RandomCutForest.DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY * sampleSize),\n                forest.getTimeDecay());\n    }\n\n    @Test\n    public void testIllegalExceptionIsThrownWhenNumberOfTreesIsZero() {\n        assertThrows(IllegalArgumentException.class, () -> RandomCutForest.builder().numberOfTrees(0)\n                
.sampleSize(sampleSize).dimensions(dimensions).timeDecay(lambda).build());\n    }\n\n    @Test\n    public void testIllegalExceptionIsThrownWhenSampleSizeIsZero() {\n        assertThrows(IllegalArgumentException.class, () -> RandomCutForest.builder().numberOfTrees(numberOfTrees)\n                .sampleSize(0).dimensions(dimensions).timeDecay(lambda).build());\n    }\n\n    @Test\n    public void testIllegalExceptionIsThrownWhenOutputAfterIsNegative() {\n        assertThrows(IllegalArgumentException.class, () -> RandomCutForest.builder().numberOfTrees(numberOfTrees)\n                .sampleSize(sampleSize).outputAfter(-10).dimensions(dimensions).timeDecay(lambda).build());\n    }\n\n    @Test\n    public void testIllegalExceptionIsNotThrownWhenOutputAfterIsGreaterThanSample() {\n        assertDoesNotThrow(() -> RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .outputAfter(sampleSize + 1).dimensions(dimensions).timeDecay(lambda).build());\n    }\n\n    @Test\n    public void testIllegalExceptionIsThrownWhenDimensionIsNotProvided() {\n        assertThrows(IllegalArgumentException.class, () -> RandomCutForest.builder().numberOfTrees(numberOfTrees)\n                .sampleSize(sampleSize).timeDecay(lambda).build());\n    }\n\n    @Test\n    public void testIllegalExceptionIsThrownWhenLambdaIsNegative() {\n        assertThrows(IllegalArgumentException.class, () -> RandomCutForest.builder().numberOfTrees(numberOfTrees)\n                .sampleSize(sampleSize).dimensions(dimensions).timeDecay(-0.1).build());\n    }\n\n    @Test\n    public void testIllegalExceptionIsThrownWhenPoolSizeIsZero() {\n        assertThrows(IllegalArgumentException.class,\n                () -> RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                        .dimensions(dimensions).threadPoolSize(0).parallelExecutionEnabled(true).build());\n    }\n\n    @Test\n    public void 
testIllegalExceptionIsThrownWhenPoolSizeIsNegative() {\n        assertThrows(IllegalArgumentException.class, () -> RandomCutForest.builder().numberOfTrees(numberOfTrees)\n                .sampleSize(sampleSize).dimensions(dimensions).threadPoolSize(-10).build());\n    }\n\n    @Test\n    public void testPoolSizeIsZeroWhenParallelExecutionIsDisabled() {\n        RandomCutForest f = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(dimensions).parallelExecutionEnabled(false).build();\n\n        assertFalse(f.isParallelExecutionEnabled());\n        assertEquals(0, f.getThreadPoolSize());\n    }\n\n    @Test\n    public void testShingleSize() {\n        assertThrows(IllegalArgumentException.class,\n                () -> RandomCutForest.builder().dimensions(dimensions).shingleSize(3).build());\n    }\n\n    @Test\n    public void testCache() {\n        assertThrows(IllegalArgumentException.class,\n                () -> RandomCutForest.builder().dimensions(dimensions).boundingBoxCacheFraction(-1).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> RandomCutForest.builder().dimensions(dimensions).boundingBoxCacheFraction(2).build());\n    }\n\n    @Test\n    public void initalPointStore() {\n        assertThrows(IllegalArgumentException.class,\n                () -> RandomCutForest.builder().dimensions(1).initialPointStoreSize(-1).build());\n        RandomCutForest f = RandomCutForest.builder().dimensions(1).numberOfTrees(1).initialPointStoreSize(10)\n                .dynamicResizingEnabled(true).build();\n        assertEquals(((PointStore) f.stateCoordinator.getStore()).getCapacity(), 512);\n        assertEquals(((PointStore) f.stateCoordinator.getStore()).getCurrentStoreCapacity(), 10);\n        for (int i = 0; i < 1000; i++) {\n            f.update(new double[] { new Random().nextDouble() });\n        }\n        assertThrows(IllegalStateException.class, () -> 
validateInternalState(false, \"message\"));\n        validateInternalState(((PointStore) f.stateCoordinator.getStore()).getCurrentStoreCapacity() > 10, \"error\");\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/RandomCutForestConsistencyFunctionalTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\n/**\n * This class validates that forests configured with different execution modes\n * (sequential or parallel) or different internal data representations are\n * executing the algorithm steps in the same way.\n */\n@Tag(\"functional\")\npublic class RandomCutForestConsistencyFunctionalTest {\n\n    private int dimensions = 5;\n    private int sampleSize = 128;\n    private long randomSeed = 123L;\n    private int testSize = 2048;\n\n    @Test\n    public void testConsistentScoring() {\n        RandomCutForest.Builder<?> builder = RandomCutForest.builder().dimensions(dimensions).sampleSize(sampleSize)\n                .randomSeed(randomSeed);\n\n        RandomCutForest pointerCachedSequential = builder.compact(false).boundingBoxCacheFraction(1.0)\n                .parallelExecutionEnabled(false).build();\n        RandomCutForest pointerCachedParallel = builder.compact(false).boundingBoxCacheFraction(1.0)\n                .parallelExecutionEnabled(true).build();\n   
     RandomCutForest pointerCachedRandomSequential = builder.compact(false)\n                .boundingBoxCacheFraction(new Random().nextDouble()).parallelExecutionEnabled(false).build();\n        RandomCutForest pointerCachedRandomParallel = builder.compact(false)\n                .boundingBoxCacheFraction(new Random().nextDouble()).parallelExecutionEnabled(true).build();\n        RandomCutForest pointerUncachedSequential = builder.compact(false).boundingBoxCacheFraction(0.0)\n                .parallelExecutionEnabled(false).build();\n        RandomCutForest pointerUncachedParallel = builder.compact(false).boundingBoxCacheFraction(0.0)\n                .parallelExecutionEnabled(true).build();\n        RandomCutForest compactCachedSequential = builder.compact(true).boundingBoxCacheFraction(1.0)\n                .parallelExecutionEnabled(false).build();\n        RandomCutForest compactCachedParallel = builder.compact(true).boundingBoxCacheFraction(1.0)\n                .parallelExecutionEnabled(true).build();\n        RandomCutForest compactUncachedSequential = builder.compact(true).boundingBoxCacheFraction(0.0)\n                .parallelExecutionEnabled(false).build();\n        RandomCutForest compactUncachedParallel = builder.compact(true).boundingBoxCacheFraction(0.0)\n                .parallelExecutionEnabled(true).build();\n        RandomCutForest compactCachedRandomSequential = builder.compact(true)\n                .boundingBoxCacheFraction(new Random().nextDouble()).parallelExecutionEnabled(false).build();\n        RandomCutForest compactCachedRandomParallel = builder.compact(true)\n                .boundingBoxCacheFraction(new Random().nextDouble()).parallelExecutionEnabled(true).build();\n\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        double delta = 1e-10;\n        int anomalies = 0;\n\n        for (double[] point : testData.generateTestData(testSize, dimensions, 99)) {\n            double score = 
pointerCachedSequential.getAnomalyScore(point);\n\n            if (score > 0) {\n                anomalies++;\n            }\n\n            assertEquals(score, pointerCachedParallel.getAnomalyScore(point), delta);\n            assertEquals(score, pointerUncachedSequential.getAnomalyScore(point), delta);\n            assertEquals(score, pointerUncachedParallel.getAnomalyScore(point), delta);\n            assertEquals(score, compactCachedSequential.getAnomalyScore(point), delta);\n            assertEquals(score, compactCachedParallel.getAnomalyScore(point), delta);\n            assertEquals(score, compactUncachedSequential.getAnomalyScore(point), delta);\n            assertEquals(score, compactUncachedParallel.getAnomalyScore(point), delta);\n            assertEquals(score, pointerCachedRandomSequential.getAnomalyScore(point), delta);\n            assertEquals(score, pointerCachedRandomParallel.getAnomalyScore(point), delta);\n            assertEquals(score, compactCachedRandomSequential.getAnomalyScore(point), delta);\n            assertEquals(score, compactCachedRandomParallel.getAnomalyScore(point), delta);\n\n            pointerCachedSequential.update(point);\n            pointerCachedParallel.update(point);\n            pointerUncachedSequential.update(point);\n            pointerUncachedParallel.update(point);\n            pointerCachedRandomSequential.update(point);\n            pointerCachedRandomParallel.update(point);\n            compactCachedSequential.update(point);\n            compactCachedParallel.update(point);\n            compactUncachedSequential.update(point);\n            compactUncachedParallel.update(point);\n            compactCachedRandomSequential.update(point);\n            compactCachedRandomParallel.update(point);\n        }\n\n        // verify that the test is nontrivial\n        assertTrue(anomalies > 0);\n    }\n\n    @Test\n    public void testConsistentScoringSinglePrecision() {\n        RandomCutForest.Builder<?> builder = 
RandomCutForest.builder().dimensions(dimensions).sampleSize(sampleSize)\n                .randomSeed(randomSeed).parallelExecutionEnabled(false).compact(true);\n\n        RandomCutForest compactFloatCached = builder.boundingBoxCacheFraction(1.0).precision(Precision.FLOAT_32)\n                .build();\n        RandomCutForest compactFloatCachedParallel = builder.boundingBoxCacheFraction(1.0).precision(Precision.FLOAT_32)\n                .parallelExecutionEnabled(true).build();\n        RandomCutForest compactFloatUncached = builder.boundingBoxCacheFraction(0.0).precision(Precision.FLOAT_32)\n                .build();\n        RandomCutForest compactFloatCachedRandom = builder.boundingBoxCacheFraction(new Random().nextDouble())\n                .precision(Precision.FLOAT_32).build();\n        RandomCutForest compactFloatCachedRandomParallel = builder.boundingBoxCacheFraction(new Random().nextDouble())\n                .precision(Precision.FLOAT_32).parallelExecutionEnabled(true).build();\n        RandomCutForest compactFloatUncachedParallel = builder.boundingBoxCacheFraction(0.0)\n                .precision(Precision.FLOAT_32).parallelExecutionEnabled(true).build();\n        RandomCutForest compactDoubleCached = builder.boundingBoxCacheFraction(1.0).precision(Precision.FLOAT_64)\n                .build();\n\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        int anomalies = 0;\n\n        for (double[] point : testData.generateTestData(testSize, dimensions, 99)) {\n            double score = compactFloatCached.getAnomalyScore(point);\n\n            if (score > 0) {\n                anomalies++;\n            }\n\n            assertEquals(score, compactFloatUncached.getAnomalyScore(point), 1e-10);\n            assertEquals(score, compactFloatUncachedParallel.getAnomalyScore(point), 1e-10);\n            assertEquals(score, compactFloatCachedRandom.getAnomalyScore(point), 1e-10);\n            assertEquals(score, 
compactFloatCachedRandomParallel.getAnomalyScore(point), 1e-10);\n\n            // we expect some loss of precision when comparing to the score computed as a\n            // double\n            assertEquals(score, compactDoubleCached.getAnomalyScore(point), 1e-2);\n\n            compactFloatCached.update(point);\n            compactFloatCachedParallel.update(point);\n            compactFloatUncached.update(point);\n            compactFloatUncachedParallel.update(point);\n            compactFloatCachedRandom.update(point);\n            compactFloatCachedRandomParallel.update(point);\n            compactDoubleCached.update(point);\n        }\n\n        // verify that the test is nontrivial\n        assertTrue(anomalies > 0);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/RandomCutForestFunctionalTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.Mockito.spy;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Random;\nimport java.util.stream.IntStream;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.BeforeAll;\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtensionContext;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.ArgumentsProvider;\nimport org.junit.jupiter.params.provider.ArgumentsSource;\nimport org.junit.jupiter.params.provider.CsvSource;\n\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\n@Tag(\"functional\")\npublic class RandomCutForestFunctionalTest {\n\n    private static int numberOfTrees;\n    private static int sampleSize;\n    private static int dimensions;\n    private static int randomSeed;\n    private static RandomCutForest parallelExecutionForest;\n    private static RandomCutForest 
singleThreadedForest;\n    private static RandomCutForest forestSpy;\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n\n    @BeforeAll\n    public static void oneTimeSetUp() { // this is a stochastic dataset and will have different values for different\n                                        // runs\n        numberOfTrees = 100;\n        sampleSize = 256;\n        dimensions = 3;\n        randomSeed = 123;\n\n        parallelExecutionForest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(dimensions).randomSeed(randomSeed).centerOfMassEnabled(true)\n                .storeSequenceIndexesEnabled(true).build();\n\n        singleThreadedForest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(dimensions).randomSeed(randomSeed).centerOfMassEnabled(true)\n                .storeSequenceIndexesEnabled(true).parallelExecutionEnabled(false).build();\n\n        dataSize = 10_000;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 5.0;\n        anomalySigma = 1.5;\n        transitionToAnomalyProbability = 0.01;\n        transitionToBaseProbability = 0.4;\n\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, dimensions);\n\n        for (int i = 0; i < dataSize; i++) {\n            parallelExecutionForest.update(data[i]);\n            singleThreadedForest.update(data[i]);\n        }\n    }\n\n    // Use this ArgumentsProvider to run a test on both single-threaded and\n    // multi-threaded 
forests\n    static class TestForestProvider implements ArgumentsProvider {\n        @Override\n        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {\n            return Stream.of(singleThreadedForest, parallelExecutionForest).map(Arguments::of);\n        }\n    }\n\n    // displacement scoring (multiplied by the normalizer log_2(treesize)) on the\n    // fly !!\n    // as introduced in Robust Random Cut Forest Based Anomaly Detection in Streams\n    // @ICML 2016. This does not address co-displacement (duplicity).\n    // seen function is (x,y) -> 1 which basically ignores everything\n    // unseen function is (x,y) -> y which corresponds to mass of sibling\n    // damp function is (x,y) -> 1 which is no dampening\n\n    public static double getDisplacementScore(RandomCutForest forest, float[] point) {\n        return forest.getDynamicScore(point, 0, (x, y) -> 1.0, (x, y) -> y, (x, y) -> 1.0);\n    }\n\n    public double getDisplacementScoreApproximate(RandomCutForest forest, float[] point, double precision) {\n        return forest.getApproximateDynamicScore(point, precision, true, 0, (x, y) -> 1.0, (x, y) -> y, (x, y) -> 1.0);\n    }\n\n    // Expected height (multiplied by the normalizer log_2(treesize) ) scoring on\n    // the fly !!\n    // seen function is (x,y) -> x+log(Y)/log(2) which depth + duplicity converted\n    // to depth\n    // unseen function is (x,y) -> x which is depth\n    // damp function is (x,y) -> 1 which is no dampening\n    // note that this is *NOT* anything like the expected height in\n    // Isolation Forest/Random Forest algorithms, because here\n    // the Expected height takes into account the contrafactual\n    // that \"what would have happened had the point been available during\n    // the construction of the forest\"\n\n    public static double getHeightScore(RandomCutForest forest, float[] point) {\n        return forest.getDynamicScore(point, 0, (x, y) -> 1.0 * (x + 
Math.log(y)), (x, y) -> 1.0 * x, (x, y) -> 1.0);\n    }\n\n    public double getHeightScoreApproximate(RandomCutForest forest, float[] point, double precision) {\n        return forest.getApproximateDynamicScore(point, precision, false, 0, (x, y) -> 1.0 * (x + Math.log(y)),\n                (x, y) -> 1.0 * x, (x, y) -> 1.0);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    private void testGetAnomalyScore(RandomCutForest forest) {\n        float[] point = { 0.0f, 0.0f, 0.0f };\n        double score = forest.getAnomalyScore(point);\n        assertTrue(score < 1);\n        assertTrue(forest.getApproximateAnomalyScore(point) < 1);\n\n        /**\n         * This part demonstrates testing of dynamic scoring where score functions are\n         * changed on the fly.\n         */\n\n        // displacement scoring on the fly!!\n\n        score = getDisplacementScore(forest, point);\n        assertTrue(score < 25);\n        // testing that the leaf exclusion does not affect anything\n        // tests the masking effect\n        assertTrue(forest.getDynamicScore(point, 1, (x, y) -> 1.0, (x, y) -> y, (x, y) -> 1.0) < 25);\n        double newScore = getDisplacementScoreApproximate(forest, point, 0);\n        assertEquals(score, newScore, 1E-10);\n        double otherScore = getDisplacementScoreApproximate(forest, point, 0.1);\n        assertTrue(otherScore < 25);\n        // the approximation bound is increased to accomodate the\n        // larger variance of the probabilistic test\n        // adjust the parameters in early convergence to\n        // get 0.1*score+0.1\n        assertEquals(otherScore, newScore, 0.3 * score + 0.1);\n\n        /**\n         * Using expected height -- note that this height is not the same as the height\n         * in a random forest, because it accounts for the contrafactual of having\n         * constructed the forest with the knowledge of the point.\n         */\n\n        score = getHeightScore(forest, 
point);\n        assertTrue(score > 50);\n        newScore = getHeightScoreApproximate(forest, point, 0);\n        assertEquals(score, newScore, 1E-10);\n        otherScore = getHeightScoreApproximate(forest, point, 0.1);\n        assertTrue(otherScore > 50);\n        // the approximation bound is increased to accomodate the\n        // larger variance of the probabilistic test\n        assertEquals(score, otherScore, 0.3 * score + 0.1);\n\n        point = new float[] { 8.0f, 8.0f, 8.0f };\n        score = forest.getAnomalyScore(point);\n        assertTrue(score > 1);\n        assertTrue(forest.getApproximateAnomalyScore(point) > 1);\n\n        // displacement scoring on the fly !!\n        score = getDisplacementScore(forest, point);\n        assertTrue(score > 100);\n        // testing masking\n        assertTrue(forest.getDynamicScore(point, 1, (x, y) -> 1.0, (x, y) -> y, (x, y) -> 1.0) > 100);\n        newScore = getDisplacementScoreApproximate(forest, point, 0);\n        assertEquals(score, newScore, 1E-10);\n        otherScore = getDisplacementScoreApproximate(forest, point, 0.1);\n        assertTrue(otherScore > 100);\n        // the approximation bound is increased to accomodate the\n        // larger variance of the probabilistic test\n        assertEquals(score, otherScore, 0.3 * score + 0.1);\n\n        // Expected height scoring on the fly !!\n        score = getHeightScore(forest, point);\n        assertTrue(score < 30);\n        newScore = getHeightScoreApproximate(forest, point, 0);\n        assertEquals(score, newScore, 1E-10);\n        otherScore = getHeightScoreApproximate(forest, point, 0.1);\n        assertTrue(otherScore < 30);\n        // the approximation bound is increased to accomodate the\n        // larger variance of the probabilistic test\n        assertEquals(score, otherScore, 0.3 * score + 0.1);\n\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testSideEffectsA(RandomCutForest forest) 
{\n        double score = forest.getAnomalyScore(new double[] { 0.0, 0.0, 0.0 });\n        NormalMixtureTestData generator2 = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] newData = generator2.generateTestData(dataSize, dimensions);\n        for (int i = 0; i < dataSize; i++) {\n            forest.getAnomalyScore(newData[i]);\n        }\n        double newScore = forest.getAnomalyScore(new double[] { 0.0, 0.0, 0.0 });\n        assertEquals(score, newScore, 10E-10);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testSideEffectsB(RandomCutForest forest) {\n        /* the changes to score and attribution should be in sync */\n        DiVector initial = forest.getAnomalyAttribution(new double[] { 0.0, 0.0, 0.0 });\n        NormalMixtureTestData generator2 = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] newData = generator2.generateTestData(dataSize, dimensions);\n        for (int i = 0; i < dataSize; i++) {\n            forest.getAnomalyAttribution(newData[i]);\n        }\n        double newScore = forest.getAnomalyScore(new double[] { 0.0, 0.0, 0.0 });\n        DiVector newVector = forest.getAnomalyAttribution(new double[] { 0.0, 0.0, 0.0 });\n        assertEquals(initial.getHighLowSum(), newVector.getHighLowSum(), 10E-10);\n        assertEquals(initial.getHighLowSum(), newScore, 1E-10);\n        assertArrayEquals(initial.high, newVector.high, 1E-10);\n        assertArrayEquals(initial.low, newVector.low, 1E-10);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testGetAnomalyAttribution(RandomCutForest forest) {\n\n        /* This method checks that the scores and attributions are consistent */\n\n        double[] point = { 0.0, 
0.0, 0.0 };\n        DiVector seenResult = forest.getAnomalyAttribution(point);\n        double seenScore = forest.getAnomalyScore(point);\n        assertTrue(seenResult.getHighLowSum(0) < 0.5);\n        assertTrue(seenResult.getHighLowSum(1) < 0.5);\n        assertTrue(seenResult.getHighLowSum(2) < 0.5);\n        assertTrue(seenScore < 1.0);\n        assertEquals(seenScore, seenResult.getHighLowSum(), 1E-10);\n\n        DiVector likelyResult = forest.getApproximateAnomalyAttribution(point);\n        double score = forest.getApproximateAnomalyScore(point);\n        assertTrue(likelyResult.getHighLowSum(0) < 0.5);\n        assertTrue(likelyResult.getHighLowSum(1) < 0.5);\n        assertTrue(likelyResult.getHighLowSum(2) < 0.5);\n        assertEquals(score, likelyResult.getHighLowSum(), 0.1);\n        assertEquals(seenResult.getHighLowSum(), likelyResult.getHighLowSum(), 0.1);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testMultipleAttributions(RandomCutForest forest) {\n\n        /**\n         * We will test the attribution over random runs. 
Narrow tests can fail -- we\n         * will keep track of the aggregate number of narrow tests and test for large\n         * characterization that would be misleading in failure.\n         */\n        int hardPass = 0;\n        int causal = 0;\n        double[] point = { 6.0, 0.0, 0.0 };\n        DiVector result = forest.getAnomalyAttribution(point);\n        assertTrue(result.low[0] < 0.2);\n        if (result.getHighLowSum(1) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(2) < 0.5)\n            ++hardPass;\n        assertTrue(result.getHighLowSum(1) + result.getHighLowSum(2) < 1.0);\n        assertTrue(result.high[0] > forest.getAnomalyScore(point) / 3);\n        if (result.high[0] > 0.5 * forest.getAnomalyScore(point))\n            ++causal;\n\n        // the last line states that first coordinate was high and was a majority\n        // contributor to the score\n        // the previous test states that the contribution is twice the average of the 12\n        // possible contributors.\n        // these tests all subparts of the score at once\n\n        point = new double[] { -6.0, 0.0, 0.0 };\n        result = forest.getAnomalyAttribution(point);\n        assertTrue(result.getHighLowSum() > 1.0);\n        assertTrue(result.high[0] < 0.5);\n        if (result.getHighLowSum(1) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(2) < 0.5)\n            ++hardPass;\n        assertTrue(result.low[0] > forest.getAnomalyScore(point) / 3);\n        if (result.low[0] > 0.5 * forest.getAnomalyScore(point))\n            ++causal;\n\n        point = new double[] { 0.0, 6.0, 0.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        if (result.getHighLowSum(0) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(2) < 0.5)\n            ++hardPass;\n        assertTrue(result.low[1] < 0.5);\n        assertTrue(result.high[1] > forest.getAnomalyScore(point) / 3);\n        
if (result.high[1] > 0.5 * forest.getAnomalyScore(point))\n            ++causal;\n\n        point = new double[] { 0.0, -6.0, 0.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        if (result.getHighLowSum(0) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(2) < 0.5)\n            ++hardPass;\n        assertTrue(result.high[1] < 0.5);\n        assertTrue(result.low[1] > forest.getAnomalyScore(point) / 3);\n        if (result.low[1] > 0.5 * forest.getAnomalyScore(point))\n            ++causal;\n\n        point = new double[] { 0.0, 0.0, 6.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        if (result.getHighLowSum(0) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(1) < 0.5)\n            ++hardPass;\n        assertTrue(result.low[2] < 0.5);\n        assertTrue(result.high[2] > forest.getAnomalyScore(point) / 3);\n        if (result.high[2] > 0.5 * forest.getAnomalyScore(point))\n            ++causal;\n\n        point = new double[] { 0.0, 0.0, -6.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        if (result.getHighLowSum(0) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(1) < 0.5)\n            ++hardPass;\n        assertTrue(result.high[2] < 0.5);\n        assertTrue(result.low[2] > forest.getAnomalyScore(point) / 3);\n        if (result.low[2] > 0.5 * forest.getAnomalyScore(point))\n            ++causal;\n\n        assertTrue(causal >= 5); // maximum is 6; there can be skew in one direction\n\n        point = new double[] { -3.0, 0.0, 0.0 };\n        result = forest.getAnomalyAttribution(point);\n        assertTrue(result.high[0] < 0.5);\n        if (result.getHighLowSum(1) < 0.5)\n            ++hardPass;\n        if (result.getHighLowSum(2) < 0.5)\n            ++hardPass;\n        assertTrue(result.low[0] > 
forest.getAnomalyScore(point) / 3);\n\n        /*\n         * For multiple causes, the relationship of scores only hold for larger\n         * distances.\n         */\n\n        point = new double[] { -3.0, 6.0, 0.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        if (result.low[0] > 0.5)\n            ++hardPass;\n        assertTrue(result.high[0] < 0.5);\n        assertTrue(result.low[1] < 0.5);\n        assertTrue(result.high[1] > 0.5);\n        if (result.high[1] > 0.9)\n            ++hardPass;\n        assertTrue(result.getHighLowSum(2) < 0.5);\n        assertTrue(result.high[1] + result.low[0] > 0.8 * forest.getAnomalyScore(point));\n\n        point = new double[] { 6.0, -3.0, 0.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        assertTrue(result.low[0] < 0.5);\n        assertTrue(result.high[0] > 0.5);\n        if (result.high[0] > 0.9)\n            ++hardPass;\n        if (result.low[1] > 0.5)\n            ++hardPass;\n        assertTrue(result.high[1] < 0.5);\n        assertTrue(result.getHighLowSum(2) < 0.5);\n        assertTrue(result.high[0] + result.low[1] > 0.8 * forest.getAnomalyScore(point));\n\n        point = new double[] { 20.0, -10.0, 0.0 };\n        assertTrue(result.getHighLowSum() > 1.0);\n        result = forest.getAnomalyAttribution(point);\n        assertTrue(result.high[0] + result.low[1] > 0.8 * forest.getAnomalyScore(point));\n        if (result.high[0] > 1.8 * result.low[1])\n            ++hardPass;\n        if (result.low[1] > result.high[0] / 2.2)\n            ++hardPass;\n\n        assertTrue(hardPass >= 15); // maximum is 20\n    }\n\n    @Test\n    public void testUpdateWithSignedZeros() {\n        RandomCutForest forest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(2).dimensions(1)\n                
.randomSeed(randomSeed).centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).build();\n\n        forest.update(new double[] { 0.0 });\n        forest.getAnomalyScore(new double[] { 0.0 });\n        forest.getAnomalyScore(new double[] { -0.0 });\n\n        forest.update(new double[] { -0.0 });\n        forest.getAnomalyScore(new double[] { 0.0 });\n        forest.getAnomalyScore(new double[] { -0.0 });\n    }\n\n    @Test\n    public void testShadowBuffer() {\n        /**\n         * This test checks that the attribution *DOES NOT* change as a ratio as more\n         * copies of the points are added. The shadowbox in\n         * the @DirectionalAttributionVisitor allows us to simulate a deletion without\n         * performing a deletion.\n         *\n         * The goal is to measure the attribution and have many copies of the same point\n         * and eventually the attribution will become uniform in all directions.\n         *\n         * we create a new forest so that other tests are unaffected.\n         */\n        numberOfTrees = 100;\n        sampleSize = 256;\n        dimensions = 3;\n        randomSeed = 123;\n\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(dimensions).randomSeed(randomSeed).centerOfMassEnabled(true).timeDecay(1e-5)\n                .storeSequenceIndexesEnabled(true).build();\n\n        dataSize = 10_000;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 5.0;\n        anomalySigma = 1.5;\n        transitionToAnomalyProbability = 0.01;\n        transitionToBaseProbability = 0.4;\n\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, dimensions);\n\n        for (int i = 0; i < dataSize; i++) {\n            
newForest.update(data[i]);\n        }\n\n        double[] point = new double[] { -8.0, -8.0, 0.0 };\n        DiVector result = newForest.getAnomalyAttribution(point);\n        double score = newForest.getAnomalyScore(point);\n        assertEquals(score, result.getHighLowSum(), 1E-5);\n        assertTrue(score > 2);\n        assertTrue(result.getHighLowSum(2) < 0.2);\n        // the third dimension has little influence in classification\n\n        // this is going to add {8,8,0} into the forest\n        // but not enough to cause large scale changes\n        // note the probability of a tree seeing a change is\n        // 256/10_000\n        for (int i = 0; i < 5; i++) {\n            newForest.update(point);\n        }\n\n        DiVector newResult = newForest.getAnomalyAttribution(point);\n        double newScore = newForest.getAnomalyScore(point);\n\n        assertEquals(newScore, newResult.getHighLowSum(), 1E-5);\n        assertTrue(newScore < score);\n        for (int j = 0; j < 3; j++) {\n            // relationship holds at larger values\n            if (result.high[j] > 0.2) {\n                assertEquals(score * newResult.high[j], newScore * result.high[j], 0.1 * score);\n            } else {\n                assertTrue(newResult.high[j] < 0.2);\n            }\n\n            if (result.low[j] > 0.2) {\n                assertEquals(score * newResult.low[j], newScore * result.low[j], 0.1 * score);\n            } else {\n                assertTrue(newResult.low[j] < 0.2);\n            }\n        }\n\n        // this will make the point an inlier\n        for (int i = 0; i < 5000; i++) {\n            newForest.update(point);\n        }\n\n        DiVector finalResult = newForest.getAnomalyAttribution(point);\n        double finalScore = newForest.getAnomalyScore(point);\n        assertTrue(finalScore < 1);\n        assertEquals(finalScore, finalResult.getHighLowSum(), 1E-5);\n\n        for (int j = 0; j < 3; j++) {\n            // relationship holds at larger 
values\n            if (finalResult.high[j] > 0.2) {\n                assertEquals(score * finalResult.high[j], finalScore * result.high[j], 0.1 * score);\n            } else {\n                assertTrue(newResult.high[j] < 0.2);\n            }\n\n            if (finalResult.low[j] > 0.2) {\n                assertEquals(score * finalResult.low[j], finalScore * result.low[j], 0.1 * score);\n            } else {\n                assertTrue(finalResult.low[j] < 0.2);\n            }\n        }\n\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testSimpleDensity(RandomCutForest forest) {\n\n        DensityOutput output1 = forest.getSimpleDensity(new double[] { 0.0, 0.0, 0.0 });\n        DensityOutput output2 = forest.getSimpleDensity(new double[] { 6.0, 6.0, 0.0 });\n        DensityOutput output3 = forest.getSimpleDensity(new double[] { -4.0, -4.0, 0.0 });\n        DensityOutput output4 = forest.getSimpleDensity(new double[] { -6.0, -6.0, 0.0 });\n\n        assertTrue(output1.getDensity(0.001, 3) > output2.getDensity(0.001, 3));\n        assertTrue(output1.getDensity(0.001, 3) > output3.getDensity(0.001, 3));\n        assertTrue(output1.getDensity(0.001, 3) > output4.getDensity(0.001, 3));\n        assertTrue(output3.getDensity(0.001, 3) > output4.getDensity(0.001, 3));\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testSimpleDensityWhenSamplerNotFullThenDensityIsZero(RandomCutForest forest) {\n        RandomCutForest forestSpy = spy(forest);\n        when(forestSpy.isOutputReady()).thenReturn(false);\n\n        DensityOutput output = forestSpy.getSimpleDensity(new double[] { 0.0, 0.0, 0.0 });\n        assertEquals(0, output.getDensity(0.001, 3));\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestForestProvider.class)\n    public void testImputeMissingValues(RandomCutForest forest) {\n\n        double[] queryPoint = new double[] { Double.NaN, 0.02, 0.01 };\n     
   int numberOfMissingValues = 1;\n        int[] missingIndexes = new int[] { 0 };\n\n        double[] imputedPoint = forest.imputeMissingValues(queryPoint, numberOfMissingValues, missingIndexes);\n        assertEquals(queryPoint[1], imputedPoint[1], 1e-5);\n        assertTrue(Math.abs(imputedPoint[0]) < 0.5);\n    }\n\n    @Test\n    public void getTotalUpdates_returnExpectedSize() {\n        assertEquals(dataSize, singleThreadedForest.getTotalUpdates());\n        assertEquals(dataSize, parallelExecutionForest.getTotalUpdates());\n    }\n\n    @ParameterizedTest(name = \"{index} => numDims={0}, numTrees={1}, numSamples={2}, numTrainSamples={3}, \"\n            + \"numTestSamples={4}, enableParallel={5}, numThreads={6}\")\n    @CsvSource({ \"10, 50, 256, 50000, 0, 0, 0\" })\n    public void dynamicCachingChangeTest(int numDims, int numTrees, int numSamples, int numTrainSamples,\n            int numTestSamples, int enableParallel, int numThreads) {\n        RandomCutForest.Builder<?> forestBuilder = RandomCutForest.builder().dimensions(numDims).numberOfTrees(numTrees)\n                .sampleSize(numSamples).randomSeed(0).boundingBoxCacheFraction(1.0).compact(false);\n        if (enableParallel == 0) {\n            forestBuilder.parallelExecutionEnabled(false);\n        }\n        if (numThreads > 0) {\n            forestBuilder.threadPoolSize(numThreads);\n        }\n        RandomCutForest forest = forestBuilder.build();\n        RandomCutForest anotherForest = RandomCutForest.builder().dimensions(numDims).numberOfTrees(numTrees)\n                .sampleSize(numSamples).randomSeed(0).compact(true).boundingBoxCacheFraction(1.0).build();\n\n        int count = 0;\n        for (double[] point : generate(numTrainSamples, numDims, 0)) {\n            ++count;\n            double score = forest.getAnomalyScore(point);\n            double anotherScore = anotherForest.getAnomalyScore(point);\n            assertEquals(score, anotherScore, 1E-10);\n            
forest.update(point);\n            anotherForest.update(point);\n            if (count % 2000 == 1000) {\n                double fraction = Math.random();\n                // System.out.println(\" second forest fraction \" + fraction);\n                anotherForest.setBoundingBoxCacheFraction(fraction);\n            }\n            if (count % 2000 == 0) {\n                double fraction = Math.random();\n                // System.out.println(\" first forest fraction \" + fraction);\n                forest.setBoundingBoxCacheFraction(fraction);\n            }\n        }\n\n    }\n\n    @ParameterizedTest(name = \"{index} => numDims={0}, numTrees={1}, numSamples={2}, numTrainSamples={3}, \"\n            + \"numTestSamples={4}, enableParallel={5}, numThreads={6}\")\n    @CsvSource({ \"10, 10, 30000, 50000, 0, 0, 0\" })\n    public void dynamicCachingChangeTestLarge(int numDims, int numTrees, int numSamples, int numTrainSamples,\n            int numTestSamples, int enableParallel, int numThreads) {\n        RandomCutForest.Builder<?> forestBuilder = RandomCutForest.builder().dimensions(numDims).numberOfTrees(numTrees)\n                .sampleSize(numSamples).randomSeed(0).boundingBoxCacheFraction(1.0).compact(false);\n        if (enableParallel == 0) {\n            forestBuilder.parallelExecutionEnabled(false);\n        }\n        if (numThreads > 0) {\n            forestBuilder.threadPoolSize(numThreads);\n        }\n        RandomCutForest forest = forestBuilder.build();\n        RandomCutForest anotherForest = RandomCutForest.builder().dimensions(numDims).numberOfTrees(numTrees)\n                .sampleSize(numSamples).randomSeed(0).compact(true).boundingBoxCacheFraction(1.0).build();\n\n        int count = 0;\n        for (double[] point : generate(numTrainSamples, numDims, 0)) {\n            ++count;\n            double score = forest.getAnomalyScore(point);\n            double anotherScore = anotherForest.getAnomalyScore(point);\n            assertEquals(score, 
anotherScore, 1E-10);\n            forest.update(point);\n            anotherForest.update(point);\n        }\n\n    }\n\n    private double[][] generate(int numSamples, int numDimensions, int seed) {\n        return IntStream.range(0, numSamples).mapToObj(i -> new Random(seed + i).doubles(numDimensions).toArray())\n                .toArray(double[][]::new);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/RandomCutForestShingledFunctionalTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys.generateShingledData;\nimport static java.lang.Math.PI;\nimport static java.lang.Math.cos;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeAll;\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport 
com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\nimport com.amazon.randomcutforest.util.ShingleBuilder;\n\n@Tag(\"functional\")\npublic class RandomCutForestShingledFunctionalTest {\n    private static int numberOfTrees;\n    private static int sampleSize;\n    private static int dimensions;\n    private static int randomSeed;\n    private static int shingleSize;\n    private static ShingleBuilder shingleBuilder;\n    private static RandomCutForest forest;\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n\n    @BeforeAll\n    public static void oneTimeSetUp() {\n        numberOfTrees = 100;\n        sampleSize = 256;\n        dimensions = 2;\n        randomSeed = 123;\n        shingleSize = 3;\n\n        shingleBuilder = new ShingleBuilder(dimensions, shingleSize);\n\n        forest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shingleBuilder.getShingledPointSize()).randomSeed(randomSeed).centerOfMassEnabled(true)\n                .initialAcceptFraction(0.5).storeSequenceIndexesEnabled(true).build();\n\n        dataSize = 10_000;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 5.0;\n        anomalySigma = 1.5;\n        transitionToAnomalyProbability = 0.01;\n        transitionToBaseProbability = 0.4;\n\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, dimensions);\n\n        for (int i = 0; i < dataSize; i++) {\n            shingleBuilder.addPoint(data[i]);\n            if (shingleBuilder.isFull()) {\n        
        forest.update(shingleBuilder.getShingle());\n            }\n        }\n    }\n\n    @Test\n    public void testExtrapolateBasic() {\n        double[] result = forest.extrapolateBasic(shingleBuilder.getShingle(), 4, dimensions, false);\n        assertEquals(4 * dimensions, result.length);\n\n        result = forest.extrapolateBasic(shingleBuilder.getShingle(), 4, dimensions, true, 2);\n        assertEquals(4 * dimensions, result.length);\n\n        result = forest.extrapolateBasic(shingleBuilder, 4);\n        assertEquals(4 * dimensions, result.length);\n\n        // use a block size which is too big\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.extrapolateBasic(shingleBuilder.getShingle(), 4, 4, true, 2));\n    }\n\n    @ParameterizedTest\n    @ValueSource(booleans = { true, false })\n    public void InternalShinglingTest(boolean rotation) {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 2;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        System.out.println(seed);\n        Random rng = new Random(seed);\n\n        int numTrials = 3; // test is exact equality, reducing the number of trials\n        int length = 40 * sampleSize;\n\n        for (int i = 0; i < numTrials; i++) {\n\n            int outputAfter = 1 + rng.nextInt(10 * sampleSize);\n            long newSeed = rng.nextLong();\n            RandomCutForest first = new RandomCutForest.Builder<>().compact(true).dimensions(dimensions)\n                    .precision(Precision.FLOAT_32).randomSeed(newSeed).internalShinglingEnabled(true)\n                    .outputAfter(outputAfter + shingleSize - 1).internalRotationEnabled(rotation)\n                    .shingleSize(shingleSize).build();\n\n            RandomCutForest second = new RandomCutForest.Builder<>().compact(true).dimensions(dimensions)\n                    
.precision(Precision.FLOAT_32).randomSeed(newSeed).internalShinglingEnabled(false)\n                    .outputAfter(outputAfter).shingleSize(shingleSize).build();\n\n            RandomCutForest third = new RandomCutForest.Builder<>().compact(true).dimensions(dimensions)\n                    .precision(Precision.FLOAT_32).randomSeed(newSeed).internalShinglingEnabled(false).shingleSize(1)\n                    .outputAfter(outputAfter).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    seed + i, baseDimensions);\n\n            double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, rotation);\n\n            assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);\n\n            int count = shingleSize - 1;\n            // insert initial points\n            for (int j = 0; j < shingleSize - 1; j++) {\n                first.update(dataWithKeys.data[j]);\n            }\n\n            for (int j = 0; j < shingledData.length; j++) {\n                // validate equality of points\n                for (int y = 0; y < baseDimensions; y++) {\n                    int position = (rotation) ? 
(count % shingleSize) : shingleSize - 1;\n                    assertEquals(dataWithKeys.data[count][y], shingledData[j][position * baseDimensions + y], 1e-10);\n                }\n\n                double firstResult = first.getAnomalyScore(dataWithKeys.data[count]);\n                first.update(dataWithKeys.data[count]);\n                ++count;\n                double secondResult = second.getAnomalyScore(shingledData[j]);\n                second.update(shingledData[j]);\n                double thirdResult = third.getAnomalyScore(shingledData[j]);\n                third.update(shingledData[j]);\n\n                assertEquals(firstResult, secondResult, 1e-10);\n                assertEquals(secondResult, thirdResult, 1e-10);\n            }\n            PointStore store = (PointStore) first.getUpdateCoordinator().getStore();\n            assertEquals(store.getCurrentStoreCapacity() * dimensions, store.getStore().length);\n            List<ICluster<float[]>> firstSummary = store.summarize(5, 0.5, 3, 0.8, Summarizer::L2distance, null);\n\n            store = (PointStore) second.getUpdateCoordinator().getStore();\n            assertEquals(store.getCurrentStoreCapacity() * dimensions, store.getStore().length);\n            List<ICluster<float[]>> secondSummary = store.summarize(5, 0.5, 3, 0.8, Summarizer::L2distance, null);\n            assert (secondSummary.size() == firstSummary.size());\n            for (int j = 0; j < firstSummary.size(); j++) {\n                assertEquals(firstSummary.get(j).getWeight(), secondSummary.get(j).getWeight(), 1e-3);\n                assertEquals(firstSummary.get(j).averageRadius(), secondSummary.get(j).averageRadius(), 1e-3);\n            }\n\n            store = (PointStore) third.getUpdateCoordinator().getStore();\n            assertEquals(store.getCurrentStoreCapacity() * dimensions, store.getStore().length);\n            List<ICluster<float[]>> thirdSummary = store.summarize(5, 0.5, 3, 0.8, Summarizer::L2distance, null);\n      
      assert (thirdSummary.size() == firstSummary.size());\n            for (int j = 0; j < firstSummary.size(); j++) {\n                assertEquals(firstSummary.get(j).getWeight(), thirdSummary.get(j).getWeight(), 1e-3);\n                assertEquals(firstSummary.get(j).averageRadius(), thirdSummary.get(j).averageRadius(), 1e-3);\n            }\n        }\n    }\n\n    @Test\n    public void testExtrapolateShingleAwareSinglePrecision() {\n\n        int numberOfTrees = 100;\n        int sampleSize = 256;\n        int shinglesize = 10;\n        long randomSeed = 123;\n\n        RandomCutForest newforest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).shingleSize(shinglesize)\n                .precision(Precision.FLOAT_32).build();\n        RandomCutForest anotherforest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).shingleSize(1)\n                .precision(Precision.FLOAT_32).build();\n        RandomCutForest yetAnotherforest = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).shingleSize(shinglesize)\n                .internalShinglingEnabled(true).precision(Precision.FLOAT_32).build();\n\n        double amplitude = 50.0;\n        double noise = 2.0;\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[shinglesize];\n        int num = 850;\n        double[] data = getDataA(amplitude, noise);\n        double[] answer = null;\n        double error = 0;\n        double[] record = null;\n\n        for (int j = 0; j < num; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % shinglesize;\n            if (entryIndex 
== 0) {\n                filledShingleAtleastOnce = true;\n            }\n            // input is always double[], internal representation is float[]\n            // input is 1 dimensional for internal shingling (for 1 dimensional sequences)\n            yetAnotherforest.update(new double[] { data[j] });\n\n            if (filledShingleAtleastOnce) {\n\n                record = getShinglePoint(history, entryIndex, shinglesize);\n                newforest.update(record);\n                anotherforest.update(record);\n            }\n        }\n\n        answer = newforest.extrapolateBasic(record, 200, 1, false);\n        double[] anotherAnswer = anotherforest.extrapolateBasic(record, 200, 1, false);\n        double[] yetAnotherAnswer = yetAnotherforest.extrapolate(200);\n        assertArrayEquals(anotherAnswer, answer, 1e-10);\n        assertArrayEquals(yetAnotherAnswer, answer, 1e-10);\n\n        error = 0;\n        for (int j = 0; j < 200; j++) {\n            double prediction = amplitude * cos((j + 850 - 50) * 2 * PI / 120);\n            error += Math.abs(prediction - answer[j]);\n        }\n        error = error / 200;\n\n        assertTrue(error < 4 * noise);\n\n    }\n\n    @Test\n    public void testExtrapolateInternalRotationSinglePrecision() {\n\n        int numberOfTrees = 100;\n        int sampleSize = 256;\n        int shinglesize = 120;\n        long randomSeed = 123;\n\n        RandomCutForest newforestA = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).precision(Precision.FLOAT_32).build();\n\n        RandomCutForest newforestB = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).internalShinglingEnabled(true)\n                .internalRotationEnabled(true).compact(true).shingleSize(shinglesize).precision(Precision.FLOAT_32)\n                .build();\n  
      RandomCutForest newforestC = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).shingleSize(shinglesize)\n                .precision(Precision.FLOAT_32).build();\n        double amplitude = 50.0;\n        double noise = 2.0;\n        Random noiseprg = new Random(72);\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[shinglesize];\n        int num = 850;\n        double[] data = getDataA(amplitude, noise);\n        double[] answer = null;\n        double error = 0;\n\n        double[] record = null;\n        for (int j = 0; j < num; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % shinglesize;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n\n            newforestB.update(new double[] { data[j] });\n            if (filledShingleAtleastOnce) {\n                // produce cyclic vectors\n                record = getShinglePoint(history, 0, shinglesize);\n                newforestA.update(record);\n                newforestC.update(record);\n            }\n        }\n\n        answer = newforestA.extrapolateBasic(record, 200, 1, true, entryIndex);\n        double[] anotherAnswer = newforestB.extrapolate(200);\n        double[] yetAnotherAnswer = newforestC.extrapolateBasic(record, 200, 1, true, entryIndex);\n        assertArrayEquals(answer, yetAnotherAnswer, 1e-10);\n        double[] othershingle = toDoubleArray(newforestB.lastShingledPoint());\n        assertEquals(entryIndex, newforestB.nextSequenceIndex() % shinglesize);\n        assertArrayEquals(record, othershingle, 1e-5);\n        assertArrayEquals(answer, anotherAnswer, 1e-5);\n        error = 0;\n        for (int j = 0; j < 200; j++) {\n            double prediction = amplitude * cos((j + 850 - 50) * 2 * PI / 
120);\n            error += Math.abs(prediction - answer[j]);\n        }\n        error = error / 200;\n        assertTrue(error < 4 * noise);\n\n    }\n\n    @Test\n    public void testExtrapolateC() {\n\n        int numberOfTrees = 100;\n        int sampleSize = 256;\n        int shinglesize = 20;\n        long randomSeed = 124;\n\n        // build two identical copies; we will be giving them different\n        // subsequent inputs and test adaptation to stream evolution\n\n        RandomCutForest newforestC = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).timeDecay(1.0 / 300).build();\n\n        RandomCutForest newforestD = RandomCutForest.builder().numberOfTrees(numberOfTrees).sampleSize(sampleSize)\n                .dimensions(shinglesize).randomSeed(randomSeed).compact(true).timeDecay(1.0 / 300).build();\n\n        double amplitude = 50.0;\n        double noise = 2.0;\n        Random noiseprg = new Random(72);\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[shinglesize];\n        int num = 1330;\n        double[] data = getDataB(amplitude, noise);\n        double[] answer = null;\n        double error = 0;\n\n        double[] record = null;\n        for (int j = 0; j < num; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % shinglesize;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n\n                record = getShinglePoint(history, entryIndex, shinglesize);\n                newforestC.update(record);\n                newforestD.update(record);\n            }\n        }\n        /**\n         * the two forests are identical up to this point we will now provide two\n         * different input to each 
num+2*expLife=1930, but since the shape of the\n         * pattern remains the same in a phase shift, the prediction comes back to\n         * \"normal\" fairly quickly.\n         */\n\n        for (int j = num; j < 1630; ++j) { // we stream here ....\n            double t = cos(2 * PI * (j - 50) / 240);\n            history[entryIndex] = amplitude * t + noise * noiseprg.nextDouble();\n            ;\n            entryIndex = (entryIndex + 1) % shinglesize;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n\n                record = getShinglePoint(history, entryIndex, shinglesize);\n                newforestC.update(record);\n            }\n        }\n        answer = newforestC.extrapolateBasic(record, 200, 1, false);\n\n        error = 0;\n        for (int j = 0; j < 200; j++) {\n            double t = cos(2 * PI * (1630 + j - 50) / 240);\n            double prediction = amplitude * t;\n            error += Math.abs(prediction - answer[j]);\n        }\n        error = error / 200;\n        assertTrue(error < 2 * noise);\n\n        /**\n         * Here num+2*expLife=1930 for a small explife such as 300, num+expLife is\n         * already sufficient increase the factor for larger expLife or increase the\n         * sampleSize to absorb the longer range dependencies of a larger expLife\n         */\n        for (int j = num; j < 1630; ++j) { // we stream here ....\n            double t = cos(2 * PI * (j + 50) / 120);\n            int sign = (t > 0) ? 
1 : -1;\n            history[entryIndex] = amplitude * sign * Math.pow(t * sign, 1.0 / 3) + noise * noiseprg.nextDouble();\n            entryIndex = (entryIndex + 1) % shinglesize;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n\n                record = getShinglePoint(history, entryIndex, shinglesize);\n                newforestD.update(record);\n            }\n        }\n        answer = newforestD.extrapolateBasic(record, 200, 1, false);\n\n        error = 0;\n        for (int j = 0; j < 200; j++) {\n            double t = cos(2 * PI * (1630 + j + 50) / 120);\n            int sign = (t > 0) ? 1 : -1;\n            double prediction = amplitude * sign * Math.pow(t * sign, 1.0 / 3);\n            error += Math.abs(prediction - answer[j]);\n        }\n        error = error / 200;\n        assertTrue(error < 2 * noise);\n    }\n\n    double[] getDataA(double amplitude, double noise) {\n        int num = 850;\n        double[] data = new double[num];\n        Random noiseprg = new Random(9000);\n\n        for (int i = 0; i < 510; i++) {\n            data[i] = amplitude * cos(2 * PI * (i - 50) / 120) + noise * noiseprg.nextDouble();\n        }\n        for (int i = 510; i < 525; i++) { // flatline\n            data[i] = 0;\n        }\n        for (int i = 525; i < 825; i++) {\n            data[i] = amplitude * cos(2 * PI * (i - 50) / 120) + noise * noiseprg.nextDouble();\n        }\n        for (int i = 825; i < num; i++) { // high frequency noise\n            data[i] = amplitude * cos(2 * PI * (i - 50) / 12) + noise * noiseprg.nextDouble();\n        }\n        return data;\n    }\n\n    double[] getDataB(double amplitude, double noise) {\n        int num = 1330;\n        double[] data = new double[num];\n        Random noiseprg = new Random(9001);\n        for (int i = 0; i < 990; i++) {\n            data[i] = amplitude * cos(2 * PI * (i + 50) / 240) + noise * 
noiseprg.nextDouble();\n        }\n        for (int i = 990; i < 1005; i++) { // flatline\n            data[i] = 0;\n        }\n        for (int i = 1005; i < 1305; i++) {\n            data[i] = amplitude * cos(2 * PI * (i + 50) / 240) + noise * noiseprg.nextDouble();\n        }\n        for (int i = 1305; i < num; i++) { // high frequency noise\n            data[i] = amplitude * cos(2 * PI * (i + 50) / 12) + noise * noiseprg.nextDouble();\n        }\n        return data;\n    }\n\n    private static double[] getShinglePoint(double[] recentPointsSeen, int indexOfOldestPoint, int shingleLength) {\n        double[] shingledPoint = new double[shingleLength];\n        int i = 0;\n        for (int j = 0; j < shingleLength; ++j) {\n            double point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];\n            shingledPoint[i++] = point;\n\n        }\n        return shingledPoint;\n    }\n\n    @Test\n    public void testUpdate() {\n        int dimensions = 10;\n\n        RandomCutForest forest = RandomCutForest.builder().numberOfTrees(100).compact(true).dimensions(dimensions)\n                .randomSeed(0).sampleSize(200).precision(Precision.FLOAT_32).build();\n\n        double[][] trainingData = genShingledData(1000, dimensions, 0);\n        double[][] testData = genShingledData(100, dimensions, 1);\n\n        for (int i = 0; i < testData.length; i++) {\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(true);\n\n            double score = forest.getAnomalyScore(testData[i]);\n            forest.update(testData[i]);\n            RandomCutForestState forestState = mapper.toState(forest);\n            forest = mapper.toModel(forestState);\n        }\n    }\n\n    private static double[][] genShingledData(int size, int dimensions, long seed) {\n        double[][] answer = new double[size][];\n        int entryIndex = 0;\n        
boolean filledShingleAtleastOnce = false;\n        double[] history = new double[dimensions];\n        int count = 0;\n        double[] data = getDataD(size + dimensions - 1, 100, 5, seed);\n        for (int j = 0; j < size + dimensions - 1; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % dimensions;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n                // System.out.println(\"Adding \" + j);\n                answer[count++] = getShinglePoint(history, entryIndex, dimensions);\n            }\n        }\n        return answer;\n    }\n\n    private static double[] getDataD(int num, double amplitude, double noise, long seed) {\n\n        double[] data = new double[num];\n        Random noiseprg = new Random(seed);\n        for (int i = 0; i < num; i++) {\n            data[i] = amplitude * cos(2 * PI * (i + 50) / 1000) + noise * noiseprg.nextDouble();\n        }\n\n        return data;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/RandomCutForestTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static java.lang.Math.PI;\nimport static java.lang.Math.abs;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.core.Is.is;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.AdditionalMatchers.aryEq;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.ArgumentMatchers.anyDouble;\nimport static org.mockito.ArgumentMatchers.anyInt;\nimport static org.mockito.ArgumentMatchers.eq;\nimport static org.mockito.Mockito.doNothing;\nimport static org.mockito.Mockito.doReturn;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.never;\nimport static org.mockito.Mockito.reset;\nimport static org.mockito.Mockito.spy;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport 
java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Optional;\nimport java.util.Random;\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\nimport java.util.stream.Collector;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.powermock.reflect.Whitebox;\n\nimport com.amazon.randomcutforest.config.Config;\nimport com.amazon.randomcutforest.executor.AbstractForestTraversalExecutor;\nimport com.amazon.randomcutforest.executor.AbstractForestUpdateExecutor;\nimport com.amazon.randomcutforest.executor.IStateCoordinator;\nimport com.amazon.randomcutforest.executor.PointStoreCoordinator;\nimport com.amazon.randomcutforest.executor.SamplerPlusTree;\nimport com.amazon.randomcutforest.executor.SequentialForestTraversalExecutor;\nimport com.amazon.randomcutforest.executor.SequentialForestUpdateExecutor;\nimport com.amazon.randomcutforest.returntypes.ConditionalTreeSample;\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.InterpolationMeasure;\nimport com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.returntypes.OneSidedConvergingDiVectorAccumulator;\nimport com.amazon.randomcutforest.returntypes.OneSidedConvergingDoubleAccumulator;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.tree.ITree;\nimport com.amazon.randomcutforest.tree.RandomCutTree;\nimport 
com.amazon.randomcutforest.util.ShingleBuilder;\n\npublic class RandomCutForestTest {\n\n    private int dimensions;\n    private int sampleSize;\n    private int numberOfTrees;\n    private ComponentList<Integer, float[]> components;\n    private AbstractForestTraversalExecutor traversalExecutor;\n    private IStateCoordinator<Integer, float[]> updateCoordinator;\n    private AbstractForestUpdateExecutor<Integer, float[]> updateExecutor;\n    private RandomCutForest forest;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 2;\n        sampleSize = 256;\n        numberOfTrees = 10;\n\n        components = new ComponentList<>();\n        for (int i = 0; i < numberOfTrees; i++) {\n            CompactSampler sampler = mock(CompactSampler.class);\n            when(sampler.getCapacity()).thenReturn(sampleSize);\n            RandomCutTree tree = mock(RandomCutTree.class);\n            components.add(spy(new SamplerPlusTree<>(sampler, tree)));\n\n        }\n        updateCoordinator = spy(\n                new PointStoreCoordinator<>(new PointStore.Builder().dimensions(2).capacity(1).build()));\n        traversalExecutor = spy(new SequentialForestTraversalExecutor(components));\n        updateExecutor = spy(new SequentialForestUpdateExecutor<>(updateCoordinator, components));\n\n        RandomCutForest.Builder<?> builder = RandomCutForest.builder().dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize);\n        forest = spy(new RandomCutForest(builder, updateCoordinator, components, builder.getRandom()));\n\n        Whitebox.setInternalState(forest, \"traversalExecutor\", traversalExecutor);\n        Whitebox.setInternalState(forest, \"updateExecutor\", updateExecutor);\n    }\n\n    @Test\n    void checkOutput() {\n        assertFalse(forest.isOutputReady());\n        assertThrows(IllegalArgumentException.class, () -> forest.getConditionalField(null, new int[1], 1.0).size());\n        
assertEquals(forest.extrapolateBasic(new float[2], 1, 1, false)[0], 0);\n        assertEquals(forest.getDynamicScore(new float[2], 1, null, null, null), 0);\n        assertEquals(forest.getDynamicAttribution(new float[2], 1, null, null, null).getHighLowSum(), 0);\n        assertEquals(forest.getDynamicSimulatedScore(new float[2], null, null, null, null), 0);\n        assertEquals(forest.getApproximateDynamicScore(new float[2], 0.1, true, 1, null, null, null), 0);\n        assertEquals(\n                forest.getApproximateDynamicAttribution(new float[2], 0.1, true, 1, null, null, null).getHighLowSum(),\n                0);\n    }\n\n    @Test\n    void checkParameters() {\n        assertThrows(IllegalArgumentException.class, () -> forest.getConditionalField(null, null, 1));\n        assertThrows(IllegalArgumentException.class, () -> forest.getConditionalField(null, new int[1], 1));\n        assertThrows(IllegalArgumentException.class, () -> forest.getConditionalField(null, new int[1], -1));\n        assertThrows(IllegalArgumentException.class, () -> forest.getConditionalField(null, new int[1], 2));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.getConditionalFieldSummary(new float[2], new int[0], 1, 0, false, false, -1, 1));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.getConditionalFieldSummary(new float[2], new int[0], 1, 0, false, false, 2, 1));\n        assertDoesNotThrow(() -> forest.getConditionalFieldSummary(new float[2], new int[0], 1, 0, false, false, 1, 1));\n        assertThrows(IllegalArgumentException.class, () -> forest.setTimeDecay(-2));\n        assertThrows(IllegalArgumentException.class, () -> forest.setBoundingBoxCacheFraction(-1));\n        assertThrows(IllegalArgumentException.class, () -> forest.setBoundingBoxCacheFraction(2));\n        assertThrows(IllegalArgumentException.class, () -> forest.getDynamicScore(new float[2], -1, null, null, null));\n        
assertThrows(IllegalArgumentException.class,\n                () -> forest.getApproximateDynamicScore(new float[2], 0.1, true, -1, null, null, null));\n    }\n\n    @Test\n    public void testUpdate() {\n        float[] point = { 2.2f, -1.1f };\n        forest.update(point);\n        verify(updateExecutor, times(1)).update(point, false);\n        assertEquals(updateCoordinator.getStore().getCapacity(), 1);\n    }\n\n    @Test\n    public void testUpdateShingled() {\n        float[] point = { 2.2f, -1.1f };\n        RandomCutForest newForest = RandomCutForest.builder().internalShinglingEnabled(true).dimensions(2).build();\n        assertThrows(IllegalArgumentException.class, () -> newForest.update(point, 0L));\n        assertDoesNotThrow(() -> newForest.update(point));\n    }\n\n    @Test\n    public void testUpdateInvalid() {\n        assertThrows(NullPointerException.class, () -> forest.update((double[]) null));\n        assertThrows(NullPointerException.class, () -> forest.update((float[]) null));\n        assertThrows(IllegalArgumentException.class, () -> forest.update(new double[] { 1.2, 3.4, -5.6 }));\n        assertThrows(IllegalArgumentException.class, () -> forest.update(new float[3]));\n        assertThrows(IllegalArgumentException.class, () -> forest.update(new float[3], 0l));\n    }\n\n    @Test\n    public void testTraverseForestBinaryAccumulator() {\n        float[] point = { 2.2f, -1.1f };\n        BinaryOperator<Double> accumulator = Double::sum;\n        Function<Double, Double> finisher = x -> x / numberOfTrees;\n\n        components.forEach(c -> doReturn(0.0).when(c).traverse(aryEq(point), any(VisitorFactory.class)));\n\n        forest.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, finisher);\n        verify(traversalExecutor, times(1)).traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator,\n                finisher);\n    }\n\n    @Test\n    public void testTraverseForestBinaryAccumulatorInvalid() 
{\n        float[] point = { 2.2f, -1.1f };\n        BinaryOperator<Double> accumulator = Double::sum;\n        Function<Double, Double> finisher = x -> x / numberOfTrees;\n\n        components.forEach(c -> when(c.traverse(aryEq(point), any())).thenReturn(0.0));\n\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForest(null, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, finisher));\n        assertThrows(IllegalArgumentException.class, () -> forest.traverseForest(new float[] { 2.2f, -1.1f, 3.3f },\n                TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, finisher));\n        assertThrows(NullPointerException.class, () -> forest.traverseForest(point, null, accumulator, finisher));\n        assertThrows(NullPointerException.class, () -> forest.traverseForest(point,\n                TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, (BinaryOperator<Double>) null, finisher));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, null));\n    }\n\n    @Test\n    public void testTraverseForestCollector() {\n        float[] point = { 2.2f, -1.1f };\n\n        components.forEach(c -> doReturn(0.0).when(c).traverse(aryEq(point), any(VisitorFactory.class)));\n\n        forest.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, TestUtils.SORTED_LIST_COLLECTOR);\n        verify(traversalExecutor, times(1)).traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY,\n                TestUtils.SORTED_LIST_COLLECTOR);\n    }\n\n    @Test\n    public void testTraverseForestCollectorInvalid() {\n        float[] point = { 2.2f, -1.1f };\n\n        components.forEach(c -> when(c.traverse(aryEq(point), any())).thenReturn(0.0));\n\n        assertThrows(NullPointerException.class, () -> forest.traverseForest(null,\n                TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, TestUtils.SORTED_LIST_COLLECTOR));\n        
assertThrows(IllegalArgumentException.class, () -> forest.traverseForest(new float[] { 2.2f, -1.1f, 3.3f },\n                TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, TestUtils.SORTED_LIST_COLLECTOR));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForest(point, null, TestUtils.SORTED_LIST_COLLECTOR));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, null));\n    }\n\n    @Test\n    public void testTraverseForestConverging() {\n        float[] point = new float[] { 1.2f, -3.4f };\n\n        int convergenceThreshold = numberOfTrees / 2;\n        ConvergingAccumulator<Double> accumulator = TestUtils.convergeAfter(convergenceThreshold);\n\n        Function<Double, Double> finisher = x -> x / accumulator.getValuesAccepted();\n\n        components.forEach(c -> doReturn(0.0).when(c).traverse(aryEq(point), any(VisitorFactory.class)));\n\n        forest.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, finisher);\n        verify(traversalExecutor, times(1)).traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator,\n                finisher);\n    }\n\n    @Test\n    public void testTraverseForestConvergingInvalid() {\n        float[] point = new float[] { 1.2f, -3.4f };\n\n        int convergenceThreshold = numberOfTrees / 2;\n        ConvergingAccumulator<Double> accumulator = TestUtils.convergeAfter(convergenceThreshold);\n\n        Function<Double, Double> finisher = x -> x / accumulator.getValuesAccepted();\n\n        components.forEach(c -> when(c.traverse(aryEq(point), any())).thenReturn(0.0));\n\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForest(null, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, finisher));\n        assertThrows(IllegalArgumentException.class, () -> forest.traverseForest(new float[] { 1.2f, -3.4f, 5.6f },\n                
TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, finisher));\n        assertThrows(NullPointerException.class, () -> forest.traverseForest(point, null, accumulator, finisher));\n        assertThrows(NullPointerException.class, () -> forest.traverseForest(point,\n                TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, (ConvergingAccumulator<Double>) null, finisher));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator, null));\n    }\n\n    @Test\n    public void traverseForestMultiBinaryAccumulator() {\n        float[] point = { 2.2f, -1.1f };\n        BinaryOperator<Double> accumulator = Double::sum;\n        Function<Double, Double> finisher = x -> x / numberOfTrees;\n\n        components.forEach(c -> doReturn(0.0).when(c).traverseMulti(aryEq(point), any(MultiVisitorFactory.class)));\n\n        forest.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, accumulator, finisher);\n        verify(traversalExecutor, times(1)).traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY,\n                accumulator, finisher);\n    }\n\n    @Test\n    public void testTraverseForestMultiBinaryAccumulatorInvalid() {\n        float[] point = { 2.2f, -1.1f };\n        BinaryOperator<Double> accumulator = Double::sum;\n        Function<Double, Double> finisher = x -> x / numberOfTrees;\n\n        components.forEach(c -> when(c.traverseMulti(aryEq(point), any())).thenReturn(0.0));\n\n        assertThrows(NullPointerException.class, () -> forest.traverseForestMulti(null,\n                TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, accumulator, finisher));\n        assertThrows(IllegalArgumentException.class, () -> forest.traverseForestMulti(new float[] { 2.2f, -1.1f, 3.3f },\n                TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, accumulator, finisher));\n        assertThrows(NullPointerException.class, () -> 
forest.traverseForestMulti(point, null, accumulator, finisher));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, null, finisher));\n        assertThrows(NullPointerException.class, () -> forest.traverseForestMulti(point,\n                TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, accumulator, null));\n    }\n\n    @Test\n    public void testTraverseForestMultiCollector() {\n        float[] point = { 2.2f, -1.1f };\n\n        components.forEach(c -> doReturn(0.0).when(c).traverseMulti(aryEq(point), any(MultiVisitorFactory.class)));\n\n        forest.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY,\n                TestUtils.SORTED_LIST_COLLECTOR);\n        verify(traversalExecutor, times(1)).traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY,\n                TestUtils.SORTED_LIST_COLLECTOR);\n    }\n\n    @Test\n    public void testTraverseForestCollectorMultiInvalid() {\n        float[] point = { 2.2f, -1.1f };\n\n        components.forEach(c -> when(c.traverse(aryEq(point), any())).thenReturn(0.0));\n\n        assertThrows(NullPointerException.class, () -> forest.traverseForestMulti(null,\n                TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, TestUtils.SORTED_LIST_COLLECTOR));\n        assertThrows(IllegalArgumentException.class, () -> forest.traverseForestMulti(new float[] { 2.2f, -1.1f, 3.3f },\n                TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, TestUtils.SORTED_LIST_COLLECTOR));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForestMulti(point, null, TestUtils.SORTED_LIST_COLLECTOR));\n        assertThrows(NullPointerException.class,\n                () -> forest.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, null));\n    }\n\n    @Test\n    public void testGetAnomalyScore() {\n        float[] point = { 1.2f, -3.4f 
};\n\n        assertFalse(forest.isOutputReady());\n        assertEquals(0.0, forest.getAnomalyScore(point));\n\n        doReturn(true).when(forest).isOutputReady();\n        double expectedResult = 0.0;\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            SamplerPlusTree<Integer, float[]> component = (SamplerPlusTree<Integer, float[]>) components.get(i);\n            ITree<Integer, float[]> tree = component.getTree();\n            double treeResult = Math.random();\n            when(tree.traverse(aryEq(point), any(IVisitorFactory.class))).thenReturn(treeResult);\n\n            when(tree.getMass()).thenReturn(256);\n\n            expectedResult += treeResult;\n        }\n\n        expectedResult /= numberOfTrees;\n        assertEquals(expectedResult, forest.getAnomalyScore(point), EPSILON);\n    }\n\n    @Test\n    public void testGetApproximateAnomalyScore() {\n        float[] point = { 1.2f, -3.4f };\n\n        assertFalse(forest.isOutputReady());\n        assertEquals(0.0, forest.getApproximateAnomalyScore(point));\n\n        doReturn(true).when(forest).isOutputReady();\n\n        ConvergingAccumulator<Double> accumulator = new OneSidedConvergingDoubleAccumulator(\n                RandomCutForest.DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL,\n                RandomCutForest.DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION,\n                RandomCutForest.DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            SamplerPlusTree<Integer, float[]> component = (SamplerPlusTree<Integer, float[]>) components.get(i);\n            ITree<Integer, float[]> tree = component.getTree();\n            double treeResult = Math.random();\n            when(tree.traverse(aryEq(point), any(IVisitorFactory.class))).thenReturn(treeResult);\n\n            when(tree.getMass()).thenReturn(256);\n\n            if (!accumulator.isConverged()) {\n                accumulator.accept(treeResult);\n   
         }\n        }\n\n        double expectedResult = accumulator.getAccumulatedValue() / accumulator.getValuesAccepted();\n        assertEquals(expectedResult, forest.getApproximateAnomalyScore(point), EPSILON);\n    }\n\n    @Test\n    public void testGetAnomalyAttribution() {\n        float[] point = { 1.2f, -3.4f };\n\n        assertFalse(forest.isOutputReady());\n        DiVector zero = new DiVector(dimensions);\n        DiVector result = forest.getAnomalyAttribution(point);\n        assertArrayEquals(zero.high, result.high);\n        assertArrayEquals(zero.low, result.low);\n\n        doReturn(true).when(forest).isOutputReady();\n        DiVector expectedResult = new DiVector(dimensions);\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            DiVector treeResult = new DiVector(dimensions);\n            for (int j = 0; j < dimensions; j++) {\n                treeResult.high[j] = Math.random();\n                treeResult.low[j] = Math.random();\n            }\n\n            SamplerPlusTree<Integer, float[]> component = (SamplerPlusTree<Integer, float[]>) components.get(i);\n            ITree<Integer, float[]> tree = component.getTree();\n            when(tree.traverse(aryEq(point), any(VisitorFactory.class))).thenReturn(treeResult);\n\n            when(tree.getMass()).thenReturn(256);\n\n            DiVector.addToLeft(expectedResult, treeResult);\n        }\n\n        expectedResult = expectedResult.scale(1.0 / numberOfTrees);\n        result = forest.getAnomalyAttribution(point);\n        assertArrayEquals(expectedResult.high, result.high, EPSILON);\n        assertArrayEquals(expectedResult.low, result.low, EPSILON);\n    }\n\n    @Test\n    public void testGetApproximateAnomalyAttribution() {\n        float[] point = { 1.2f, -3.4f };\n        DiVector zero = new DiVector(dimensions);\n        DiVector result = forest.getApproximateAnomalyAttribution(point);\n\n        assertFalse(forest.isOutputReady());\n        assertArrayEquals(zero.high, 
result.high, EPSILON);\n        assertArrayEquals(zero.low, result.low, EPSILON);\n\n        doReturn(true).when(forest).isOutputReady();\n\n        ConvergingAccumulator<DiVector> accumulator = new OneSidedConvergingDiVectorAccumulator(dimensions,\n                RandomCutForest.DEFAULT_APPROXIMATE_ANOMALY_SCORE_HIGH_IS_CRITICAL,\n                RandomCutForest.DEFAULT_APPROXIMATE_DYNAMIC_SCORE_PRECISION,\n                RandomCutForest.DEFAULT_APPROXIMATE_DYNAMIC_SCORE_MIN_VALUES_ACCEPTED, numberOfTrees);\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            SamplerPlusTree<Integer, float[]> component = (SamplerPlusTree<Integer, float[]>) components.get(i);\n            ITree<Integer, float[]> tree = component.getTree();\n            DiVector treeResult = new DiVector(dimensions);\n\n            for (int j = 0; j < dimensions; j++) {\n                treeResult.high[j] = Math.random();\n                treeResult.low[j] = Math.random();\n            }\n\n            when(tree.traverse(aryEq(point), any(VisitorFactory.class))).thenReturn(treeResult);\n\n            when(tree.getMass()).thenReturn(256);\n\n            if (!accumulator.isConverged()) {\n                accumulator.accept(treeResult);\n            }\n        }\n\n        DiVector expectedResult = accumulator.getAccumulatedValue().scale(1.0 / accumulator.getValuesAccepted());\n        result = forest.getApproximateAnomalyAttribution(point);\n        assertArrayEquals(expectedResult.high, result.high, EPSILON);\n        assertArrayEquals(expectedResult.low, result.low, EPSILON);\n    }\n\n    @Test\n    public void testGetSimpleDensity() {\n        float[] point = { 12.3f, -45.6f };\n        DensityOutput zero = new DensityOutput(dimensions, sampleSize);\n        assertFalse(forest.isOutputReady());\n        DensityOutput result = forest.getSimpleDensity(point);\n        assertEquals(zero.getDensity(), result.getDensity(), EPSILON);\n\n        
doReturn(true).when(forest).isOutputReady();\n        List<InterpolationMeasure> intermediateResults = new ArrayList<>();\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            InterpolationMeasure treeResult = new InterpolationMeasure(dimensions, sampleSize);\n            for (int j = 0; j < dimensions; j++) {\n                treeResult.measure.high[j] = Math.random();\n                treeResult.measure.low[j] = Math.random();\n                treeResult.distances.high[j] = Math.random();\n                treeResult.distances.low[j] = Math.random();\n                treeResult.probMass.high[j] = Math.random();\n                treeResult.probMass.low[j] = Math.random();\n            }\n\n            SamplerPlusTree<Integer, float[]> component = (SamplerPlusTree<Integer, float[]>) components.get(i);\n            ITree<Integer, float[]> tree = component.getTree();\n            when(tree.traverse(aryEq(point), any(VisitorFactory.class))).thenReturn(treeResult);\n            intermediateResults.add(treeResult);\n        }\n\n        Collector<InterpolationMeasure, ?, InterpolationMeasure> collector = InterpolationMeasure.collector(dimensions,\n                0, numberOfTrees);\n        DensityOutput expectedResult = new DensityOutput(intermediateResults.stream().collect(collector));\n        result = forest.getSimpleDensity(point);\n        assertEquals(expectedResult.getDensity(), result.getDensity(), EPSILON);\n    }\n\n    @Test\n    public void testImputeMissingValuesInvalid() {\n        float[] point = { 12.3f, -45.6f };\n        int numberOfMissingValues = 1;\n        int[] missingIndexes = { 0, 1 };\n\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.imputeMissingValues((float[]) null, numberOfMissingValues, missingIndexes));\n\n    }\n\n    @Test\n    public void testImputeMissingValuesWithNoMissingValues() {\n        float[] point = { 12.3f, -45.6f };\n        int[] missingIndexes = { 1, 1000 }; // second value 
doesn't matter since numberOfMissingValues is 1\n\n        double[] result = forest.imputeMissingValues(toDoubleArray(point), 0, missingIndexes);\n        assertArrayEquals(new double[] { 0.0, 0.0 }, result);\n    }\n\n    @Test\n    public void testImputeMissingValuesWithOutputNotReady() {\n        double[] point = { 12.3, -45.6 };\n        int numberOfMissingValues = 1;\n        int[] missingIndexes = { 1, 1000 }; // second value doesn't matter since numberOfMissingValues is 1\n\n        assertFalse(forest.isOutputReady());\n        double[] zero = new double[dimensions];\n        assertArrayEquals(zero, forest.imputeMissingValues(point, numberOfMissingValues, missingIndexes));\n    }\n\n    @Test\n    public void testExtrapolateBasic() {\n        doNothing().when(forest).extrapolateBasicCyclic(any(RangeVector.class), anyInt(), anyInt(), anyInt(),\n                any(float[].class), any(int[].class), anyDouble());\n        doNothing().when(forest).extrapolateBasicSliding(any(RangeVector.class), anyInt(), anyInt(), any(float[].class),\n                any(int[].class), anyDouble());\n\n        double[] point = new double[] { 2.0, -3.0 };\n        int horizon = 2;\n        int blockSize = 1;\n        boolean cyclic = true;\n        int shingleIndex = 1;\n\n        forest.extrapolateBasic(point, horizon, blockSize, cyclic, shingleIndex);\n        verify(forest).extrapolateBasicCyclic(any(RangeVector.class), eq(horizon), eq(blockSize), eq(shingleIndex),\n                any(float[].class), any(int[].class), anyDouble());\n\n        forest.extrapolateBasic(point, horizon, blockSize, cyclic);\n        verify(forest).extrapolateBasicCyclic(any(RangeVector.class), eq(horizon), eq(blockSize), eq(0),\n                any(float[].class), any(int[].class), anyDouble());\n\n        cyclic = false;\n        forest.extrapolateBasic(point, horizon, blockSize, cyclic, shingleIndex);\n        forest.extrapolateBasic(point, horizon, blockSize, cyclic);\n        verify(forest, 
times(2)).extrapolateBasicSliding(any(RangeVector.class), eq(horizon), eq(blockSize),\n                any(float[].class), any(int[].class), anyDouble());\n    }\n\n    @Test\n    public void testExtrapolateBasicInvalid() {\n        double[] point = new double[] { 2.0, -3.0 };\n        int horizon = 2;\n        int blockSize = 1;\n        boolean cyclic = true;\n        int shingleIndex = 1;\n\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.extrapolateBasic(point, horizon, -10, cyclic, shingleIndex));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.extrapolateBasic(point, horizon, 0, cyclic, shingleIndex));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.extrapolateBasic(point, horizon, dimensions, cyclic, shingleIndex));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.extrapolateBasic(point, horizon, dimensions * 2, cyclic, shingleIndex));\n        assertThrows(NullPointerException.class,\n                () -> forest.extrapolateBasic((double[]) null, horizon, blockSize, cyclic, shingleIndex));\n\n        RandomCutForest f = RandomCutForest.defaultForest(20);\n        double[] p = new double[20];\n\n        // dimensions not divisible by blockSize\n        assertThrows(IllegalArgumentException.class, () -> f.extrapolateBasic(p, horizon, 7, cyclic, shingleIndex));\n\n        // invalid shingle index values\n        assertThrows(IllegalArgumentException.class, () -> f.extrapolateBasic(point, horizon, 5, cyclic, -1));\n        assertThrows(IllegalArgumentException.class, () -> f.extrapolateBasic(point, horizon, 5, cyclic, 4));\n        assertThrows(IllegalArgumentException.class, () -> f.extrapolateBasic(point, horizon, 4, cyclic, 44));\n    }\n\n    @Test\n    public void testExtrapolateBasicWithShingleBuilder() {\n        doNothing().when(forest).extrapolateBasicCyclic(any(RangeVector.class), anyInt(), anyInt(), anyInt(),\n 
               any(float[].class), any(int[].class), anyDouble());\n        doNothing().when(forest).extrapolateBasicSliding(any(RangeVector.class), anyInt(), anyInt(), any(float[].class),\n                any(int[].class), anyDouble());\n\n        ShingleBuilder shingleBuilder = new ShingleBuilder(1, 2, true);\n        int horizon = 3;\n\n        forest.extrapolateBasic(shingleBuilder, horizon);\n        verify(forest, times(1)).extrapolateBasicCyclic(any(RangeVector.class), eq(horizon), eq(1), eq(0),\n                any(float[].class), any(int[].class), anyDouble());\n\n        shingleBuilder = new ShingleBuilder(1, 2, false);\n        forest.extrapolateBasic(shingleBuilder, horizon);\n        verify(forest, times(1)).extrapolateBasicSliding(any(RangeVector.class), eq(horizon), eq(1), any(float[].class),\n                any(int[].class), anyDouble());\n    }\n\n    @Test\n    public void testExtrapolateBasicSliding() {\n        int horizon = 3;\n        int blockSize = 2;\n        RangeVector result = new RangeVector(dimensions * horizon);\n        float[] queryPoint = new float[] { 1.0f, -2.0f };\n        int[] missingIndexes = new int[blockSize];\n\n        doReturn(new SampleSummary(new float[] { 2.0f, -3.0f }))\n                .doReturn(new SampleSummary(new float[] { 4.0f, -5.0f }))\n                .doReturn(new SampleSummary(new float[] { 6.0f, -7.0f })).when(forest)\n                .getConditionalFieldSummary(aryEq(queryPoint), any(int[].class), anyInt(), anyDouble(),\n                        any(Boolean.class), any(Boolean.class), anyDouble(), anyInt());\n\n        forest.extrapolateBasicSliding(result, horizon, blockSize, queryPoint, missingIndexes, 1.0);\n\n        float[] expectedResult = new float[] { 2.0f, -3.0f, 4.0f, -5.0f, 6.0f, -7.0f };\n        assertArrayEquals(expectedResult, result.values);\n        // test properties of RangeVector as well\n        for (int i = 0; i < 6; i++) {\n            assert (result.upper[i] >= 
result.values[i]);\n            assert (result.lower[i] <= result.values[i]);\n        }\n        // validate subsequent operations (typically used in parkservices)\n        expectedResult[0] = 0f;\n        RangeVector newVector = new RangeVector(expectedResult);\n        RangeVector another = new RangeVector(result);\n        another.shift(0, -2.0f);\n        another.scale(2, 0.25f);\n        newVector.scale(2, 0.25f);\n        assertArrayEquals(newVector.values, another.values, 1e-6f);\n        for (int i = 0; i < 6; i++) {\n            assert (another.upper[i] >= another.values[i]);\n            assert (another.lower[i] <= another.values[i]);\n        }\n    }\n\n    @Test\n    public void testExtrapolateBasicCyclic() {\n        int horizon = 3;\n        int blockSize = 2;\n        RangeVector result = new RangeVector(dimensions * horizon);\n        int shingleIndex = 1;\n        float[] queryPoint = new float[] { 1.0f, -2.0f };\n        int[] missingIndexes = new int[blockSize];\n\n        doReturn(new SampleSummary(new float[] { 2.0f, -3.0f }))\n                .doReturn(new SampleSummary(new float[] { 4.0f, -5.0f }))\n                .doReturn(new SampleSummary(new float[] { 6.0f, -7.0f })).when(forest)\n                .getConditionalFieldSummary(aryEq(queryPoint), any(int[].class), anyInt(), anyDouble(),\n                        any(Boolean.class), any(Boolean.class), anyDouble(), anyInt());\n\n        forest.extrapolateBasicCyclic(result, horizon, blockSize, shingleIndex, queryPoint, missingIndexes, 1.0);\n\n        float[] expectedResult = new float[] { -3.0f, 2.0f, -5.0f, 4.0f, -7.0f, 6.0f };\n        assertArrayEquals(expectedResult, result.values);\n        // test properties of RangeVector as well\n        for (int i = 0; i < 6; i++) {\n            assert (result.upper[i] >= result.values[i]);\n            assert (result.lower[i] <= result.values[i]);\n        }\n    }\n\n    @Test\n    public void testGetNearNeighborInSample() {\n        List<Long> 
indexes1 = new ArrayList<>();\n        indexes1.add(1L);\n        indexes1.add(3L);\n\n        List<Long> indexes2 = new ArrayList<>();\n        indexes2.add(2L);\n        indexes2.add(4L);\n\n        List<Long> indexes4 = new ArrayList<>();\n        indexes4.add(1L);\n        indexes4.add(3L);\n\n        List<Long> indexes5 = new ArrayList<>();\n        indexes5.add(2L);\n        indexes5.add(4L);\n\n        Neighbor neighbor1 = new Neighbor(new float[] { 1, 2 }, 5, indexes1);\n        when(((SamplerPlusTree<?, ?>) components.get(0)).getTree().traverse(any(float[].class),\n                any(IVisitorFactory.class))).thenReturn(Optional.of(neighbor1));\n\n        Neighbor neighbor2 = new Neighbor(new float[] { 1, 2 }, 5, indexes2);\n        when(((SamplerPlusTree<?, ?>) components.get(1)).getTree().traverse(any(float[].class),\n                any(IVisitorFactory.class))).thenReturn(Optional.of(neighbor2));\n\n        when(((SamplerPlusTree<?, ?>) components.get(2)).getTree().traverse(any(float[].class),\n                any(IVisitorFactory.class))).thenReturn(Optional.empty());\n\n        Neighbor neighbor4 = new Neighbor(new float[] { 2, 3 }, 4, indexes4);\n        when(((SamplerPlusTree<?, ?>) components.get(3)).getTree().traverse(any(float[].class),\n                any(IVisitorFactory.class))).thenReturn(Optional.of(neighbor4));\n\n        Neighbor neighbor5 = new Neighbor(new float[] { 2, 3 }, 4, indexes5);\n        when(((SamplerPlusTree<?, ?>) components.get(4)).getTree().traverse(any(float[].class),\n                any(IVisitorFactory.class))).thenReturn(Optional.of(neighbor5));\n\n        for (int i = 5; i < components.size(); i++) {\n            when(((SamplerPlusTree<?, ?>) components.get(i)).getTree().traverse(any(float[].class),\n                    any(IVisitorFactory.class))).thenReturn(Optional.empty());\n        }\n\n        Whitebox.setInternalState(forest, \"storeSequenceIndexesEnabled\", true);\n\n        
doReturn(true).when(forest).isOutputReady();\n        List<Neighbor> neighbors = forest.getNearNeighborsInSample(new double[] { 0, 0 }, 5);\n\n        List<Long> expectedIndexes = Arrays.asList(1L, 2L, 3L, 4L);\n        assertEquals(2, neighbors.size());\n        assertTrue(neighbors.get(0).point[0] == 2 && neighbors.get(0).point[1] == 3);\n        assertEquals(4, neighbors.get(0).distance);\n        assertEquals(4, neighbors.get(0).sequenceIndexes.size());\n        assertThat(neighbors.get(0).sequenceIndexes, is(expectedIndexes));\n\n        assertTrue(neighbors.get(1).point[0] == 1 && neighbors.get(1).point[1] == 2);\n        assertEquals(5, neighbors.get(1).distance);\n        assertEquals(4, neighbors.get(1).sequenceIndexes.size());\n        assertThat(neighbors.get(1).sequenceIndexes, is(expectedIndexes));\n    }\n\n    @Test\n    public void testGetNearNeighborsInSampleBeforeOutputReady() {\n        assertFalse(forest.isOutputReady());\n        assertTrue(forest.getNearNeighborsInSample(new double[] { 0.1, 0.2 }, 5.0).isEmpty());\n    }\n\n    @Test\n    public void testGetNearNeighborsInSampleNoDistanceThreshold() {\n        forest.getNearNeighborsInSample(new double[] { 0.1, 0.2 });\n        verify(forest, times(1)).getNearNeighborsInSample(aryEq(new float[] { 0.1f, 0.2f }),\n                eq(Double.POSITIVE_INFINITY));\n    }\n\n    @Test\n    public void testGetNearNeighborsInSampleInvalid() {\n        assertThrows(NullPointerException.class, () -> forest.getNearNeighborsInSample((double[]) null, 101.1));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.getNearNeighborsInSample(new double[] { 1.1, 2.2 }, -101.1));\n        assertThrows(IllegalArgumentException.class,\n                () -> forest.getNearNeighborsInSample(new double[] { 1.1, 2.2 }, 0.0));\n    }\n\n    @Test\n    public void testUpdateOnSmallBoundingBox() {\n        // verifies on small bounding boxes random cuts and tree updates are functional\n        
RandomCutForest.Builder forestBuilder = RandomCutForest.builder().dimensions(1).numberOfTrees(1).sampleSize(3)\n                .timeDecay(0.5).randomSeed(0).parallelExecutionEnabled(false);\n\n        RandomCutForest forest = forestBuilder.build();\n        double[][] data = new double[][] { { 48.08 }, { 48.08000000000001 } };\n\n        for (int i = 0; i < 20000; i++) {\n            forest.update(data[i % data.length]);\n        }\n    }\n\n    @Test\n    public void testSamplersFull() {\n        long totalUpdates = sampleSize / 2;\n        when(updateCoordinator.getTotalUpdates()).thenReturn(totalUpdates);\n        assertFalse(forest.samplersFull());\n\n        totalUpdates = sampleSize;\n        when(updateCoordinator.getTotalUpdates()).thenReturn(totalUpdates);\n        assertTrue(forest.samplersFull());\n\n        totalUpdates = sampleSize * 10;\n        when(updateCoordinator.getTotalUpdates()).thenReturn(totalUpdates);\n        assertTrue(forest.samplersFull());\n    }\n\n    @Test\n    public void testGetTotalUpdates() {\n        long totalUpdates = 987654321L;\n        when(updateCoordinator.getTotalUpdates()).thenReturn(totalUpdates);\n        assertEquals(totalUpdates, forest.getTotalUpdates());\n    }\n\n    @Test\n    public void testIsOutputReady() {\n        assertFalse(forest.isOutputReady());\n\n        for (int i = 0; i < numberOfTrees / 2; i++) {\n            doReturn(true).when(components.get(i)).isOutputReady();\n        }\n        assertFalse(forest.isOutputReady());\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            doReturn(true).when(components.get(i)).isOutputReady();\n        }\n        assertFalse(forest.isOutputReady());\n        when(updateCoordinator.getTotalUpdates()).thenReturn((long) sampleSize);\n        assertTrue(forest.isOutputReady());\n\n        // After forest.isOutputReady() returns true once, the result should be cached\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            IComponentModel<?, ?> 
component = components.get(i);\n            reset(component);\n            doReturn(true).when(component).isOutputReady();\n        }\n        assertTrue(forest.isOutputReady());\n        for (int i = 0; i < numberOfTrees; i++) {\n            IComponentModel<?, ?> component = components.get(i);\n            verify(component, never()).isOutputReady();\n        }\n    }\n\n    @Test\n    public void testUpdateAfterRoundTrip() {\n        int dimensions = 10;\n        for (int trials = 0; trials < 10; trials++) {\n            RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).sampleSize(64).build();\n\n            Random r = new Random();\n            for (int i = 0; i < new Random(trials).nextInt(3000); i++) {\n                forest.update(r.ints(dimensions, 0, 50).asDoubleStream().toArray());\n            }\n\n            // serialize + deserialize\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(true);\n            RandomCutForest forest2 = mapper.toModel(mapper.toState(forest));\n\n            // update re-instantiated forest\n            for (int i = 0; i < 10000; i++) {\n                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n\n                double score = forest.getAnomalyScore(point);\n                assertEquals(score, forest2.getAnomalyScore(point), 1e-5);\n                forest2.update(point);\n                forest.update(point);\n            }\n        }\n    }\n\n    @Test\n    public void testUpdateAfterRoundTripWithPause() {\n        int dimensions = 10;\n        int shingleSize = 5;\n        for (int trials = 0; trials < 10; trials++) {\n            RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).sampleSize(64)\n                    .shingleSize(shingleSize).internalShinglingEnabled(true).build();\n            RandomCutForest reference = 
RandomCutForest.builder().dimensions(dimensions).sampleSize(64)\n                    .shingleSize(shingleSize).internalShinglingEnabled(true).build();\n\n            Random r = new Random();\n            for (int i = 0; i < new Random(trials).nextInt(3000); i++) {\n                double[] vec = r.ints(dimensions / shingleSize, 0, 50).asDoubleStream().toArray();\n                forest.update(vec);\n                reference.update(vec);\n            }\n            assertTrue(forest.isCurrentlySampling());\n            forest.pauseSampling();\n            assertFalse(forest.isCurrentlySampling());\n\n            // serialize + deserialize\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(true);\n            RandomCutForest forest2 = mapper.toModel(mapper.toState(forest));\n            assertFalse(forest2.isCurrentlySampling());\n\n            // update re-instantiated forest\n            for (int i = 0; i < 1000; i++) {\n                double[] point = r.ints(dimensions / shingleSize, 0, 50).asDoubleStream().toArray();\n\n                if (i % 100 == 0) {\n                    if (forest2.isCurrentlySampling()) {\n                        forest.pauseSampling();\n                        forest.resumeSampling();\n                    } else {\n                        forest.resumeSampling();\n                        forest2.resumeSampling();\n                    }\n                }\n                double score = forest.getAnomalyScore(point);\n                assertEquals(score, forest2.getAnomalyScore(point), 1e-5);\n                forest2.update(point);\n                forest.update(point);\n                reference.update(point);\n            }\n            assertArrayEquals(reference.transformToShingledPoint(new float[dimensions / shingleSize]),\n                    forest.transformToShingledPoint(new float[dimensions / shingleSize]), 
1e-10f);\n        }\n    }\n\n    @Test\n    public void testUpdateAfterRoundTripMediumNodeStore() {\n        int dimensions = 5;\n        for (int trials = 0; trials < 10; trials++) {\n            RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).numberOfTrees(1).sampleSize(20000)\n                    .build();\n\n            Random r = new Random();\n            for (int i = 0; i < 30000 + new Random().nextInt(300); i++) {\n                forest.update(r.ints(dimensions, 0, 50).asDoubleStream().toArray());\n            }\n\n            // serialize + deserialize\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveTreeStateEnabled(true);\n            mapper.setSaveExecutorContextEnabled(true);\n            RandomCutForestState state = mapper.toState(forest);\n            RandomCutForest forest2 = mapper.toModel(state);\n\n            // update re-instantiated forest\n            for (int i = 0; i < 10000; i++) {\n                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n                double score = forest.getAnomalyScore(point);\n                assertEquals(score, forest2.getAnomalyScore(point), 1E-10);\n                forest2.update(point);\n                forest.update(point);\n            }\n            List<ConditionalTreeSample> first = forest.getConditionalField(new float[dimensions], new int[1], 1.0);\n            List<ConditionalTreeSample> second = forest2.getConditionalField(new float[dimensions], new int[1], 1.0);\n            assertEquals(first.size(), second.size());\n            for (int i = 0; i < first.size(); i++) {\n                assertEquals(first.get(i).pointStoreIndex, second.get(i).pointStoreIndex);\n            }\n        }\n    }\n\n    @Test\n    public void testUpdateAfterRoundTripLargeNodeStore() {\n        int dimensions = 5;\n        for (int trials = 0; trials < 1; trials++) {\n            long seed = new 
Random().nextLong();\n            System.out.println(\" this seed \" + seed);\n            RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).numberOfTrees(1)\n                    .sampleSize(200000).centerOfMassEnabled(true).randomSeed(seed).build();\n\n            Random r = new Random(seed);\n            for (int i = 0; i < 300000 + new Random().nextInt(300); i++) {\n                forest.update(r.ints(dimensions, 0, 50).asDoubleStream().toArray());\n            }\n\n            // serialize + deserialize\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveTreeStateEnabled(true);\n            mapper.setSaveExecutorContextEnabled(true);\n            RandomCutForestState state = mapper.toState(forest);\n            RandomCutForest forest2 = mapper.toModel(state);\n            assert (forest2.isCenterOfMassEnabled());\n            // update re-instantiated forest\n            for (int i = 0; i < 10000; i++) {\n                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n                double score = forest.getAnomalyScore(point);\n                assertEquals(score, forest2.getAnomalyScore(point), 1E-10);\n                forest2.update(point);\n                forest.update(point);\n            }\n            List<ICluster<float[]>> first = forest.summarize(10, 1, 1, null);\n            System.out.println(\"DONE 1\");\n            List<ICluster<float[]>> second = forest2.summarize(10, 1, 1, null);\n            assert (abs(first.size() - second.size()) < 1);\n        }\n    }\n\n    @Test\n    public void testInternalShinglingRotated() {\n        RandomCutForest forest = new RandomCutForest.Builder<>().internalShinglingEnabled(true)\n                .internalRotationEnabled(true).shingleSize(2).dimensions(4).numberOfTrees(1).build();\n        assertThrows(IllegalArgumentException.class, () -> forest.update(new double[] { 0 }));\n        forest.update(new double[] { 
0.0, -0.0 });\n        assertArrayEquals(forest.lastShingledPoint(), new float[] { 0, 0, 0, 0 });\n        forest.update(new double[] { 1.0, -1.0 });\n        assertArrayEquals(forest.transformIndices(new int[] { 0, 1 }, 2), new int[] { 0, 1 });\n        forest.update(new double[] { 2.0, -2.0 });\n        assertEquals(forest.nextSequenceIndex(), 3);\n        assertArrayEquals(forest.lastShingledPoint(), new float[] { 2, -2, 1, -1 });\n        assertArrayEquals(forest.transformToShingledPoint(new float[] { 7, 8 }), new float[] { 2, -2, 7, 8 });\n        assertArrayEquals(forest.transformIndices(new int[] { 0, 1 }, 2), new int[] { 2, 3 });\n        assertThrows(IllegalArgumentException.class, () -> forest.update(new double[] { 0, 0, 0, 0 }));\n    }\n\n    @Test\n    public void testComponents() {\n        RandomCutForest forest = new RandomCutForest.Builder<>().dimensions(2).sampleSize(10).numberOfTrees(2).build();\n\n        for (IComponentModel model : forest.getComponents()) {\n            assertEquals(model.getConfig(Config.BOUNDING_BOX_CACHE_FRACTION), 1.0);\n            model.getConfig(Config.TIME_DECAY);\n            assertEquals(model.getConfig(Config.TIME_DECAY), 1.0 / 100);\n            assertThrows(IllegalArgumentException.class, () -> model.getConfig(\"foo\"));\n            assertThrows(IllegalArgumentException.class, () -> model.setConfig(\"bar\", 0));\n        }\n    }\n\n    @Test\n    public void testOutOfOrderUpdate() {\n        RandomCutForest forest = new RandomCutForest.Builder<>().dimensions(2).sampleSize(10).numberOfTrees(2).build();\n        forest.setTimeDecay(100); // will act almost like a sliding window buffer\n        forest.setBoundingBoxCacheFraction(0.2);\n        forest.update(new double[] { 20.0, -20.0 }, 20);\n        forest.update(new double[] { 0.0, -0.0 }, 0);\n        assertEquals(forest.getNearNeighborsInSample(new double[] { 0.0, -0.0 }, 1).size(), 1);\n        for (int i = 1; i < 19; i++) {\n            forest.update(new 
double[] { i, -i }, i);\n        }\n        // the {0,0} point should be flushed out\n        assertEquals(forest.getNearNeighborsInSample(new double[] { 0.0, -0.0 }, 1).size(), 0);\n        // the {20,-20} point is present still\n        assertEquals(forest.getNearNeighborsInSample(new double[] { 20.0, -20.0 }, 1).size(), 1);\n    }\n\n    @Test\n    public void testFloatingPointRandomCut() {\n        int dimensions = 16;\n        int numberOfTrees = 41;\n        int sampleSize = 64;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        int dataSize = 4000 * sampleSize;\n        double[][] big = generateShingledData(dataSize, dimensions, 2);\n        RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).numberOfTrees(numberOfTrees)\n                .sampleSize(sampleSize).randomSeed(seed).boundingBoxCacheFraction(1.0).build();\n\n        int num = 0;\n        for (double[] point : big) {\n            forest.update(point);\n        }\n    }\n\n    public static double[][] generateShingledData(int size, int dimensions, long seed) {\n        double[][] answer = new double[size][];\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[dimensions];\n        int count = 0;\n        double[] data = getDataD(size + dimensions - 1, 100, 5, seed);\n        for (int j = 0; j < size + dimensions - 1; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % dimensions;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n                // System.out.println(\"Adding \" + j);\n                answer[count++] = getShinglePoint(history, entryIndex, dimensions);\n            }\n        }\n        return answer;\n    }\n\n    private static double[] getShinglePoint(double[] recentPointsSeen, int 
indexOfOldestPoint, int shingleLength) {\n        double[] shingledPoint = new double[shingleLength];\n        int i = 0;\n        for (int j = 0; j < shingleLength; ++j) {\n            double point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];\n            shingledPoint[i++] = point;\n\n        }\n        return shingledPoint;\n    }\n\n    static double[] getDataD(int num, double amplitude, double noise, long seed) {\n\n        double[] data = new double[num];\n        Random noiseprg = new Random(seed);\n        for (int i = 0; i < num; i++) {\n            data[i] = amplitude * Math.cos(2 * PI * (i + 50) / 1000) + noise * noiseprg.nextDouble();\n        }\n\n        return data;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/SampleSummaryTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\nimport java.util.function.Function;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.MethodSource;\n\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.summarization.Center;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.MultiCenter;\nimport 
com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.amazon.randomcutforest.util.Weighted;\n\n@Tag(\"functional\")\npublic class SampleSummaryTest {\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n\n    private static int dataSize;\n\n    @Test\n    public void configAndAbsorbTest() {\n\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        dataSize = 2000;\n        Summarizer summarizer = new Summarizer();\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();\n        ArrayList<Weighted<Integer>> refs = new ArrayList<>();\n\n        int count = 0;\n        for (float[] point : points) {\n            // testing 0 weight\n            weighted.add(new Weighted<>(point, 0.0f));\n            refs.add(new Weighted<Integer>(count, 0.0f));\n            ++count;\n        }\n\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 500, 10 * newDimensions,\n                false, Summarizer::L2distance, random.nextInt(), false));\n        BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> MultiCenter.initialize(a, b, 0.8,\n                3);\n        Function<Integer, float[]> getPoint = (i) -> weighted.get(i).index;\n\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 500, 10 * newDimensions, 1,\n                false, 0.1, Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> 
Summarizer.summarize(weighted, 50, 10, false, Summarizer::L2distance, random.nextInt(), false));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 50, 10, 1, false, 0.1,\n                Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 50, 10 * newDimensions, 0,\n                false, 0.1, Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 50, 10 * newDimensions, 100,\n                false, 0.1, Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 0,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 7,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1, Collections.emptyList(), getPoint,\n                        Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, false,\n                Summarizer::L2distance, random.nextInt(), false));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, 1,\n                false, 0.1, 
Summarizer::L2distance, clusterInitializer, 0, false, null));\n        Weighted<float[]> a = weighted.get(0);\n        a.weight = -1;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, 1,\n                false, 0.1, Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, false,\n                Summarizer::L2distance, random.nextInt(), false));\n        a.weight = Float.NaN;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, 1,\n                false, 0.1, Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, false,\n                Summarizer::L2distance, random.nextInt(), false));\n        a.weight = Float.POSITIVE_INFINITY;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, 1,\n                false, 0.1, Summarizer::L2distance, clusterInitializer, 0, false, null));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.summarize(weighted, 5, 10 * newDimensions, false,\n                Summarizer::L2distance, random.nextInt(), false));\n        a.weight = 1;\n        assertDoesNotThrow(() -> Summarizer.summarize(weighted, 5, 10 * newDimensions, false, Summarizer::L2distance,\n                random.nextInt(), false));\n        assertDoesNotThrow(() -> Summarizer.summarize(weighted, 5, 10 * newDimensions, 1, false, 0.1,\n                Summarizer::L2distance, clusterInitializer, 0, false, null));\n\n        refs.get(0).weight = -1;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 
0, false, false, 0.1, null));\n        refs.get(0).weight = Float.POSITIVE_INFINITY;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        refs.get(0).weight = Float.NaN;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        refs.get(0).weight = 0;\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1,\n                refs, getPoint, Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n        refs.get(0).weight = 1;\n        assertDoesNotThrow(() -> Summarizer.iterativeClustering(5, 10 * newDimensions, 1, refs, getPoint,\n                Summarizer::L2distance, clusterInitializer, 0, false, false, 0.1, null));\n\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.assignAndRecompute(refs, getPoint,\n                Collections.emptyList(), Summarizer::L2distance, false));\n        List<ICluster<float[]>> list = new ArrayList<>();\n        list.add(clusterInitializer.apply(new float[newDimensions], 1f));\n        assertThrows(IllegalArgumentException.class, () -> Summarizer.assignAndRecompute(Collections.emptyList(),\n                getPoint, list, Summarizer::L2distance, false));\n        assertDoesNotThrow(() -> Summarizer.assignAndRecompute(refs, getPoint, list, Summarizer::L2distance, false));\n        assertArrayEquals(list.get(0).primaryRepresentative(Summarizer::L2distance), new float[newDimensions], 1e-6f);\n\n        float[] newPoint = new float[newDimensions];\n        Arrays.fill(newPoint, 1.01f);\n        list.get(0).absorb(clusterInitializer.apply(newPoint, 1f), Summarizer::L2distance);\n        
BiFunction<float[], float[], Double> badDistance = mock();\n        when(badDistance.apply(any(), any())).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class,\n                () -> Summarizer.assignAndRecompute(refs, getPoint, list, badDistance, false));\n    }\n\n    @Test\n    public void TestMultiCenter() {\n        BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> MultiCenter.initialize(a, b, 0.8,\n                3);\n        Function<Integer, float[]> getPoint = (i) -> {\n            return new float[1];\n        };\n        ICluster<float[]> newCluster = clusterInitializer.apply(new float[1], 1f);\n        float[] newPoint = new float[] { 1 };\n        BiFunction<float[], float[], Double> badDistance = mock();\n        when(badDistance.apply(any(), any())).thenReturn(-1.0);\n        ICluster<float[]> cluster = clusterInitializer.apply(new float[1], 1.0f);\n        ICluster<float[]> another = clusterInitializer.apply(new float[1], 1.0f);\n        assertThrows(IllegalArgumentException.class, () -> cluster.absorb(another, badDistance));\n        when(badDistance.apply(any(), any())).thenReturn(-1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> cluster.distance(new float[1], badDistance));\n        assertThrows(IllegalArgumentException.class, () -> cluster.absorb(another, badDistance));\n\n        newCluster.absorb(clusterInitializer.apply(newPoint, 1f), Summarizer::L2distance);\n        when(badDistance.apply(any(), any())).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster.absorb(another, badDistance));\n\n        ICluster<float[]> newCluster2 = clusterInitializer.apply(new float[1], 1f);\n        newCluster2.absorb(clusterInitializer.apply(newPoint, 1f), Summarizer::L2distance);\n        when(badDistance.apply(any(), any())).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0)\n        
        .thenReturn(1.0);\n        newCluster2.absorb(clusterInitializer.apply(newPoint, 1f), badDistance);\n        when(badDistance.apply(any(), any())).thenReturn(1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster2.distance(new float[1], badDistance));\n        another.absorb(clusterInitializer.apply(newPoint, 1f), Summarizer::L2distance);\n        when(badDistance.apply(any(), any())).thenReturn(-1.0).thenReturn(1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster2.distance(another, badDistance));\n        // error at a different location\n        assertThrows(IllegalArgumentException.class, () -> newCluster2.distance(another, badDistance));\n        when(badDistance.apply(any(), any())).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0)\n                .thenReturn(1.0).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0).thenReturn(1.0)\n                .thenReturn(1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster2.absorb(another, badDistance));\n\n        ICluster<float[]> newCluster3 = MultiCenter.initialize(new float[1], 0f, 0, 1);\n        assertEquals(newCluster3.recompute(getPoint, false, Summarizer::L2distance), 0);\n        assertEquals(newCluster3.recompute(getPoint, true, Summarizer::L2distance), 0);\n        newCluster3.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        assertEquals(newCluster3.recompute(getPoint, true, Summarizer::L2distance), 0);\n\n        ICluster<float[]> newCluster4 = MultiCenter.initialize(new float[1], 1f, 0, 1);\n        when(badDistance.apply(any(), any())).thenReturn(-1.0).thenReturn(-1.0);\n        newCluster4.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        assertThrows(IllegalArgumentException.class, () -> newCluster4.recompute(getPoint, true, badDistance));\n        assertThrows(IllegalArgumentException.class, () -> newCluster4.absorb(newCluster3, 
badDistance));\n\n    }\n\n    @Test\n    public void testCenter() {\n        int newDimensions = 1;\n        Function<Integer, float[]> getPoint = (i) -> {\n            return new float[1];\n        };\n        BiFunction<float[], float[], Double> badDistance = mock();\n        ICluster<float[]> newCluster5 = Center.initialize(new float[newDimensions], 0f);\n        assertEquals(newCluster5.extentMeasure(), newCluster5.averageRadius());\n        assertEquals(newCluster5.recompute(getPoint, true, Summarizer::L2distance), 0);\n        newCluster5.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        assertEquals(newCluster5.recompute(getPoint, true, Summarizer::L2distance), 0);\n        when(badDistance.apply(any(), any())).thenReturn(-1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster5.distance(new float[1], badDistance));\n\n        ICluster<float[]> newCluster6 = Center.initialize(new float[newDimensions], 10f);\n        newCluster6.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        newCluster6.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        when(badDistance.apply(any(), any())).thenReturn(-1.0).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster6.absorb(newCluster5, badDistance));\n        assertThrows(IllegalArgumentException.class, () -> newCluster6.recompute(getPoint, true, badDistance));\n        ICluster<float[]> multiCenter1 = MultiCenter.initialize(new float[] { 1 }, 5.0f, 0.8, 2);\n        ICluster<float[]> multiCenter2 = MultiCenter.initialize(new float[] { 2 }, 5.0f, 0.8, 2);\n        multiCenter1.absorb(multiCenter2, Summarizer::L2distance); // weight 10\n        newCluster6.absorb(multiCenter1, Summarizer::L2distance);\n        assertEquals(newCluster6.primaryRepresentative(Summarizer::L2distance)[0], 0.5, 1e-6f);\n\n        ICluster<float[]> newCluster7 = Center.initialize(new float[newDimensions], -10f);\n        
newCluster7.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        newCluster7.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        when(badDistance.apply(any(), any())).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster7.recompute(getPoint, true, badDistance));\n\n        ICluster<float[]> newCluster8 = Center.initialize(new float[newDimensions], 1.9f);\n        newCluster8.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        newCluster8.getAssignedPoints().add(new Weighted<>(1, 1.0f));\n        when(badDistance.apply(any(), any())).thenReturn(-1.0);\n        assertThrows(IllegalArgumentException.class, () -> newCluster8.recompute(getPoint, true, badDistance));\n    }\n\n    @Test\n    public void zeroTest() {\n        Random random = new Random(0);\n        dataSize = 2000;\n\n        float[][] points = new float[dataSize][];\n        for (int y = 0; y < dataSize; y++) {\n            points[y] = new float[] { (float) (random.nextInt(100) + 0.5 * random.nextDouble()) };\n        }\n\n        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();\n        ArrayList<Weighted<Integer>> refs = new ArrayList<>();\n        Function<Integer, float[]> getPoint = (x) -> weighted.get(x).index;\n        int count = 0;\n        for (float[] point : points) {\n            // testing 0 weight\n            weighted.add(new Weighted<>(point, 1.0f));\n            refs.add(new Weighted<Integer>(count, 1.0f));\n            ++count;\n        }\n        BiFunction<float[], Float, ICluster<float[]>> clusterInitializer = (a, b) -> Center.initialize(a, b);\n        List<ICluster<float[]>> list = new ArrayList<>();\n        for (int y = 0; y < 200; y++) {\n            list.add(clusterInitializer.apply(new float[] { -1.0f }, 1.0f));\n        }\n        assertDoesNotThrow(() -> Summarizer.iterativeClustering(100, 0, 1, refs, getPoint, Summarizer::L2distance,\n                clusterInitializer, 0, false, true, 0.1, list));\n    }\n\n 
   @ParameterizedTest\n    @MethodSource(\"generateArguments\")\n    public void SummaryTest(BiFunction<float[], float[], Double> distance) {\n\n        int over = 0;\n        int under = 0;\n\n        for (int numTrials = 0; numTrials < 20; numTrials++) {\n            long seed = new Random().nextLong();\n            Random random = new Random(seed);\n            int newDimensions = random.nextInt(10) + 3;\n            dataSize = 200000;\n\n            float[][] points = getData(dataSize, newDimensions, random.nextInt(), distance);\n\n            SampleSummary summary = Summarizer.summarize(points, 5 * newDimensions, 10 * newDimensions, false, distance,\n                    random.nextInt(), false);\n            System.out.println(\"trial \" + numTrials + \" : \" + summary.summaryPoints.length + \" clusters for \"\n                    + newDimensions + \" dimensions, seed : \" + seed);\n            if (summary.summaryPoints.length < 2 * newDimensions) {\n                ++under;\n            } else if (summary.summaryPoints.length > 2 * newDimensions) {\n                ++over;\n            }\n        }\n        assert (under <= 1);\n        assert (over <= 1);\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"generateArguments\")\n    public void ParallelTest(BiFunction<float[], float[], Double> distance) {\n\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        dataSize = 200000;\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), distance);\n        System.out.println(\"checking seed : \" + seed);\n        int nextSeed = random.nextInt();\n        SampleSummary summary1 = Summarizer.summarize(points, 5 * newDimensions, 10 * newDimensions, false, distance,\n                nextSeed, false);\n        SampleSummary summary2 = Summarizer.summarize(points, 5 * newDimensions, 10 * newDimensions, false, distance,\n                nextSeed, 
true);\n\n        ArrayList<Weighted<float[]>> pointList = new ArrayList<>();\n        for (float[] point : points) {\n            pointList.add(new Weighted<>(point, 1.0f));\n        }\n        List<ICluster<float[]>> clusters = Summarizer.singleCentroidSummarize(pointList, 5 * newDimensions,\n                10 * newDimensions, 1, true, distance, nextSeed, false, null);\n        assertEquals(summary2.weightOfSamples, summary1.weightOfSamples, \" sampling inconsistent\");\n        assertEquals(summary2.summaryPoints.length, summary1.summaryPoints.length,\n                \" incorrect length of typical points\");\n        // due to randomization, they might not equal\n        assertTrue(Math.abs(clusters.size() - summary1.summaryPoints.length) <= 1,\n                \"The difference between clusters.size() and summary1.summaryPoints.length should be at most 1\");\n        double total = clusters.stream().map(ICluster::getWeight).reduce(0.0, Double::sum);\n        assertEquals(total, summary1.weightOfSamples, 1e-3);\n        // parallelization can produce reordering of merges\n    }\n\n    @Test\n    public void SampleSummaryTestL2() {\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        dataSize = 200000;\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n        System.out.println(\"checking L2 seed : \" + seed);\n        int nextSeed = random.nextInt();\n        ArrayList<Weighted<float[]>> pointList = new ArrayList<>();\n        for (float[] point : points) {\n            pointList.add(new Weighted<>(point, 1.0f));\n        }\n        SampleSummary summary1 = Summarizer.summarize(points, 5 * newDimensions, 20 * newDimensions, false,\n                Summarizer::L2distance, nextSeed, false);\n        SampleSummary summary2 = Summarizer.l2summarize(points, 5 * newDimensions, nextSeed);\n        SampleSummary 
summary3 = Summarizer.l2summarize(pointList, 5 * newDimensions, 20 * newDimensions, false,\n                nextSeed);\n\n        assertEquals(summary2.weightOfSamples, summary1.weightOfSamples, \" sampling inconsistent\");\n        assertEquals(summary3.weightOfSamples, summary1.weightOfSamples, \" sampling inconsistent\");\n        assertEquals(summary2.summaryPoints.length, summary1.summaryPoints.length,\n                \" incorrect length of typical points\");\n        assertEquals(summary3.summaryPoints.length, summary1.summaryPoints.length,\n                \" incorrect length of typical points\");\n        for (int i = 0; i < summary2.summaryPoints.length; i++) {\n            assertArrayEquals(summary1.summaryPoints[i], summary2.summaryPoints[i], 1e-6f);\n            assertArrayEquals(summary1.summaryPoints[i], summary3.summaryPoints[i], 1e-6f);\n            assertEquals(summary1.relativeWeight[i], summary2.relativeWeight[i], 1e-6f);\n            assertEquals(summary1.relativeWeight[i], summary3.relativeWeight[i], 1e-6f);\n        }\n    }\n\n    @Test\n    public void IdempotenceTestL2() {\n\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        dataSize = 200000;\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n        System.out.println(\"checking idempotence L2 seed : \" + seed);\n        int nextSeed = random.nextInt();\n        ArrayList<Weighted<float[]>> pointList = new ArrayList<>();\n        for (float[] point : points) {\n            pointList.add(new Weighted<>(point, 1.0f));\n        }\n        List<ICluster<float[]>> clusters = Summarizer.singleCentroidSummarize(pointList, 5 * newDimensions,\n                20 * newDimensions, 1, true, Summarizer::L2distance, nextSeed, false, null);\n        List<ICluster<float[]>> clusters2 = Summarizer.singleCentroidSummarize(pointList, 5 * 
newDimensions,\n                20 * newDimensions, 1, true, Summarizer::L2distance, nextSeed, false, clusters);\n        assertEquals(clusters.size(), clusters2.size(), \" incorrect sizes\");\n        for (int i = 0; i < clusters.size(); i++) {\n            // note clusters can have same weight and get permuted\n            assertEquals(clusters.get(i).getWeight(), clusters2.get(i).getWeight());\n        }\n        clusters.sort(Comparator.comparingDouble(ICluster::extentMeasure));\n        clusters2.sort(Comparator.comparingDouble(ICluster::extentMeasure));\n        assertEquals(clusters.size(), clusters2.size(), \" incorrect sizes\");\n        for (int i = 0; i < clusters.size(); i++) {\n            // note clusters can have same weight and get permuted\n            assertEquals(clusters.get(i).extentMeasure(), clusters2.get(i).extentMeasure());\n            assertEquals(clusters.get(i).averageRadius(), clusters2.get(i).averageRadius());\n            assertEquals(clusters.get(i).averageRadius(), clusters.get(i).extentMeasure());\n        }\n    }\n\n    public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 0.0;\n        anomalySigma = 1.0;\n        transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);\n        float[][] floatData = new float[dataSize][];\n\n        float[] allZero = new float[newDimensions];\n        float[] sigma = new float[newDimensions];\n        Arrays.fill(sigma, 1f);\n        double scale = distance.apply(allZero, sigma);\n\n   
     for (int i = 0; i < dataSize; i++) {\n            // shrink, shift at random\n            int nextD = prg.nextInt(newDimensions);\n            for (int j = 0; j < newDimensions; j++) {\n                data[i][j] *= 1.0 / (3.0);\n                // standard deviation adds up across dimension; taking square root\n                // and using s 3 sigma ball\n                if (j == nextD) {\n                    if (prg.nextDouble() < 0.5)\n                        data[i][j] += 2.0 * scale;\n                    else\n                        data[i][j] -= 2.0 * scale;\n                }\n            }\n            floatData[i] = toFloatArray(data[i]);\n        }\n\n        return floatData;\n    }\n\n    private static Stream<Arguments> generateArguments() {\n        return Stream.of(Arguments.of((BiFunction<float[], float[], Double>) Summarizer::L1distance),\n                Arguments.of((BiFunction<float[], float[], Double>) Summarizer::L2distance),\n                Arguments.of((BiFunction<float[], float[], Double>) Summarizer::LInfinitydistance));\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/TestUtils.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.function.Function;\nimport java.util.stream.Collector;\n\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\nimport com.amazon.randomcutforest.tree.INodeView;\nimport com.amazon.randomcutforest.tree.RandomCutTree;\n\npublic class TestUtils {\n    public static final double EPSILON = 1e-6;\n\n    /**\n     * Return a visitor that does nothing.\n     */\n    public static final VisitorFactory<Double> DUMMY_GENERIC_VISITOR_FACTORY = new VisitorFactory<Double>(\n            (tree, x) -> new Visitor<Double>() {\n                @Override\n                public void accept(INodeView node, int depthOfNode) {\n                }\n\n                @Override\n                public Double getResult() {\n                    return Double.NaN;\n                }\n            });\n\n    /**\n     * Return a multi-visitor that does nothing.\n     */\n    public static final Function<RandomCutTree, MultiVisitor<Double>> DUMMY_MULTI_VISITOR_FACTORY = tree -> new MultiVisitor<Double>() {\n        @Override\n        public void accept(INodeView node, int depthOfNode) {\n        }\n\n        @Override\n        public Double getResult() {\n            return Double.NaN;\n        }\n\n        @Override\n        public boolean 
trigger(INodeView node) {\n            return false;\n        }\n\n        @Override\n        public MultiVisitor<Double> newPartialCopy() {\n            return null;\n        }\n\n        @Override\n        public void combine(MultiVisitor<Double> other) {\n        }\n    };\n\n    /**\n     * A collector that accumulates values into a sorted list.\n     */\n    public static final Collector<Double, List<Double>, List<Double>> SORTED_LIST_COLLECTOR = Collector\n            .of(ArrayList::new, List::add, (left, right) -> {\n                left.addAll(right);\n                return left;\n            }, list -> {\n                list.sort(Double::compare);\n                return list;\n            });\n\n    /**\n     * Return a converging accumulator that converges after seeing numberOfEntries\n     * values. The returned value is the sum of all accepted values.\n     * \n     * @param numberOfEntries The number of entries that need to be accepted for\n     *                        this accumulator to converge.\n     * @return a new converging accumulator that converges after seeing\n     *         numberOfEntries values.\n     */\n    public static ConvergingAccumulator<Double> convergeAfter(int numberOfEntries) {\n        return new ConvergingAccumulator<Double>() {\n            private int valuesAccepted = 0;\n            private double total = 0.0;\n\n            @Override\n            public void accept(Double value) {\n                valuesAccepted++;\n                total += value;\n            }\n\n            @Override\n            public boolean isConverged() {\n                return valuesAccepted >= numberOfEntries;\n            }\n\n            @Override\n            public int getValuesAccepted() {\n                return valuesAccepted;\n            }\n\n            @Override\n            public Double getAccumulatedValue() {\n                return total;\n            }\n        };\n    }\n\n    /**\n     * Return a multi-visitor that does 
nothing.\n     */\n    public static final MultiVisitorFactory<Double> DUMMY_GENERIC_MULTI_VISITOR_FACTORY = new MultiVisitorFactory<>(\n            (tree, y) -> new MultiVisitor<Double>() {\n                @Override\n                public void accept(INodeView node, int depthOfNode) {\n                }\n\n                @Override\n                public Double getResult() {\n                    return Double.NaN;\n                }\n\n                @Override\n                public boolean trigger(INodeView node) {\n                    return false;\n                }\n\n                @Override\n                public MultiVisitor<Double> newPartialCopy() {\n                    return null;\n                }\n\n                @Override\n                public void combine(MultiVisitor<Double> other) {\n                }\n            });\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/AnomalyAttributionVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport static com.amazon.randomcutforest.CommonUtils.defaultScalarNormalizerFunction;\nimport static com.amazon.randomcutforest.CommonUtils.defaultScoreUnseenFunction;\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.tree.BoundingBox;\nimport com.amazon.randomcutforest.tree.INodeView;\nimport com.amazon.randomcutforest.tree.NodeView;\n\npublic class AnomalyAttributionVisitorTest {\n\n    @Test\n    public void testNew() {\n        float[] point = new float[] { 1.1f, -2.2f, 3.3f };\n        int treeMass = 99;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass);\n\n        
assertFalse(visitor.pointInsideBox);\n        for (int i = 0; i < point.length; i++) {\n            assertFalse(visitor.coordInsideBox[i]);\n        }\n\n        assertFalse(visitor.ignoreLeaf);\n        assertEquals(0, visitor.ignoreLeafMassThreshold);\n        DiVector result = visitor.getResult();\n        double[] zero = new double[point.length];\n        assertArrayEquals(zero, result.high);\n        assertArrayEquals(zero, result.low);\n    }\n\n    @Test\n    public void testNewWithIgnoreOptions() {\n        float[] point = new float[] { 1.1f, -2.2f, 3.3f };\n        int treeMass = 99;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass, 7);\n\n        assertFalse(visitor.pointInsideBox);\n        for (int i = 0; i < point.length; i++) {\n            assertFalse(visitor.coordInsideBox[i]);\n        }\n\n        assertTrue(visitor.ignoreLeaf);\n        assertEquals(7, visitor.ignoreLeafMassThreshold);\n        DiVector result = visitor.getResult();\n        double[] zero = new double[point.length];\n        assertArrayEquals(zero, result.high);\n        assertArrayEquals(zero, result.low);\n    }\n\n    @Test\n    public void testAcceptLeafEquals() {\n        float[] point = { 1.1f, -2.2f, 3.3f };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafDepth = 100;\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        int treeMass = 21;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass, 0);\n        visitor.acceptLeaf(leafNode, leafDepth);\n\n        assertTrue(visitor.hitDuplicates);\n        assertEquals(visitor.sumOfNewRange, 0);\n        double expectedScoreSum = CommonUtils.defaultDampFunction(leafMass, treeMass)\n                / (leafDepth + Math.log(leafMass + 1) / 
Math.log(2));\n        double expectedScore = expectedScoreSum / (2 * point.length);\n        DiVector result = visitor.getResult();\n        for (int i = 0; i < point.length; i++) {\n            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.low[i], EPSILON);\n            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.high[i], EPSILON);\n        }\n    }\n\n    @Test\n    public void testAcceptLeafNotEquals() {\n        float[] point = new float[] { 1.1f, -2.2f, 3.3f };\n        float[] anotherPoint = new float[] { -4.0f, 5.0f, 6.0f };\n\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(anotherPoint);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(anotherPoint, anotherPoint));\n        int leafDepth = 100;\n        int leafMass = 4;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        int treeMass = 21;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(point, treeMass, 0);\n        visitor.acceptLeaf(leafNode, leafDepth);\n\n        double expectedScoreSum = defaultScoreUnseenFunction(leafDepth, leafMass);\n        double sumOfNewRange = (1.1 - (-4.0)) + (5.0 - (-2.2)) + (6.0 - 3.3);\n\n        DiVector result = visitor.getResult();\n        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (1.1 - (-4.0)) / sumOfNewRange, treeMass),\n                result.high[0], EPSILON);\n        assertEquals(0.0, result.low[0]);\n        assertEquals(0.0, result.high[1]);\n        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (5.0 - (-2.2)) / sumOfNewRange, treeMass),\n                result.low[1], EPSILON);\n        assertEquals(0.0, result.high[2]);\n        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (6.0 - 3.3) / sumOfNewRange, treeMass),\n                result.low[2], EPSILON);\n\n        visitor = new AnomalyAttributionVisitor(point, treeMass, 
3);\n        visitor.acceptLeaf(leafNode, leafDepth);\n        result = visitor.getResult();\n        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (1.1 - (-4.0)) / sumOfNewRange, treeMass),\n                result.high[0], EPSILON);\n        assertEquals(0.0, result.low[0]);\n        assertEquals(0.0, result.high[1]);\n        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (5.0 - (-2.2)) / sumOfNewRange, treeMass),\n                result.low[1], EPSILON);\n        assertEquals(0.0, result.high[2]);\n        assertEquals(defaultScalarNormalizerFunction(expectedScoreSum * (6.0 - 3.3) / sumOfNewRange, treeMass),\n                result.low[2], EPSILON);\n\n        visitor = new AnomalyAttributionVisitor(point, treeMass, 4);\n        visitor.acceptLeaf(leafNode, leafDepth);\n        double expectedScore = expectedScoreSum / (2 * point.length);\n        result = visitor.getResult();\n        for (int i = 0; i < point.length; i++) {\n            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.low[i], EPSILON);\n            assertEquals(defaultScalarNormalizerFunction(expectedScore, treeMass), result.high[i], EPSILON);\n        }\n    }\n\n    @Test\n    public void testAccept() {\n        float[] pointToScore = { 0.0f, 0.0f };\n        int treeMass = 50;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(pointToScore, treeMass, 0);\n\n        INodeView leafNode = mock(NodeView.class);\n        float[] point = new float[] { 1.0f, -2.0f };\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafMass = 3;\n        when(leafNode.getMass()).thenReturn(leafMass);\n        int depth = 4;\n        visitor.acceptLeaf(leafNode, depth);\n        DiVector result = visitor.getResult();\n\n        double expectedScoreSum = defaultScoreUnseenFunction(depth, leafNode.getMass());\n        double 
sumOfNewRange = 1.0 + 2.0;\n\n        double[] expectedUnnormalizedLow = new double[] { expectedScoreSum * 1.0 / sumOfNewRange, 0.0 };\n        double[] expectedUnnormalizedHigh = new double[] { 0.0, expectedScoreSum * 2.0 / sumOfNewRange };\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedLow[i], treeMass), result.low[i], EPSILON);\n            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedHigh[i], treeMass), result.high[i],\n                    EPSILON);\n        }\n\n        // parent does not contain pointToScore\n\n        depth--;\n        INodeView sibling = mock(NodeView.class);\n        int siblingMass = 2;\n        when(sibling.getMass()).thenReturn(siblingMass);\n        INodeView parent = mock(NodeView.class);\n        int parentMass = leafMass + siblingMass;\n        when(parent.getMass()).thenReturn(parentMass);\n        BoundingBox boundingBox = new BoundingBox(point, new float[] { 2.0f, -0.5f });\n        when(parent.getBoundingBox()).thenReturn(boundingBox);\n        visitor.accept(parent, depth);\n        result = visitor.getResult();\n\n        double expectedSumOfNewRange2 = 2.0 + 2.0;\n        double expectedProbOfCut2 = (1.0 + 0.5) / expectedSumOfNewRange2;\n        double[] expectedDifferenceInRangeVector2 = { 0.0, 1.0, 0.5, 0.0 };\n\n        double expectedScore2 = defaultScoreUnseenFunction(depth, parent.getMass());\n        double[] expectedUnnormalizedLow2 = new double[pointToScore.length];\n        double[] expectedUnnormalizedHigh2 = new double[pointToScore.length];\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            double prob = expectedDifferenceInRangeVector2[2 * i] / expectedSumOfNewRange2;\n            expectedUnnormalizedHigh2[i] = prob * expectedScore2\n                    + (1 - expectedProbOfCut2) * expectedUnnormalizedHigh[i];\n\n            prob = expectedDifferenceInRangeVector2[2 * i + 1] / 
expectedSumOfNewRange2;\n            expectedUnnormalizedLow2[i] = prob * expectedScore2 + (1 - expectedProbOfCut2) * expectedUnnormalizedLow[i];\n        }\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedLow2[i], treeMass), result.low[i],\n                    EPSILON);\n            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedHigh2[i], treeMass), result.high[i],\n                    EPSILON);\n        }\n\n        // grandparent contains pointToScore\n\n        assertFalse(visitor.pointInsideBox);\n\n        depth--;\n        INodeView grandParent = mock(NodeView.class);\n\n        when(grandParent.getMass()).thenReturn(parentMass + 2);\n        when(grandParent.getBoundingBox()).thenReturn(boundingBox\n                .getMergedBox(new BoundingBox(new float[] { -1.0f, 1.0f }).getMergedBox(new float[] { -0.5f, -1.5f })));\n        visitor.accept(grandParent, depth);\n        result = visitor.getResult();\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedLow2[i], treeMass), result.low[i],\n                    EPSILON);\n            assertEquals(defaultScalarNormalizerFunction(expectedUnnormalizedHigh2[i], treeMass), result.high[i],\n                    EPSILON);\n        }\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 3, 5 })\n    public void reNormalizeNotEqual(int mass) {\n        float[] pointToScore = { 0.0f, 0.0f };\n        int treeMass = 50;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(pointToScore, treeMass, 4);\n\n        INodeView leafNode = mock(NodeView.class);\n        float[] point = new float[] { 1.0f, -2.0f };\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafMass = mass;\n        
when(leafNode.getMass()).thenReturn(leafMass);\n        visitor.acceptLeaf(leafNode, 1);\n        INodeView parent = mock(NodeView.class);\n        int parentMass = leafMass + 2;\n        when(parent.getMass()).thenReturn(parentMass);\n        BoundingBox boundingBox = new BoundingBox(point, new float[] { 2.0f, 2.0f });\n        when(parent.getBoundingBox()).thenReturn(boundingBox);\n        when(parent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(new float[] { 2.0f, 2.0f }));\n        visitor.accept(parent, 0);\n        DiVector result = visitor.directionalAttribution;\n        assertEquals(result.getHighLowSum(), visitor.savedScore, 1e-6);\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 3, 5 })\n    public void reNormalize(int mass) {\n        float[] pointToScore = { 0.0f, 0.0f };\n        int treeMass = 50;\n        AnomalyAttributionVisitor visitor = new AnomalyAttributionVisitor(pointToScore, treeMass, 4);\n\n        INodeView leafNode = mock(NodeView.class);\n        float[] point = pointToScore;\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafMass = mass;\n        when(leafNode.getMass()).thenReturn(leafMass);\n        visitor.acceptLeaf(leafNode, 1);\n        INodeView parent = mock(NodeView.class);\n        int parentMass = leafMass + 2;\n        when(parent.getMass()).thenReturn(parentMass);\n        BoundingBox boundingBox = new BoundingBox(point, new float[] { 2.0f, 2.0f });\n        when(parent.getBoundingBox()).thenReturn(boundingBox);\n        when(parent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(new float[] { 2.0f, 2.0f }));\n        visitor.accept(parent, 0);\n        DiVector result = visitor.directionalAttribution;\n        assertEquals(result.getHighLowSum(), visitor.savedScore, 1e-6);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/AnomalyScoreVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.Matchers.closeTo;\nimport static org.hamcrest.Matchers.is;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Arrays;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.tree.BoundingBox;\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\nimport com.amazon.randomcutforest.tree.NodeView;\n\npublic class AnomalyScoreVisitorTest {\n\n    @Test\n    public void testNew() {\n        float[] point = new float[] { 1.0f, 2.0f };\n        int sampleSize = 9;\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, sampleSize);\n\n        
assertFalse(visitor.pointInsideBox);\n        for (int i = 0; i < point.length; i++) {\n            assertFalse(visitor.coordInsideBox[i]);\n        }\n\n        assertFalse(visitor.ignoreLeafEquals);\n        assertEquals(0, visitor.ignoreLeafMassThreshold);\n        assertThat(visitor.getResult(), is(0.0));\n    }\n\n    @Test\n    public void testNewWithIgnoreOptions() {\n        float[] point = new float[] { 1.0f, 2.0f };\n        int sampleSize = 9;\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, sampleSize, 7);\n\n        assertFalse(visitor.pointInsideBox);\n        for (int i = 0; i < point.length; i++) {\n            assertFalse(visitor.coordInsideBox[i]);\n        }\n\n        assertTrue(visitor.ignoreLeafEquals);\n        assertEquals(7, visitor.ignoreLeafMassThreshold);\n        assertThat(visitor.getResult(), is(0.0));\n    }\n\n    @Test\n    public void testAcceptLeafEquals() {\n        float[] point = { 1.0f, 2.0f, 3.0f };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafDepth = 100;\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        int subSampleSize = 21;\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, subSampleSize);\n        visitor.acceptLeaf(leafNode, leafDepth);\n        double expectedScore = CommonUtils.defaultDampFunction(leafMass, subSampleSize)\n                / (leafDepth + Math.log(leafMass + 1) / Math.log(2));\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, subSampleSize), EPSILON));\n        assertTrue(visitor.pointInsideBox);\n\n        visitor = new AnomalyScoreVisitor(point, subSampleSize);\n        visitor.acceptLeaf(leafNode, 0);\n        expectedScore = CommonUtils.defaultDampFunction(leafMass, 
subSampleSize)\n                / (Math.log(leafMass + 1) / Math.log(2.0));\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, subSampleSize), EPSILON));\n        assertTrue(visitor.pointInsideBox);\n\n        AnomalyScoreVisitor anotherVisitor = new AnomalyScoreVisitor(point, subSampleSize, 7);\n        anotherVisitor.acceptLeaf(leafNode, 0);\n        assertEquals(anotherVisitor.score, visitor.score);\n\n        AnomalyScoreVisitor yetAnotherVisitor = new AnomalyScoreVisitor(point, subSampleSize, 12);\n        yetAnotherVisitor.acceptLeaf(leafNode, 0);\n        assertNotEquals(yetAnotherVisitor.score, visitor.score);\n    }\n\n    @Test\n    public void testAcceptLeafNotEquals() {\n        float[] point = new float[] { 1.0f, 2.0f, 3.0f };\n        float[] anotherPoint = new float[] { 4.0f, 5.0f, 6.0f };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(anotherPoint);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(anotherPoint, anotherPoint));\n\n        int leafDepth = 100;\n\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, 2);\n        visitor.acceptLeaf(leafNode, leafDepth);\n        double expectedScore = 1.0 / (leafDepth + 1);\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, 2), EPSILON));\n        assertFalse(visitor.pointInsideBox);\n\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n        AnomalyScoreVisitor anotherVisitor = new AnomalyScoreVisitor(point, 2, 7);\n        anotherVisitor.acceptLeaf(leafNode, 100);\n        assertEquals(anotherVisitor.score, visitor.score);\n\n        AnomalyScoreVisitor yetAnotherVisitor = new AnomalyScoreVisitor(point, 2, 12);\n        yetAnotherVisitor.acceptLeaf(leafNode, 100);\n        assertEquals(yetAnotherVisitor.score, visitor.score);\n\n    
}\n\n    @Test\n    public void testAcceptEqualsLeafPoint() {\n        float[] pointToScore = { 0.0f, 0.0f };\n        int sampleSize = 50;\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(pointToScore, sampleSize);\n\n        float[] point = Arrays.copyOf(pointToScore, pointToScore.length);\n        INodeView node = mock(NodeView.class);\n        when(node.getLeafPoint()).thenReturn(point);\n        when(node.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int depth = 2;\n        visitor.acceptLeaf(node, depth);\n        double expectedScore = CommonUtils.defaultDampFunction(node.getMass(), sampleSize)\n                / (depth + Math.log(node.getMass() + 1) / Math.log(2));\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n\n        depth--;\n        IBoundingBoxView boundingBox = node.getBoundingBox().getMergedBox(new float[] { 1.0f, 1.0f });\n        node = new NodeView(null, null, Null);\n        visitor.accept(node, depth);\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n\n        depth--;\n        boundingBox = boundingBox.getMergedBox(new float[] { -1.0f, -1.0f });\n        node = new NodeView(null, null, Null);\n        visitor.accept(node, depth);\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n    }\n\n    @Test\n    public void testAccept() {\n        float[] pointToScore = new float[] { 0.0f, 0.0f };\n        int sampleSize = 50;\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(pointToScore, sampleSize);\n\n        NodeView node = mock(NodeView.class);\n        float[] otherPoint = new float[] { 1.0f, 1.0f };\n        when(node.getLeafPoint()).thenReturn(otherPoint);\n        
when(node.getBoundingBox()).thenReturn(new BoundingBox(otherPoint, otherPoint));\n        int depth = 4;\n        visitor.acceptLeaf(node, depth);\n        double expectedScore = 1.0 / (depth + 1);\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n\n        depth--;\n        IBoundingBoxView boundingBox = node.getBoundingBox().getMergedBox(new float[] { 2.0f, 0.0f });\n        when(node.getBoundingBox()).thenReturn(boundingBox);\n        when(node.probailityOfSeparation(any())).thenReturn(1.0 / 3);\n        visitor.accept(node, depth);\n        double p = visitor.getProbabilityOfSeparation(boundingBox);\n        expectedScore = p * (1.0 / (depth + 1)) + (1 - p) * expectedScore;\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n\n        depth--;\n        boundingBox = boundingBox.getMergedBox(new float[] { -1.0f, 0.0f });\n\n        when(node.getBoundingBox()).thenReturn(boundingBox);\n        when(node.probailityOfSeparation(any())).thenReturn(0.0);\n        visitor.accept(node, depth);\n        p = visitor.getProbabilityOfSeparation(boundingBox);\n        expectedScore = p * (1.0 / (depth + 1)) + (1 - p) * expectedScore;\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n\n        depth--;\n        boundingBox = boundingBox.getMergedBox(new float[] { -1.0f, -1.0f });\n        when(node.probailityOfSeparation(any())).thenReturn(0.0);\n        visitor.accept(node, depth);\n        p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(visitor.getResult(),\n                closeTo(CommonUtils.defaultScalarNormalizerFunction(expectedScore, sampleSize), EPSILON));\n        assertTrue(visitor.pointInsideBox);\n    }\n\n    @Test\n    public void 
testGetProbabilityOfSeparation() {\n        float[] minPoint = { 0.0f, 0.0f, 0.0f };\n        float[] maxPoint = { 1.0f, 2.0f, 3.0f };\n        IBoundingBoxView boundingBox = new BoundingBox(minPoint);\n        boundingBox = boundingBox.getMergedBox(maxPoint);\n\n        float[] point = { 0.5f, 0.5f, 0.5f };\n        int sampleSize = 2;\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, sampleSize);\n\n        double p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(p, closeTo(0.0, EPSILON));\n        assertTrue(visitor.coordInsideBox[0]);\n        assertTrue(visitor.coordInsideBox[1]);\n        assertTrue(visitor.coordInsideBox[2]);\n\n        visitor = new AnomalyScoreVisitor(point, sampleSize);\n        visitor.coordInsideBox[1] = visitor.coordInsideBox[2] = true;\n        p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(p, closeTo(0.0, EPSILON));\n        assertTrue(visitor.coordInsideBox[0]);\n        assertTrue(visitor.coordInsideBox[1]);\n        assertTrue(visitor.coordInsideBox[2]);\n\n        point = new float[] { 2.0f, 0.5f, 0.5f };\n        visitor = new AnomalyScoreVisitor(point, sampleSize);\n        p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(p, closeTo(1.0 / (2.0 + 2.0 + 3.0), EPSILON));\n        assertFalse(visitor.coordInsideBox[0]);\n        assertTrue(visitor.coordInsideBox[1]);\n        assertTrue(visitor.coordInsideBox[2]);\n\n        visitor = new AnomalyScoreVisitor(point, sampleSize);\n        visitor.coordInsideBox[1] = visitor.coordInsideBox[2] = true;\n        p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(p, closeTo(1.0 / (2.0 + 2.0 + 3.0), EPSILON));\n        assertFalse(visitor.coordInsideBox[0]);\n        assertTrue(visitor.coordInsideBox[1]);\n        assertTrue(visitor.coordInsideBox[2]);\n\n        point = new float[] { 0.5f, -3.0f, 4.0f };\n        visitor = new AnomalyScoreVisitor(point, sampleSize);\n        
p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(p, closeTo((3.0 + 1.0) / (1.0 + 5.0 + 4.0), EPSILON));\n        assertTrue(visitor.coordInsideBox[0]);\n        assertFalse(visitor.coordInsideBox[1]);\n        assertFalse(visitor.coordInsideBox[2]);\n\n        visitor = new AnomalyScoreVisitor(point, sampleSize);\n        visitor.coordInsideBox[0] = true;\n        p = visitor.getProbabilityOfSeparation(boundingBox);\n        assertThat(p, closeTo((3.0 + 1.0) / (1.0 + 5.0 + 4.0), EPSILON));\n        assertTrue(visitor.coordInsideBox[0]);\n        assertFalse(visitor.coordInsideBox[1]);\n        assertFalse(visitor.coordInsideBox[2]);\n    }\n\n    @Test\n    public void test_getProbabilityOfSeparation_leafNode() {\n        float[] point = new float[] { 1.0f, 2.0f, 3.0f };\n        float[] leafPoint = Arrays.copyOf(point, point.length);\n        BoundingBox boundingBox = new BoundingBox(leafPoint);\n\n        AnomalyScoreVisitor visitor = new AnomalyScoreVisitor(point, 2);\n        assertThrows(IllegalStateException.class, () -> visitor.getProbabilityOfSeparation(boundingBox));\n\n        TransductiveScalarScoreVisitor esotericVisitor = new TransductiveScalarScoreVisitor(leafPoint, 2,\n                CommonUtils::defaultScoreSeenFunction, CommonUtils::defaultScoreUnseenFunction,\n                CommonUtils::defaultDampFunction, b -> new double[3]);\n        assertThrows(IllegalStateException.class, () -> esotericVisitor.getProbabilityOfSeparation(boundingBox));\n\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/DynamicAttributionVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.function.BiFunction;\n\nimport org.junit.jupiter.api.Test;\n\npublic class DynamicAttributionVisitorTest {\n    @Test\n    public void testScoringMethods() {\n        BiFunction<Double, Double, Double> scoreSeen = (x, y) -> (x + y) / 2;\n        BiFunction<Double, Double, Double> scoreUneen = (x, y) -> 0.75 * x + 0.25 * y;\n        BiFunction<Double, Double, Double> damp = (x, y) -> Math.sqrt(x * y);\n        DynamicAttributionVisitor visitor = new DynamicAttributionVisitor(new float[] { 1.1f, -2.2f }, 100, 2,\n                scoreSeen, scoreUneen, damp);\n\n        int x = 9;\n        int y = 4;\n        assertEquals((x + y) / 2.0, visitor.scoreSeen(x, y));\n        assertEquals(0.75 * x + 0.25 * y, visitor.scoreUnseen(x, y));\n        assertEquals(Math.sqrt(x * y), visitor.damp(x, y));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/anomalydetection/DynamicScoreVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.anomalydetection;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.function.BiFunction;\n\nimport org.junit.jupiter.api.Test;\n\npublic class DynamicScoreVisitorTest {\n    @Test\n    public void testScoringMethods() {\n        BiFunction<Double, Double, Double> scoreSeen = (x, y) -> (x + y) / 2;\n        BiFunction<Double, Double, Double> scoreUneen = (x, y) -> 0.75 * x + 0.25 * y;\n        BiFunction<Double, Double, Double> damp = (x, y) -> Math.sqrt(x * y);\n        DynamicScoreVisitor visitor = new DynamicScoreVisitor(new float[] { 1.1f, -2.2f }, 100, 2, scoreSeen,\n                scoreUneen, damp);\n\n        int x = 9;\n        int y = 4;\n        assertEquals((x + y) / 2.0, visitor.scoreSeen(x, y));\n        assertEquals(0.75 * x + 0.25 * y, visitor.scoreUnseen(x, y));\n        assertEquals(Math.sqrt(x * y), visitor.damp(x, y));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/executor/ForestTraversalExecutorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.AdditionalMatchers.aryEq;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.atMost;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.spy;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.function.BinaryOperator;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtensionContext;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.ArgumentsProvider;\nimport org.junit.jupiter.params.provider.ArgumentsSource;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.IComponentModel;\nimport com.amazon.randomcutforest.IMultiVisitorFactory;\nimport com.amazon.randomcutforest.IVisitorFactory;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport 
com.amazon.randomcutforest.TestUtils;\nimport com.amazon.randomcutforest.anomalydetection.AnomalyScoreVisitor;\nimport com.amazon.randomcutforest.imputation.ImputeVisitor;\nimport com.amazon.randomcutforest.returntypes.ConditionalTreeSample;\nimport com.amazon.randomcutforest.returntypes.ConvergingAccumulator;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.tree.ITree;\nimport com.amazon.randomcutforest.tree.RandomCutTree;\n\npublic class ForestTraversalExecutorTest {\n\n    private static int numberOfTrees = 10;\n    private static int threadPoolSize = 2;\n\n    private static class TestExecutorProvider implements ArgumentsProvider {\n        @Override\n        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {\n\n            ComponentList<Integer, float[]> sequentialExecutors = new ComponentList<>();\n            ComponentList<Integer, float[]> parallelExecutors = new ComponentList<>();\n\n            for (int i = 0; i < numberOfTrees; i++) {\n                CompactSampler sampler = mock(CompactSampler.class);\n                RandomCutTree tree = mock(RandomCutTree.class);\n                sequentialExecutors.add(spy(new SamplerPlusTree<>(sampler, tree)));\n            }\n\n            for (int i = 0; i < numberOfTrees; i++) {\n                CompactSampler sampler = mock(CompactSampler.class);\n                RandomCutTree tree = mock(RandomCutTree.class);\n                parallelExecutors.add(spy(new SamplerPlusTree<>(sampler, tree)));\n            }\n\n            SequentialForestTraversalExecutor sequentialExecutor = new SequentialForestTraversalExecutor(\n                    sequentialExecutors);\n\n            ParallelForestTraversalExecutor parallelExecutor = new ParallelForestTraversalExecutor(parallelExecutors,\n                    threadPoolSize);\n\n            return Stream.of(sequentialExecutor, parallelExecutor).map(Arguments::of);\n        }\n    }\n\n  
  @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void testTraverseForestBinaryAccumulator(AbstractForestTraversalExecutor executor) {\n        float[] point = new float[] { 1.2f, -3.4f };\n        double expectedResult = 0.0;\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            double treeResult = Math.random();\n            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();\n            when(tree.traverse(aryEq(point), any())).thenReturn(treeResult);\n            expectedResult += treeResult;\n        }\n\n        expectedResult /= numberOfTrees;\n\n        double result = executor.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, Double::sum,\n                x -> x / 10.0);\n\n        for (IComponentModel<?, ?> component : executor.components) {\n            verify(component, times(1)).traverse(aryEq(point), any());\n        }\n\n        assertEquals(expectedResult, result, EPSILON);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void testTraverseForestCollector(AbstractForestTraversalExecutor executor) {\n        float[] point = new float[] { 1.2f, -3.4f };\n        double[] expectedResult = new double[numberOfTrees];\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            double treeResult = Math.random();\n            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();\n            when(tree.traverse(aryEq(point), any())).thenReturn(treeResult);\n            expectedResult[i] = treeResult;\n        }\n\n        Arrays.sort(expectedResult);\n\n        List<Double> result = executor.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY,\n                TestUtils.SORTED_LIST_COLLECTOR);\n\n        for (IComponentModel<?, ?> component : executor.components) {\n            verify(component, times(1)).traverse(aryEq(point), any());\n        }\n\n        
assertEquals(numberOfTrees, result.size());\n        for (int i = 0; i < numberOfTrees; i++) {\n            assertEquals(expectedResult[i], result.get(i), EPSILON);\n        }\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void testTraverseForestConverging(AbstractForestTraversalExecutor executor) {\n        float[] point = new float[] { 1.2f, -3.4f };\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            double treeResult = Math.random();\n            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();\n            when(tree.traverse(aryEq(point), any())).thenReturn(treeResult);\n        }\n\n        int convergenceThreshold = numberOfTrees / 2;\n        ConvergingAccumulator<Double> accumulator = TestUtils.convergeAfter(convergenceThreshold);\n\n        double result = executor.traverseForest(point, TestUtils.DUMMY_GENERIC_VISITOR_FACTORY, accumulator,\n                x -> x / accumulator.getValuesAccepted());\n\n        for (IComponentModel<?, ?> component : executor.components) {\n            verify(component, atMost(1)).traverse(aryEq(point), any());\n        }\n\n        assertTrue(accumulator.getValuesAccepted() >= convergenceThreshold);\n        assertTrue(accumulator.getValuesAccepted() < numberOfTrees);\n        assertEquals(accumulator.getAccumulatedValue() / accumulator.getValuesAccepted(), result, EPSILON);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void testTraverseForestMultiBinaryAccumulator(AbstractForestTraversalExecutor executor) {\n        float[] point = new float[] { 1.2f, -3.4f };\n        double expectedResult = 0.0;\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            double treeResult = Math.random();\n            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();\n            when(tree.traverseMulti(aryEq(point), any())).thenReturn(treeResult);\n        
    expectedResult += treeResult;\n        }\n\n        expectedResult /= numberOfTrees;\n\n        double result = executor.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY, Double::sum,\n                x -> x / 10.0);\n\n        for (IComponentModel<?, ?> component : executor.components) {\n            verify(component, times(1)).traverseMulti(aryEq(point), any());\n        }\n\n        assertEquals(expectedResult, result, EPSILON);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void testTraverseForestMultiCollector(AbstractForestTraversalExecutor executor) {\n        float[] point = new float[] { 1.2f, -3.4f };\n        double[] expectedResult = new double[numberOfTrees];\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            double treeResult = Math.random();\n            ITree<?, ?> tree = ((SamplerPlusTree<?, ?>) executor.components.get(i)).getTree();\n            when(tree.traverseMulti(aryEq(point), any())).thenReturn(treeResult);\n            expectedResult[i] = treeResult;\n        }\n\n        Arrays.sort(expectedResult);\n\n        List<Double> result = executor.traverseForestMulti(point, TestUtils.DUMMY_GENERIC_MULTI_VISITOR_FACTORY,\n                TestUtils.SORTED_LIST_COLLECTOR);\n\n        for (IComponentModel<?, ?> component : executor.components) {\n            verify(component, times(1)).traverseMulti(aryEq(point), any());\n        }\n\n        assertEquals(numberOfTrees, result.size());\n        for (int i = 0; i < numberOfTrees; i++) {\n            assertEquals(expectedResult[i], result.get(i), EPSILON);\n        }\n    }\n\n    @Test\n    public void testException() {\n        ParallelForestTraversalExecutor executor = new ParallelForestTraversalExecutor(new ComponentList<>(0), 2);\n        SequentialForestTraversalExecutor executor1 = new SequentialForestTraversalExecutor(new ComponentList<>(0));\n        IVisitorFactory<Double> visitorFactory = (tree, x) -> 
new AnomalyScoreVisitor(tree.projectToTree(x),\n                tree.getMass());\n        assertThrows(IllegalStateException.class,\n                () -> executor.traverseForest(new float[1], visitorFactory, Double::sum, x -> x));\n        assertThrows(IllegalStateException.class,\n                () -> executor1.traverseForest(new float[1], visitorFactory, Double::sum, x -> x));\n        IMultiVisitorFactory<ConditionalTreeSample> multiVisitorFactory = (tree, y) -> new ImputeVisitor(y,\n                tree.projectToTree(y), null, null, 1.0, tree.getRandomSeed());\n        BinaryOperator<ConditionalTreeSample> accumulator = (x, y) -> x;\n        assertThrows(IllegalStateException.class,\n                () -> executor.traverseForestMulti(new float[1], multiVisitorFactory, accumulator, x -> x));\n        assertThrows(IllegalStateException.class,\n                () -> executor1.traverseForestMulti(new float[1], multiVisitorFactory, accumulator, x -> x));\n    }\n\n    @Test\n    public void threadpoolOne() {\n        RandomCutForest f = RandomCutForest.builder().dimensions(1).numberOfTrees(5).parallelExecutionEnabled(true)\n                .threadPoolSize(1).outputAfter(1).build();\n        f.update(new float[1]);\n        f.getApproximateAnomalyScore(new float[1]);\n    }\n\n    @Test\n    public void constructorTest() {\n        ParallelForestTraversalExecutor executor = new ParallelForestTraversalExecutor(null, 1);\n        executor.forkJoinPool = null;\n        executor.submitAndJoin(() -> {\n            return 0;\n        });\n        assertEquals(executor.forkJoinPool.getPoolSize(), 1);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/executor/ForestUpdateExecutorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static com.amazon.randomcutforest.util.ArrayUtils.cleanCopy;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotSame;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.ArgumentMatchers.anyLong;\nimport static org.mockito.ArgumentMatchers.eq;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.spy;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.List;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.junit.jupiter.api.extension.ExtensionContext;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.ArgumentsProvider;\nimport org.junit.jupiter.params.provider.ArgumentsSource;\nimport org.mockito.ArgumentCaptor;\nimport org.mockito.Captor;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.IComponentModel;\nimport 
com.amazon.randomcutforest.store.PointStore;\n\n@ExtendWith(MockitoExtension.class)\npublic class ForestUpdateExecutorTest {\n\n    private static final int numberOfTrees = 10;\n    private static final int threadPoolSize = 2;\n\n    @Captor\n    private ArgumentCaptor<List<UpdateResult<Integer>>> updateResultCaptor;\n\n    private static class TestExecutorProvider implements ArgumentsProvider {\n        @Override\n        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {\n\n            ComponentList<Integer, float[]> sequentialComponents = new ComponentList<>();\n            ComponentList<Integer, float[]> parallelComponents = new ComponentList<>();\n\n            for (int i = 0; i < numberOfTrees; i++) {\n                sequentialComponents.add(mock(IComponentModel.class));\n                parallelComponents.add(mock(IComponentModel.class));\n            }\n\n            PointStore pointStore = mock(PointStore.class);\n            IStateCoordinator<Integer, float[]> sequentialUpdateCoordinator = spy(\n                    new PointStoreCoordinator<>(pointStore));\n            AbstractForestUpdateExecutor<Integer, float[]> sequentialExecutor = new SequentialForestUpdateExecutor<>(\n                    sequentialUpdateCoordinator, sequentialComponents);\n\n            IStateCoordinator<Integer, float[]> parallelUpdateCoordinator = spy(\n                    new PointStoreCoordinator<>(pointStore));\n            AbstractForestUpdateExecutor<Integer, float[]> parallelExecutor = new ParallelForestUpdateExecutor<>(\n                    parallelUpdateCoordinator, parallelComponents, threadPoolSize);\n\n            return Stream.of(sequentialExecutor, parallelExecutor).map(Arguments::of);\n        }\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void testUpdate(AbstractForestUpdateExecutor<Integer, float[]> executor) {\n        int addAndDelete = 4;\n        int addOnly = 
4;\n\n        ComponentList<Integer, ?> components = executor.components;\n        for (int i = 0; i < addAndDelete; i++) {\n            IComponentModel<Integer, ?> model = components.get(i);\n            UpdateResult<Integer> result = new UpdateResult<>(i, 2 * i);\n            when(model.update(any(), anyLong())).thenReturn(result);\n        }\n\n        for (int i = addAndDelete; i < addAndDelete + addOnly; i++) {\n            IComponentModel<Integer, ?> model = components.get(i);\n            UpdateResult<Integer> result = UpdateResult.<Integer>builder().addedPoint(i).build();\n            when(model.update(any(), anyLong())).thenReturn(result);\n        }\n\n        for (int i = addAndDelete + addOnly; i < numberOfTrees; i++) {\n            IComponentModel<Integer, ?> model = components.get(i);\n            when(model.update(any(), anyLong())).thenReturn(UpdateResult.noop());\n        }\n\n        float[] point = new float[] { 1.0f };\n        executor.update(point);\n\n        executor.components.forEach(model -> verify(model).update(any(), eq(0L)));\n\n        IStateCoordinator<Integer, ?> coordinator = executor.updateCoordinator;\n        verify(coordinator, times(1)).completeUpdate(updateResultCaptor.capture(), any());\n\n        List<UpdateResult<Integer>> updateResults = updateResultCaptor.getValue();\n        assertEquals(addAndDelete + addOnly, updateResults.size());\n\n        int actualAddAndAndDelete = 0;\n        int actualAddOnly = 0;\n        for (int i = 0; i < updateResults.size(); i++) {\n            UpdateResult<Integer> result = updateResults.get(i);\n            if (result.getDeletedPoint().isPresent()) {\n                actualAddAndAndDelete++;\n            } else {\n                actualAddOnly++;\n            }\n        }\n\n        assertEquals(addAndDelete, actualAddAndAndDelete);\n        assertEquals(addOnly, actualAddOnly);\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(TestExecutorProvider.class)\n    public void 
testCleanCopy(AbstractForestUpdateExecutor<double[], ?> executor) {\n        float[] point1 = new float[] { 1.0f, -22.2f, 30.9f };\n        float[] point1Copy = cleanCopy(point1);\n        assertNotSame(point1, point1Copy);\n        assertArrayEquals(point1, point1Copy);\n\n        float[] point2 = new float[] { -0.0f, -22.2f, 30.9f };\n        float[] point2Copy = cleanCopy(point2);\n        assertNotSame(point2, point2Copy);\n        assertEquals(0.0, point2Copy[0]);\n\n        point2Copy[0] = -0.0f;\n        assertArrayEquals(point2, point2Copy);\n    }\n\n    @Test\n    public void constructorTest() {\n        ParallelForestUpdateExecutor<?, ?> executor = new ParallelForestUpdateExecutor(null, null, 1);\n        executor.forkJoinPool = null;\n        executor.submitAndJoin(() -> {\n            return 0;\n        });\n        assertEquals(executor.forkJoinPool.getPoolSize(), 1);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/executor/PointStoreCoordinatorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.mockito.ArgumentMatchers.anyBoolean;\nimport static org.mockito.ArgumentMatchers.anyLong;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.mockito.ArgumentCaptor;\n\nimport com.amazon.randomcutforest.store.PointStore;\n\npublic class PointStoreCoordinatorTest {\n\n    private PointStore store;\n    private PointStoreCoordinator coordinator;\n\n    @BeforeEach\n    public void setUp() {\n        store = mock(PointStore.class);\n        coordinator = new PointStoreCoordinator(store);\n    }\n\n    @Test\n    public void testInitUpdate() {\n        float[] point = { 1.2f, -3.4f };\n        int index = 123;\n\n        ArgumentCaptor<float[]> captor = ArgumentCaptor.forClass(float[].class);\n        when(store.add(captor.capture(), anyLong(), anyBoolean())).thenReturn(index);\n\n        int result = coordinator.initUpdate(point, 0, false);\n\n        verify(store, times(1)).add(point, 0, false);\n        assertEquals(result, index);\n 
   }\n\n    @Test\n    public void testCompleteUpdate() {\n        List<UpdateResult<Integer>> updateResults = new ArrayList<>();\n\n        UpdateResult<Integer> result1 = UpdateResult.<Integer>builder().addedPoint(1).deletedPoint(100).build();\n        updateResults.add(result1);\n\n        UpdateResult<Integer> result2 = UpdateResult.<Integer>builder().addedPoint(2).deletedPoint(200).build();\n        updateResults.add(result2);\n\n        UpdateResult<Integer> result3 = UpdateResult.<Integer>builder().addedPoint(3).build();\n        updateResults.add(result3);\n\n        UpdateResult<Integer> result4 = UpdateResult.noop();\n        updateResults.add(result4);\n\n        // order shouldn't matter\n        Collections.shuffle(updateResults);\n\n        Integer updateInput = 1000;\n        coordinator.completeUpdate(updateResults, updateInput);\n\n        ArgumentCaptor<Integer> captor1 = ArgumentCaptor.forClass(Integer.class);\n        verify(store, times(3)).incrementRefCount(captor1.capture());\n        List<Integer> arguments = captor1.getAllValues();\n        Collections.sort(arguments);\n        assertEquals(1, arguments.get(0));\n        assertEquals(2, arguments.get(1));\n        assertEquals(3, arguments.get(2));\n\n        ArgumentCaptor<Integer> captor2 = ArgumentCaptor.forClass(Integer.class);\n        verify(store, times(3)).decrementRefCount(captor2.capture());\n        arguments = captor2.getAllValues();\n        Collections.sort(arguments);\n        assertEquals(100, arguments.get(0));\n        assertEquals(200, arguments.get(1));\n        assertEquals(1000, arguments.get(2));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/executor/SamplerPlusTreeTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.ArgumentMatchers.anyLong;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.never;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Optional;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtendWith;\nimport org.mockito.Mock;\nimport org.mockito.junit.jupiter.MockitoExtension;\n\nimport com.amazon.randomcutforest.sampler.ISampled;\nimport com.amazon.randomcutforest.sampler.IStreamSampler;\nimport com.amazon.randomcutforest.tree.ITree;\n\n@ExtendWith(MockitoExtension.class)\npublic class SamplerPlusTreeTest {\n    @Mock\n    private ITree<Integer, double[]> tree;\n    @Mock\n    private IStreamSampler<Integer> sampler;\n    private SamplerPlusTree<Integer, double[]> samplerPlusTree;\n\n    @BeforeEach\n    public void setUp() {\n        samplerPlusTree = new SamplerPlusTree<>(sampler, tree);\n    }\n\n    @Test\n    public void testUpdateAddPoint() {\n  
      int pointReference = 2;\n        long sequenceIndex = 100L;\n        int existingPointReference = 222;\n        when(sampler.acceptPoint(sequenceIndex)).thenReturn(true);\n        when(sampler.getEvictedPoint()).thenReturn(Optional.empty());\n        when(tree.addPoint(pointReference, sequenceIndex)).thenReturn(existingPointReference);\n\n        UpdateResult<Integer> result = samplerPlusTree.update(pointReference, sequenceIndex);\n        assertTrue(result.getAddedPoint().isPresent());\n        assertEquals(existingPointReference, result.getAddedPoint().get());\n        assertFalse(result.getDeletedPoint().isPresent());\n\n        verify(tree, never()).deletePoint(any(), anyLong());\n        verify(sampler, times(1)).addPoint(existingPointReference);\n    }\n\n    @Test\n    public void testUpdateAddAndDeletePoint() {\n        int pointReference = 2;\n        long sequenceIndex = 100L;\n        int existingPointReference = 222;\n        int evictedPoint = 333;\n        long evictedSequenceIndex = 50L;\n\n        ISampled<Integer> evictedPointSampled = mock(ISampled.class);\n        when(evictedPointSampled.getValue()).thenReturn(evictedPoint);\n        when(evictedPointSampled.getSequenceIndex()).thenReturn(evictedSequenceIndex);\n\n        when(sampler.acceptPoint(sequenceIndex)).thenReturn(true);\n        when(sampler.getEvictedPoint()).thenReturn(Optional.of(evictedPointSampled));\n        when(tree.addPoint(pointReference, sequenceIndex)).thenReturn(existingPointReference);\n\n        UpdateResult<Integer> result = samplerPlusTree.update(pointReference, sequenceIndex);\n        assertTrue(result.getAddedPoint().isPresent());\n        assertEquals(existingPointReference, result.getAddedPoint().get());\n        assertTrue(result.getDeletedPoint().isPresent());\n        assertEquals(evictedPoint, result.getDeletedPoint().get());\n\n        verify(tree, times(1)).deletePoint(evictedPoint, evictedSequenceIndex);\n        verify(sampler, 
times(1)).addPoint(existingPointReference);\n    }\n\n    @Test\n    public void testRejectPoint() {\n        when(sampler.acceptPoint(anyLong())).thenReturn(false);\n        UpdateResult<Integer> result = samplerPlusTree.update(2, 100L);\n        assertFalse(result.isStateChange());\n\n        verify(tree, never()).addPoint(any(), anyLong());\n        verify(tree, never()).deletePoint(any(), anyLong());\n        verify(sampler, never()).addPoint(any());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/executor/UpdateResultTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.executor;\n\nimport static org.junit.jupiter.api.Assertions.assertFalse;\n\nimport org.junit.jupiter.api.Test;\n\npublic class UpdateResultTest {\n    @Test\n    public void testNoop() {\n        UpdateResult<Integer> result = UpdateResult.noop();\n        assertFalse(result.getAddedPoint().isPresent());\n        assertFalse(result.getDeletedPoint().isPresent());\n        assertFalse(result.isStateChange());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/imputation/ConditionalSampleSummarizerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.imputation;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.ConditionalTreeSample;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.summarization.Summarizer;\n\npublic class ConditionalSampleSummarizerTest {\n\n    private float[] queryPoint;\n    private int[] missingIndexes;\n    ConditionalSampleSummarizer summarizer;\n    ConditionalSampleSummarizer projectedSummarizer;\n\n    @BeforeEach\n    public void setUp() {\n        queryPoint = new float[] { 50, 70, 90, 100 };\n        missingIndexes = new int[] { 2, 3 };\n        summarizer = new ConditionalSampleSummarizer(missingIndexes, queryPoint, 0.2, false, 1, 0, 1);\n        projectedSummarizer = new ConditionalSampleSummarizer(missingIndexes, queryPoint, 0.2, true, 5, 0.3, 1);\n    }\n\n    @Test\n    public void testSummarize() {\n        
assertThrows(IllegalArgumentException.class, () -> summarizer.summarize(Collections.emptyList()));\n\n        Random random = new Random(42);\n        ArrayList<ConditionalTreeSample> list = new ArrayList<>();\n        for (int i = 0; i < 999; i++) {\n            float[] point = new float[] { 50, 70, 90, 100 + 2 * random.nextFloat() };\n            list.add(new ConditionalTreeSample(i, null, Summarizer.L1distance(point, queryPoint), point));\n        }\n        list.add(new ConditionalTreeSample(999, null, 100, new float[] { 50, 70, 90, 200 }));\n\n        SampleSummary summary = summarizer.summarize(list, false);\n        assertNull(summary.summaryPoints);\n        SampleSummary summaryTwo = summarizer.summarize(list, true);\n        assertNotNull(summaryTwo.summaryPoints);\n        for (float[] element : summaryTwo.summaryPoints) {\n            assertEquals(element.length, 4);\n            assertEquals(element[0], 50);\n            assertEquals(element[1], 70);\n            assertEquals(element[2], 90);\n            assertTrue(100 < element[3] && element[3] < 102);\n        }\n        assertEquals(4, summaryTwo.mean.length);\n        assertEquals(0, summaryTwo.deviation[0]);\n        SampleSummary summaryThree = projectedSummarizer.summarize(list);\n        assertNotNull(summaryThree.summaryPoints);\n        for (float[] element : summaryThree.summaryPoints) {\n            assertEquals(element.length, missingIndexes.length);\n        }\n    }\n\n    @Test\n    public void testZero() {\n        ArrayList<ConditionalTreeSample> list = new ArrayList<>();\n        for (int i = 0; i < 1000; i++) {\n            list.add(new ConditionalTreeSample(i, null, 0, queryPoint));\n        }\n        assert (summarizer.summarize(list, true).summaryPoints.length == 1);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/imputation/ImputeVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.imputation;\n\nimport static com.amazon.randomcutforest.CommonUtils.defaultScoreSeenFunction;\nimport static com.amazon.randomcutforest.CommonUtils.defaultScoreUnseenFunction;\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotSame;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.tree.BoundingBox;\nimport com.amazon.randomcutforest.tree.IBoundingBoxView;\nimport com.amazon.randomcutforest.tree.INodeView;\nimport com.amazon.randomcutforest.tree.NodeView;\n\npublic class ImputeVisitorTest {\n\n    private float[] queryPoint;\n    private int numberOfMissingValues;\n    private int[] missingIndexes;\n    private ImputeVisitor 
visitor;\n    private ImputeVisitor anotherVisitor;\n\n    @BeforeEach\n    public void setUp() {\n        // create a point where the 2nd value is missing\n        // The second value of queryPoint and the 2nd and 3rd values of missingIndexes\n        // should be ignored in all tests\n\n        queryPoint = new float[] { -1.0f, 1000.0f, 3.0f };\n        numberOfMissingValues = 1;\n        missingIndexes = new int[] { 1, 99, -888 };\n\n        visitor = new ImputeVisitor(queryPoint, numberOfMissingValues, missingIndexes);\n        anotherVisitor = new ImputeVisitor(queryPoint, queryPoint, null, null, 0.8, 42);\n    }\n\n    @Test\n    public void testNew() {\n        assertArrayEquals(queryPoint, visitor.getResult().leafPoint);\n        assertNotSame(queryPoint, visitor.getResult());\n        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.getAnomalyRank());\n        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.adjustedRank());\n        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, anotherVisitor.getAnomalyRank());\n        assertNotEquals(ImputeVisitor.DEFAULT_INIT_VALUE, anotherVisitor.adjustedRank());\n        assertEquals(visitor.getDistance(), Double.MAX_VALUE);\n        assertFalse(visitor.isConverged());\n        assertThrows(IllegalArgumentException.class,\n                () -> new ImputeVisitor(queryPoint, queryPoint, null, null, -1.0, 42));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ImputeVisitor(queryPoint, queryPoint, null, null, 2.0, 42));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ImputeVisitor(queryPoint, queryPoint, null, new int[] { -1 }, 1.0, 42));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ImputeVisitor(queryPoint, queryPoint, null, new int[] { 4 }, 1.0, 42));\n    }\n\n    @Test\n    public void testCopyConstructor() {\n        ImputeVisitor copy = new ImputeVisitor(visitor);\n        assertArrayEquals(queryPoint, 
copy.getResult().leafPoint);\n        assertNotSame(copy.getResult(), visitor.getResult());\n        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.getAnomalyRank());\n    }\n\n    @Test\n    public void testAcceptLeafEquals() {\n        float[] point = { queryPoint[0], 2.0f, queryPoint[2] };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getLiftedLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafDepth = 100;\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        visitor.acceptLeaf(leafNode, leafDepth);\n        anotherVisitor.acceptLeaf(leafNode, leafDepth);\n\n        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };\n        assertArrayEquals(expected, visitor.getResult().leafPoint);\n        assertEquals(visitor.getDistance(), 0, 1e-6);\n        assertEquals(defaultScoreSeenFunction(leafDepth, leafMass), visitor.getAnomalyRank());\n    }\n\n    @Test\n    public void testAcceptLeafEqualsZeroDepth() {\n        float[] point = { queryPoint[0], 2.0f, queryPoint[2] };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getLiftedLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n\n        int leafDepth = 0;\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        visitor.acceptLeaf(leafNode, leafDepth);\n\n        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };\n        assertArrayEquals(expected, visitor.getResult().leafPoint);\n\n        assertEquals(0.0, visitor.getAnomalyRank());\n    }\n\n    @Test\n    public void testAcceptLeafNotEquals() {\n        float[] point = { queryPoint[0], 2.0f, -111.11f };\n        INodeView leafNode = mock(NodeView.class);\n  
      when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getLiftedLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n        int leafDepth = 100;\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        visitor.acceptLeaf(leafNode, leafDepth);\n\n        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };\n        assertArrayEquals(expected, visitor.getResult().leafPoint);\n\n        assertEquals(defaultScoreUnseenFunction(leafDepth, leafMass), visitor.getAnomalyRank());\n    }\n\n    @Test\n    public void testAccept() {\n\n        float[] point = { queryPoint[0], 2.0f, -111.11f };\n        INodeView node = mock(NodeView.class);\n        when(node.getLeafPoint()).thenReturn(point);\n        when(node.getLiftedLeafPoint()).thenReturn(point);\n        when(node.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n        int depth = 100;\n        int leafMass = 10;\n        when(node.getMass()).thenReturn(leafMass);\n\n        visitor.acceptLeaf(node, depth);\n\n        float[] expected = new float[] { -1.0f, 2.0f, 3.0f };\n        assertArrayEquals(expected, visitor.getResult().leafPoint);\n\n        assertEquals(defaultScoreUnseenFunction(depth, leafMass), visitor.getAnomalyRank());\n\n        depth--;\n        IBoundingBoxView boundingBox = node.getBoundingBox().getMergedBox(new float[] { 99.0f, 4.0f, -19.0f });\n        when(node.getBoundingBox()).thenReturn(boundingBox);\n        when(node.probailityOfSeparation(any()))\n                .thenReturn(CommonUtils.getProbabilityOfSeparation(boundingBox, expected));\n        when(node.getMass()).thenReturn(leafMass + 2);\n\n        double oldRank = visitor.getAnomalyRank();\n        visitor.accept(node, depth);\n        assertArrayEquals(expected, visitor.getResult().leafPoint);\n\n        double p = CommonUtils.getProbabilityOfSeparation(boundingBox, expected);\n        double 
expectedRank = p * defaultScoreUnseenFunction(depth, node.getMass()) + (1 - p) * oldRank;\n        assertEquals(expectedRank, visitor.getAnomalyRank(), EPSILON);\n    }\n\n    @Test\n    public void testNewCopy() {\n        ImputeVisitor copy = (ImputeVisitor) visitor.newPartialCopy();\n        assertArrayEquals(queryPoint, copy.getResult().leafPoint);\n        assertNotSame(copy.getResult(), visitor.getResult());\n        assertEquals(ImputeVisitor.DEFAULT_INIT_VALUE, visitor.getAnomalyRank());\n    }\n\n    @Test\n    public void testMerge() {\n        float[] otherPoint = new float[] { 99, 100, 101 };\n        ImputeVisitor other = new ImputeVisitor(otherPoint, 0, new int[0]);\n\n        // set other.rank to a small value\n        NodeView node = mock(NodeView.class);\n        when(node.getLeafPoint()).thenReturn(new float[] { 0, 0, 0 });\n        when(node.getLiftedLeafPoint()).thenReturn(new float[] { 0, 0, 0 });\n        when(node.getBoundingBox()).thenReturn(new BoundingBox(new float[] { 0, 0, 0 }));\n        other.acceptLeaf(node, 99);\n\n        assertTrue(other.getAnomalyRank() < visitor.getAnomalyRank());\n\n        other.combine(visitor);\n        assertArrayEquals(otherPoint, other.getResult().leafPoint);\n\n        visitor.combine(other);\n        assertArrayEquals(otherPoint, visitor.getResult().leafPoint);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/inspect/NearNeighborVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.inspect;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotSame;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Optional;\nimport java.util.stream.Collector;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.tree.INodeView;\nimport com.amazon.randomcutforest.tree.NodeView;\n\npublic class NearNeighborVisitorTest {\n\n    private float[] queryPoint;\n    private double distanceThreshold;\n    private NearNeighborVisitor visitor;\n\n    @BeforeEach\n    public void setUp() {\n        queryPoint = new float[] { 7.7f, 8.8f, -6.6f };\n        distanceThreshold = 10.0;\n        visitor = new NearNeighborVisitor(queryPoint, distanceThreshold);\n    }\n\n    @Test\n    public void acceptLeafNear() {\n        float[] leafPoint = new 
float[] { 8.8f, 9.9f, -5.5f };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));\n        when(leafNode.getLiftedLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));\n        HashMap<Long, Integer> sequenceIndexes = new HashMap<>();\n        sequenceIndexes.put(1234L, 1);\n        sequenceIndexes.put(5678L, 1);\n        when(leafNode.getSequenceIndexes()).thenReturn(sequenceIndexes);\n\n        int depth = 12;\n        visitor.acceptLeaf(leafNode, depth);\n\n        Optional<Neighbor> optional = visitor.getResult();\n        assertTrue(optional.isPresent());\n\n        Neighbor neighbor = optional.get();\n        assertNotSame(leafPoint, neighbor.point);\n        assertArrayEquals(leafPoint, neighbor.point);\n        assertEquals(Math.sqrt(3 * 1.1 * 1.1), neighbor.distance, EPSILON);\n        assertNotSame(leafNode.getSequenceIndexes(), neighbor.sequenceIndexes);\n    }\n\n    @Test\n    public void acceptLeafNearTimestampsDisabled() {\n        float[] leafPoint = new float[] { 8.8f, 9.9f, -5.5f };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLiftedLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));\n        when(leafNode.getLeafPoint()).thenReturn(Arrays.copyOf(leafPoint, leafPoint.length));\n        assertEquals(0, leafNode.getSequenceIndexes().size());\n        int depth = 12;\n        visitor.acceptLeaf(leafNode, depth);\n\n        Optional<Neighbor> optional = visitor.getResult();\n        assertTrue(optional.isPresent());\n        NearNeighborVisitor nearNeighborVisitor = new NearNeighborVisitor(queryPoint);\n        nearNeighborVisitor.acceptLeaf(leafNode, depth);\n\n        Map<Integer, Neighbor> map1 = new HashMap<>();\n        Map<Integer, Neighbor> map2 = new HashMap<>();\n        // an equality test\n        Collector<Optional<Neighbor>, Map<Integer, Neighbor>, List<Neighbor>> collector = 
Neighbor.collector();\n        map1.put(Arrays.hashCode(optional.get().point), optional.get());\n        map2.put(Arrays.hashCode(nearNeighborVisitor.getResult().get().point), optional.get());\n        collector.combiner().apply(map1, map2);\n        assertEquals(map1.size(), 1);\n\n        Neighbor neighbor = optional.get();\n        assertNotSame(leafPoint, neighbor.point);\n        assertArrayEquals(leafPoint, neighbor.point);\n        assertEquals(Math.sqrt(3 * 1.1 * 1.1), neighbor.distance, EPSILON);\n        assertTrue(neighbor.sequenceIndexes.isEmpty());\n    }\n\n    @Test\n    public void acceptLeafNotNear() {\n        float[] leafPoint = new float[] { 108.8f, 209.9f, -305.5f };\n        INodeView leafNode = mock(NodeView.class);\n\n        HashMap<Long, Integer> sequenceIndexes = new HashMap<>();\n        sequenceIndexes.put(1234L, 1);\n        sequenceIndexes.put(5678L, 1);\n        when(leafNode.getLeafPoint()).thenReturn(leafPoint);\n        when(leafNode.getLiftedLeafPoint()).thenReturn(leafPoint);\n        when(leafNode.getSequenceIndexes()).thenReturn(sequenceIndexes);\n\n        int depth = 12;\n        visitor.acceptLeaf(leafNode, depth);\n\n        Optional<Neighbor> optional = visitor.getResult();\n        assertFalse(optional.isPresent());\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/interpolation/SimpleInterpolationVisitorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.interpolation;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Arrays;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.InterpolationMeasure;\nimport com.amazon.randomcutforest.tree.BoundingBox;\nimport com.amazon.randomcutforest.tree.INodeView;\nimport com.amazon.randomcutforest.tree.NodeView;\n\npublic class SimpleInterpolationVisitorTest {\n\n    private static final int SEED = 1002;\n\n    @Test\n    public void testNew() {\n        float[] point = { 1.0f, 2.0f };\n        int sampleSize = 9;\n        SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(point, sampleSize, 1, false);\n\n        assertFalse(visitor.pointInsideBox);\n        assertEquals(2, visitor.coordInsideBox.length);\n\n        for (int i = 0; i < point.length; i++) {\n            assertFalse(visitor.coordInsideBox[i]);\n        }\n\n        InterpolationMeasure output = visitor.getResult();\n\n        double[] zero = new double[point.length];\n\n        assertArrayEquals(zero, 
output.measure.high);\n        assertArrayEquals(zero, output.distances.high);\n        assertArrayEquals(zero, output.probMass.high);\n        assertArrayEquals(zero, output.measure.low);\n        assertArrayEquals(zero, output.distances.low);\n        assertArrayEquals(zero, output.probMass.low);\n    }\n\n    @Test\n    public void testAcceptLeafEquals() {\n        float[] point = { 1.0f, 2.0f, 3.0f };\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n        int leafDepth = 100;\n        int leafMass = 10;\n        when(leafNode.getMass()).thenReturn(leafMass);\n\n        int sampleSize = 21;\n        SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(point, sampleSize, 1, false);\n        visitor.acceptLeaf(leafNode, leafDepth);\n\n        InterpolationMeasure result = visitor.getResult();\n\n        double[] expected = new double[point.length];\n        Arrays.fill(expected, 0.5 * (1 + leafMass) / point.length);\n        assertArrayEquals(expected, result.measure.high);\n        assertArrayEquals(expected, result.measure.low);\n\n        Arrays.fill(expected, 0.5 / point.length);\n        assertArrayEquals(expected, result.probMass.high);\n        assertArrayEquals(expected, result.probMass.low);\n\n        Arrays.fill(expected, 0.0);\n        assertArrayEquals(expected, result.distances.high);\n        assertArrayEquals(expected, result.distances.low);\n    }\n\n    @Test\n    public void testAcceptLeafNotEquals() {\n        float[] point = { 1.0f, 9.0f, 4.0f };\n        float[] anotherPoint = { 4.0f, 5.0f, 6.0f };\n\n        INodeView leafNode = mock(NodeView.class);\n        when(leafNode.getLeafPoint()).thenReturn(anotherPoint);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(anotherPoint, anotherPoint));\n        when(leafNode.getMass()).thenReturn(4);\n        int leafDepth 
= 100;\n        int sampleSize = 99;\n\n        SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(point, sampleSize, 1, false);\n        visitor.acceptLeaf(leafNode, leafDepth);\n\n        InterpolationMeasure result = visitor.getResult();\n\n        double expectedSumOfNewRange = 3.0 + 4.0 + 2.0;\n        double[] expectedDifferenceInRangeVector = { 0.0, 3.0, 4.0, 0.0, 0.0, 2.0 };\n        double[] expectedProbVector = Arrays.stream(expectedDifferenceInRangeVector).map(x -> x / expectedSumOfNewRange)\n                .toArray();\n        double[] expectedmeasure = Arrays.stream(expectedProbVector).toArray();\n\n        double[] expectedDistances = new double[2 * point.length];\n        for (int i = 0; i < 2 * point.length; i++) {\n            expectedDistances[i] = expectedProbVector[i] * expectedDifferenceInRangeVector[i];\n        }\n        for (int i = 0; i < 2 * point.length; i++) {\n            expectedmeasure[i] = expectedmeasure[i] * 5;\n        }\n        for (int i = 0; i < point.length; i++) {\n            assertEquals(expectedProbVector[2 * i], result.probMass.high[i]);\n            assertEquals(expectedProbVector[2 * i + 1], result.probMass.low[i]);\n\n            assertEquals(expectedmeasure[2 * i], result.measure.high[i]);\n            assertEquals(expectedmeasure[2 * i + 1], result.measure.low[i]);\n\n            assertEquals(expectedDistances[2 * i], result.distances.high[i]);\n            assertEquals(expectedDistances[2 * i + 1], result.distances.low[i]);\n        }\n\n    }\n\n    @Test\n    public void testAcceptEqualsLeafPoint() {\n        float[] pointToScore = { 0.0f, 0.0f };\n        int sampleSize = 50;\n        SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(pointToScore, sampleSize, 1, false);\n\n        float[] point = Arrays.copyOf(pointToScore, pointToScore.length);\n        INodeView node = mock(NodeView.class);\n        when(node.getLeafPoint()).thenReturn(point);\n        
when(node.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n        when(node.getMass()).thenReturn(1);\n        int depth = 2;\n        visitor.acceptLeaf(node, depth);\n        InterpolationMeasure result = visitor.getResult();\n\n        double[] expected = new double[point.length];\n        Arrays.fill(expected, 0.5 * (1 + node.getMass()) / point.length);\n        assertArrayEquals(expected, result.measure.high);\n        assertArrayEquals(expected, result.measure.low);\n\n        Arrays.fill(expected, 0.5 / point.length);\n        assertArrayEquals(expected, result.probMass.high);\n        assertArrayEquals(expected, result.probMass.low);\n\n        Arrays.fill(expected, 0.0);\n        assertArrayEquals(expected, result.distances.high);\n        assertArrayEquals(expected, result.distances.low);\n\n        depth--;\n        float[] siblingPoint = { 1.0f, -2.0f };\n        INodeView sibling = mock(NodeView.class);\n        int siblingMass = 2;\n        when(sibling.getMass()).thenReturn(siblingMass);\n        INodeView parent = mock(NodeView.class);\n        when(parent.getMass()).thenReturn(1 + siblingMass);\n        BoundingBox boundingBox = new BoundingBox(point, siblingPoint);\n        when(parent.getBoundingBox()).thenReturn(boundingBox);\n        when(parent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(siblingPoint));\n        visitor.accept(parent, depth);\n        result = visitor.getResult();\n\n        // compute using shadow box (sibling leaf node at {1.0, -2.0} and parent\n        // bounding box\n\n        double[] directionalDistance = { 0.0, 1.0, 2.0, 0.0 };\n        double[] differenceInRange = { 0.0, 1.0, 2.0, 0.0 };\n        double sumOfNewRange = 1.0 + 2.0;\n        double[] probVector = Arrays.stream(differenceInRange).map(x -> x / sumOfNewRange).toArray();\n        expected = new double[2 * pointToScore.length];\n        for (int i = 0; i < expected.length; i++) {\n            expected[i] = probVector[i] * (1 + 
node.getMass() + parent.getMass());\n        }\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(expected[2 * i], result.measure.high[i]);\n            assertEquals(expected[2 * i + 1], result.measure.low[i]);\n        }\n\n        for (int i = 0; i < expected.length; i++) {\n            expected[i] = probVector[i];\n        }\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(expected[2 * i], result.probMass.high[i]);\n            assertEquals(expected[2 * i + 1], result.probMass.low[i]);\n        }\n\n        for (int i = 0; i < expected.length; i++) {\n            expected[i] = probVector[i] * directionalDistance[i];\n        }\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(expected[2 * i], result.distances.high[i]);\n            assertEquals(expected[2 * i + 1], result.distances.low[i]);\n        }\n\n        // reset to probMass\n        for (int i = 0; i < expected.length; i++) {\n            expected[i] = probVector[i];\n        }\n\n        // testing shadow box setup for grandparent\n        INodeView uncle = mock(NodeView.class);\n        int uncleMass = 2;\n        when(sibling.getMass()).thenReturn(uncleMass);\n        INodeView grandParent = mock(NodeView.class);\n        when(grandParent.getMass()).thenReturn(1 + siblingMass + uncleMass);\n        BoundingBox grandBox = boundingBox.getMergedBox(new float[] { 2.0f, 2.0f });\n        when(grandParent.getBoundingBox()).thenReturn(grandBox);\n        when(grandParent.getSiblingBoundingBox(any())).thenReturn(new BoundingBox(new float[] { 2.0f, 2.0f }));\n        visitor.accept(grandParent, depth - 1);\n        result = visitor.getResult();\n\n        directionalDistance = new double[] { 0.0, 2.0, 0.0, 0.0 };\n        differenceInRange = new double[] { 0.0, 1.0, 0.0, 0.0 };\n        double newSumOfNewRange = 1.0 + 2.0 + 1.0 + 2.0;\n        probVector = Arrays.stream(differenceInRange).map(x -> x / 
newSumOfNewRange).toArray();\n        double prob = Arrays.stream(probVector).sum();\n        for (int i = 0; i < expected.length; i++) {\n            expected[i] = probVector[i] + (1 - prob) * expected[i];\n        }\n        for (int i = 0; i < pointToScore.length; i++) {\n            System.out.println(i);\n            assertEquals(expected[2 * i], result.probMass.high[i]);\n            assertEquals(expected[2 * i + 1], result.probMass.low[i]);\n        }\n    }\n\n    @Test\n    public void testAccept() {\n        float[] pointToScore = { 0.0f, 0.0f };\n        int sampleSize = 50;\n        SimpleInterpolationVisitor visitor = new SimpleInterpolationVisitor(pointToScore, sampleSize, 1, false);\n\n        INodeView leafNode = mock(NodeView.class);\n        float[] point = new float[] { 1.0f, -2.0f };\n        when(leafNode.getLeafPoint()).thenReturn(point);\n        when(leafNode.getBoundingBox()).thenReturn(new BoundingBox(point, point));\n        int leafMass = 3;\n        when(leafNode.getMass()).thenReturn(leafMass);\n        int depth = 4;\n        visitor.acceptLeaf(leafNode, depth);\n        InterpolationMeasure result = visitor.getResult();\n\n        double expectedSumOfNewRange = 1.0 + 2.0;\n        double[] expectedDifferenceInRangeVector = { 0.0, 1.0, 2.0, 0.0 };\n        double[] expectedProbVector = Arrays.stream(expectedDifferenceInRangeVector).map(x -> x / expectedSumOfNewRange)\n                .toArray();\n        double[] expectedNumPts = Arrays.stream(expectedProbVector).toArray();\n\n        double[] expectedDistances = new double[2 * pointToScore.length];\n        for (int i = 0; i < 2 * pointToScore.length; i++) {\n            expectedDistances[i] = expectedProbVector[i] * expectedDifferenceInRangeVector[i];\n        }\n\n        for (int i = 0; i < 2 * pointToScore.length; i++) {\n            expectedNumPts[i] = expectedNumPts[i] * 4;\n        }\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            
assertEquals(expectedProbVector[2 * i], result.probMass.high[i]);\n            assertEquals(expectedProbVector[2 * i + 1], result.probMass.low[i]);\n\n            assertEquals(expectedNumPts[2 * i], result.measure.high[i]);\n            assertEquals(expectedNumPts[2 * i + 1], result.measure.low[i]);\n\n            assertEquals(expectedDistances[2 * i], result.distances.high[i]);\n            assertEquals(expectedDistances[2 * i + 1], result.distances.low[i]);\n        }\n\n        // parent does not contain pointToScore\n\n        depth--;\n        INodeView sibling = mock(NodeView.class);\n        int siblingMass = 2;\n        when(sibling.getMass()).thenReturn(siblingMass);\n        INodeView parent = mock(NodeView.class);\n        int parentMass = leafMass + siblingMass;\n        when(parent.getMass()).thenReturn(parentMass);\n        when(parent.getBoundingBox()).thenReturn(new BoundingBox(point, new float[] { 2.0f, -0.5f }));\n        visitor.accept(parent, depth);\n        result = visitor.getResult();\n\n        double expectedSumOfNewRange2 = 2.0 + 2.0;\n        double expectedProbOfCut2 = (1.0 + 0.5) / expectedSumOfNewRange2;\n        double[] expectedDifferenceInRangeVector2 = { 0.0, 1.0, 0.5, 0.0 };\n        double[] expectedDirectionalDistanceVector2 = { 0.0, 2.0, 2.0, 0.0 };\n\n        for (int i = 0; i < 2 * pointToScore.length; i++) {\n            double prob = expectedDifferenceInRangeVector2[i] / expectedSumOfNewRange2;\n            expectedProbVector[i] = prob + (1 - expectedProbOfCut2) * expectedProbVector[i];\n            expectedNumPts[i] = prob * (1 + parent.getMass()) + (1 - expectedProbOfCut2) * expectedNumPts[i];\n            expectedDistances[i] = prob * expectedDirectionalDistanceVector2[i]\n                    + (1 - expectedProbOfCut2) * expectedDistances[i];\n        }\n\n        for (int i = 0; i < pointToScore.length; i++) {\n            assertEquals(expectedProbVector[2 * i], result.probMass.high[i]);\n            
assertEquals(expectedProbVector[2 * i + 1], result.probMass.low[i]);\n\n            assertEquals(expectedNumPts[2 * i], result.measure.high[i]);\n            assertEquals(expectedNumPts[2 * i + 1], result.measure.low[i]);\n\n            assertEquals(expectedDistances[2 * i], result.distances.high[i]);\n            assertEquals(expectedDistances[2 * i + 1], result.distances.low[i]);\n        }\n\n        // grandparent contains pointToScore\n\n        assertFalse(visitor.pointInsideBox);\n\n        depth--;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/preprocessor/PreprocessorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.config.ForestMode.STANDARD;\nimport static com.amazon.randomcutforest.config.ForestMode.STREAMING_IMPUTE;\nimport static com.amazon.randomcutforest.config.ForestMode.TIME_AUGMENTED;\nimport static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;\nimport static com.amazon.randomcutforest.config.ImputationMethod.LINEAR;\nimport static com.amazon.randomcutforest.config.ImputationMethod.NEXT;\nimport static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.config.ImputationMethod.ZERO;\nimport static com.amazon.randomcutforest.config.TransformMethod.NONE;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE_DIFFERENCE;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.copyAtEnd;\nimport static java.lang.Math.abs;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static 
org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorMapper;\nimport com.amazon.randomcutforest.statistics.Deviation;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class PreprocessorTest {\n\n    @Test\n    void testConfig() {\n        assertThrows(IllegalArgumentException.class, () -> copyAtEnd(new double[2], new double[3]));\n        assertThrows(IllegalArgumentException.class, () -> copyAtEnd(new float[4], new float[5]));\n        assertNull(Preprocessor.copyIfNotnull((float[]) null));\n        assertNull(Preprocessor.copyIfNotnull((double[]) null));\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(null).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NONE).forestMode(null).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NONE).forestMode(STANDARD).build());\n        assertThrows(IllegalArgumentException.class,\n              
  () -> new Preprocessor.Builder<>().transformMethod(NONE).forestMode(STANDARD).inputLength(10).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .forestMode(STANDARD).inputLength(10).dimensions(12).build());\n        assertDoesNotThrow(() -> {\n            new Preprocessor.Builder<>().transformMethod(NONE).forestMode(STANDARD).inputLength(12).dimensions(12)\n                    .build();\n        });\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .forestMode(STANDARD).inputLength(12).dimensions(12).initialShingledInput(new double[1]).build());\n        assertDoesNotThrow(() -> new Preprocessor.Builder<>().transformMethod(NONE).forestMode(STANDARD).inputLength(12)\n                .dimensions(12).initialShingledInput(new double[12]).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NONE).forestMode(STANDARD).inputLength(6)\n                        .dimensions(12).shingleSize(2).initialShingledInput(new double[6]).build());\n\n        assertDoesNotThrow(() -> new Preprocessor.Builder<>().transformMethod(NONE).forestMode(STANDARD).inputLength(6)\n                .dimensions(12).shingleSize(2).initialShingledInput(new double[12]).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NORMALIZE)\n                .forestMode(STANDARD).inputLength(12).dimensions(12).startNormalization(0).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NORMALIZE_DIFFERENCE).forestMode(STANDARD)\n                        .inputLength(12).dimensions(12).startNormalization(0).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n          
      .forestMode(TIME_AUGMENTED).inputLength(12).dimensions(12).build());\n        assertDoesNotThrow(() -> {\n            new Preprocessor.Builder<>().transformMethod(NONE).forestMode(TIME_AUGMENTED).inputLength(12).dimensions(13)\n                    .build();\n        });\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(2).forestMode(TIME_AUGMENTED).inputLength(12).dimensions(13).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(2).forestMode(TIME_AUGMENTED).inputLength(12).dimensions(14).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(2).forestMode(STREAMING_IMPUTE).inputLength(12).dimensions(12).shingleSize(1).build());\n        assertDoesNotThrow(() -> new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2)\n                .forestMode(TIME_AUGMENTED).inputLength(6).dimensions(14).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2).forestMode(TIME_AUGMENTED)\n                        .inputLength(6).dimensions(14).initialShingledInput(new double[14]).build());\n        assertDoesNotThrow(() -> new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2)\n                .forestMode(TIME_AUGMENTED).inputLength(6).dimensions(14).initialShingledInput(new double[12]).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2).forestMode(TIME_AUGMENTED)\n                        .inputLength(6).initialPoint(new float[12]).dimensions(14).build());\n        assertDoesNotThrow(() -> {\n            new 
Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2).forestMode(TIME_AUGMENTED).inputLength(6)\n                    .dimensions(14).initialPoint(new float[14]).build();\n        });\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(-2).forestMode(TIME_AUGMENTED).inputLength(6).dimensions(14).build());\n\n        assertDoesNotThrow(() -> {\n            new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2).normalizeTime(true)\n                    .forestMode(TIME_AUGMENTED).inputLength(6).dimensions(14).build();\n        });\n\n        // external shingling in STANDARD mode\n        assertDoesNotThrow(() -> {\n            IPreprocessor preprocessor = new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2)\n                    .forestMode(TIME_AUGMENTED).inputLength(6).dimensions(14).build();\n            // need a forest\n            assertThrows(IllegalArgumentException.class,\n                    () -> preprocessor.getScaledShingledInput(new double[6], 0L, null, null));\n        });\n\n        // internal shingling\n        assertDoesNotThrow(() -> {\n            IPreprocessor preprocessor = new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2)\n                    .forestMode(STANDARD).inputLength(6).dimensions(12).build();\n            assertDoesNotThrow(() -> preprocessor.getScaledShingledInput(new double[6], 0L, null, null));\n            assertNull(preprocessor.invertInPlaceRecentSummaryBlock(null));\n            SampleSummary summary = new SampleSummary(6);\n            summary.summaryPoints = new float[1][6];\n            summary.measure = new float[1][2];\n            assertThrows(IllegalArgumentException.class, () -> preprocessor.invertInPlaceRecentSummaryBlock(summary));\n            assertThrows(IllegalArgumentException.class, () -> preprocessor.setDefaultFill(new double[7]));\n            assertDoesNotThrow(() -> 
preprocessor.setDefaultFill(new double[6]));\n            assertThrows(IllegalArgumentException.class,\n                    () -> ((Preprocessor) preprocessor).setPreviousTimeStamps(new long[5]));\n        });\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(2).forestMode(STANDARD).weights(new double[1]).inputLength(6).dimensions(12).build());\n\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(2).forestMode(STANDARD).weights(new double[2]).inputLength(6).dimensions(12).build());\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2).forestMode(STANDARD)\n                        .weights(new double[] { 1.0, 1.0 }).inputLength(6).dimensions(12).build());\n\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .forestMode(STANDARD).inputLength(6).dimensions(12).build());\n        assertDoesNotThrow(() -> {\n            new Preprocessor.Builder<>().transformMethod(NONE).shingleSize(2).normalizeTime(true).forestMode(STANDARD)\n                    .inputLength(6).dimensions(12).build();\n        });\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .shingleSize(2).forestMode(STANDARD).inputLength(5).dimensions(12).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .inputLength(5).dimensions(5).startNormalization(0).normalizeTime(true).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().transformMethod(NONE)\n                .inputLength(1).dimensions(1).weights(new double[] { 0.5 }).build());\n        
assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().inputLength(1).dimensions(1)\n                .startNormalization(0).transformMethod(NORMALIZE_DIFFERENCE).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().inputLength(1).dimensions(1)\n                .startNormalization(0).transformMethod(NORMALIZE_DIFFERENCE).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().inputLength(1).dimensions(1)\n                .forestMode(STREAMING_IMPUTE).imputationMethod(FIXED_VALUES).build());\n        assertThrows(IllegalArgumentException.class, () -> new Preprocessor.Builder<>().inputLength(1).dimensions(2)\n                .forestMode(STREAMING_IMPUTE).imputationMethod(FIXED_VALUES).shingleSize(2).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new Preprocessor.Builder<>().inputLength(1).dimensions(2).forestMode(STREAMING_IMPUTE)\n                        .imputationMethod(FIXED_VALUES).shingleSize(2).fillValues(new double[2]).build());\n        assertDoesNotThrow(() -> new Preprocessor.Builder<>().inputLength(1).dimensions(2).forestMode(STREAMING_IMPUTE)\n                .imputationMethod(FIXED_VALUES).shingleSize(2).fillValues(new double[1]).build());\n    }\n\n    public void preprocessorPlusForest(int seed, ForestMode mode, TransformMethod method, ImputationMethod imputeMethod,\n            boolean internalShinglingHint, int shingleSize) {\n        int dataSize = 1000;\n        int sampleSize = 256;\n        int tempDimensions = (mode == TIME_AUGMENTED) ? 
2 * shingleSize : shingleSize;\n        MultiDimDataWithKey dataWithKey = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 50, 5, seed, 1,\n                false);\n        Preprocessor.Builder<?> builder = Preprocessor.builder().inputLength(1).dimensions(tempDimensions)\n                .weights(new double[] { 1.0 }).shingleSize(shingleSize).transformMethod(method).randomSeed(seed + 1)\n                .forestMode(mode);\n        if (mode == STREAMING_IMPUTE) {\n            builder.imputationMethod(imputeMethod);\n            builder.fastForward(new Random().nextDouble() < 0.5);\n            if (imputeMethod == FIXED_VALUES) {\n                builder.fillValues(new double[] { 5 });\n            }\n        }\n        if (imputeMethod != null) {\n            builder.imputationMethod(imputeMethod);\n        }\n        boolean internal = ((internalShinglingHint || method != NONE) && mode != STREAMING_IMPUTE);\n        Preprocessor preprocessor = builder.build(); // polymorphism\n        RandomCutForest forest = RandomCutForest.builder().dimensions(tempDimensions).randomSeed(seed + 2)\n                .outputAfter(50).shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(internal)\n                .build();\n        Random random = new Random(seed + 4);\n        double score = 0;\n        double error = 0;\n        for (int i = 0; i < dataSize - 1; i++) {\n            long timestamp = i * 100 + random.nextInt(20);\n            if (mode != STREAMING_IMPUTE) {\n                assertEquals(preprocessor.numberOfImputes(timestamp), 0);\n            }\n            PreprocessorMapper mapper = new PreprocessorMapper();\n            Preprocessor newPre = mapper.toModel(mapper.toState(preprocessor));\n            float[] shingle = preprocessor.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest);\n            assertArrayEquals(shingle, newPre.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest));\n            if 
(shingle != null && (mode != STREAMING_IMPUTE || i != 3 || random.nextDouble() > 0.1)) {\n                if (i > 100 + shingleSize - 1) {\n                    double currentScore = forest.getAnomalyScore(shingle);\n                    if (currentScore > 1.5) {\n                        float[] value = forest.imputeMissingValues(shingle, 1, new int[] { shingle.length - 1 });\n                        double expected = preprocessor.getExpectedValue(0, dataWithKey.data[i], shingle, value)[0];\n                        System.out.println(\" expected \" + expected + \" in place of \" + dataWithKey.data[i][0]);\n                    }\n                    score += currentScore;\n                }\n            }\n            if (internal) {\n                float[] input = preprocessor.getScaledInput(toFloatArray(dataWithKey.data[i]), timestamp);\n                if (i != 20 && random.nextDouble() > 0.1) {\n                    preprocessor.update(dataWithKey.data[i], input, timestamp, null, forest);\n                } else {\n                    // drop first coordinate\n                    preprocessor.update(dataWithKey.data[i], input, timestamp, new int[1], forest);\n                }\n                if (shingleSize > 1) {\n                    RangeVector rangeVector = forest.extrapolateWithRanges(preprocessor.getLastShingledPoint(), 1,\n                            tempDimensions / shingleSize, false, 0, 1.0);\n                    TimedRangeVector timedRanges = preprocessor.invertForecastRange(rangeVector, timestamp, null, false,\n                            timestamp);\n                    // error of lookahead\n                    if (i > 100 + shingleSize - 1) {\n                        error += abs(timedRanges.rangeVector.values[0] - dataWithKey.data[i + 1][0]);\n                    }\n                }\n            } else {\n                // force two subsequent drops\n                if (i != 0 && i != 5 && i != preprocessor.startNormalization - 1 && i != 500 && 
i != 501 && i != 502\n                        && random.nextDouble() > 0.1) {\n                    if (random.nextDouble() > 0.7) {\n                        preprocessor.update(dataWithKey.data[i], shingle, timestamp, null, forest);\n                    } else if (random.nextDouble() > 0.5) {\n                        // same as null\n                        preprocessor.update(dataWithKey.data[i], shingle, timestamp, new int[0], forest);\n                    } else {\n                        preprocessor.update(dataWithKey.data[i], shingle, timestamp, new int[] {}, forest);\n                    }\n                } else {\n                    if (i != 5 && i != 6 && i != 500 && i != 501 && i != 502) {\n                        // force initial; note 5 is dropped\n                        preprocessor.update(dataWithKey.data[i], shingle, timestamp, new int[] { 0 }, forest);\n                    }\n                    // drop\n                }\n            }\n        }\n        assertTrue((score) / (dataSize - 100 - shingleSize + 1) < 1);\n        // note for time-augmentation the noise in the time will overwhelm the noise in\n        // the signal\n        if (mode != TIME_AUGMENTED && (imputeMethod == null || imputeMethod == RCF)) {\n            assertTrue((error) / (dataSize - 200 - shingleSize + 1) < 10); // twice the noise\n        }\n        PreprocessorMapper mapper = new PreprocessorMapper();\n        Preprocessor second = mapper.toModel(mapper.toState(preprocessor));\n        assertArrayEquals(second.getSmoothedDeviations(), preprocessor.getSmoothedDeviations(), 1e-10f);\n        assertArrayEquals(second.getShift(), preprocessor.getShift(), 1e-10f);\n        assertArrayEquals(second.getScale(), preprocessor.getScale(), 1e-10f);\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void preprocessorTest(TransformMethod method) {\n        preprocessorPlusForest(0, STANDARD, method, null, true, 10);\n        
preprocessorPlusForest(0, STANDARD, method, null, true, 1);\n        preprocessorPlusForest(0, STANDARD, method, null, false, 2);\n        preprocessorPlusForest(0, STANDARD, method, RCF, false, 1);\n        preprocessorPlusForest(0, TIME_AUGMENTED, method, null, true, 1);\n        preprocessorPlusForest(0, TIME_AUGMENTED, method, null, true, 3);\n        preprocessorPlusForest(0, STREAMING_IMPUTE, method, RCF, true, 10);\n        preprocessorPlusForest(0, STREAMING_IMPUTE, method, PREVIOUS, false, 5);\n        preprocessorPlusForest(0, STREAMING_IMPUTE, method, ZERO, false, 11);\n        preprocessorPlusForest(0, STREAMING_IMPUTE, method, FIXED_VALUES, true, 12);\n        preprocessorPlusForest(0, STREAMING_IMPUTE, method, NEXT, false, 7);\n        preprocessorPlusForest(0, STREAMING_IMPUTE, method, LINEAR, false, 2);\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void allMissing(TransformMethod method) {\n        int dataSize = 1000;\n        int shingleSize = 2;\n        long seed = new Random().nextLong();\n        Random random = new Random(seed + 4);\n        double[] defaultFill = null;\n        if (method == NORMALIZE) {\n            defaultFill = new double[] { random.nextInt(10) };\n        }\n\n        MultiDimDataWithKey dataWithKey = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 50, 5, 0, 1,\n                false);\n        Preprocessor.Builder<?> builder = Preprocessor.builder().inputLength(1).dimensions(shingleSize)\n                .weights(new double[] { 1.0 }).shingleSize(shingleSize).transformMethod(method).randomSeed(seed + 1)\n                .weightTime(0).normalizeTime(true).forestMode(STANDARD).fillValues(defaultFill);\n\n        Preprocessor preprocessor = builder.build();\n\n        // testing length of deviation list\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.manageDeviations(new Deviation[12], null, 0));\n\n        for (int i = 0; i < 
preprocessor.getStartNormalization(); i++) {\n            long timestamp = i * 100 + random.nextInt(20);\n            PreprocessorMapper mapper = new PreprocessorMapper();\n            Preprocessor newPre = mapper.toModel(mapper.toState(preprocessor));\n            float[] shingle = preprocessor.getScaledShingledInput(dataWithKey.data[i], timestamp, null, null);\n            assertArrayEquals(shingle, newPre.getScaledShingledInput(dataWithKey.data[i], timestamp, null, null));\n            preprocessor.update(dataWithKey.data[i], shingle, timestamp, new int[] { 0 }, null);\n        }\n        assertTrue(preprocessor.getInitialValues() == null);\n        assertTrue(preprocessor.isOutputReady());\n        assertEquals(preprocessor.getScale().length, 1);\n        assertEquals(preprocessor.getShift().length, 1);\n        if (method == NORMALIZE) {\n            assertTrue(preprocessor.getDeviationList()[0].getMean() == defaultFill[0]);\n            assertEquals(preprocessor.getLastShingledInput()[0], defaultFill[0]);\n        } else {\n            assertTrue(preprocessor.getDeviationList()[0].getMean() == 0);\n        }\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.inverseMapTime(0, -(shingleSize + 1)));\n        assertEquals(preprocessor.inverseMapTimeValue(1L, 1L), 0);\n        assertEquals(preprocessor.getShingledInput().length, shingleSize);\n        assertEquals(preprocessor.dataQuality(), 0);\n        assertTrue(preprocessor.normalize(-200, 1) < 0);\n        assertEquals(preprocessor.getScale().length, 1);\n        assertEquals(preprocessor.getShift().length, 1);\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void allMissingWithForest(TransformMethod method) {\n        int dataSize = 1000;\n        int shingleSize = 2;\n        long seed = 0L;\n        MultiDimDataWithKey dataWithKey = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 50, 5, 0, 1,\n                false);\n        
Preprocessor.Builder<?> builder = Preprocessor.builder().inputLength(1).dimensions(2 * shingleSize)\n                .weights(new double[] { 1.0 }).shingleSize(shingleSize).transformMethod(method).randomSeed(seed + 1)\n                .forestMode(TIME_AUGMENTED).weightTime(0).normalizeTime(false);\n        RandomCutForest forest = new RandomCutForest.Builder().dimensions(2 * shingleSize)\n                .internalShinglingEnabled(false) // not recommended\n                .shingleSize(shingleSize).build();\n\n        Preprocessor preprocessor = builder.build();\n        Random random = new Random(seed + 4);\n        assertTrue(!preprocessor.isOutputReady());\n        assertThrows(IllegalArgumentException.class,\n                () -> preprocessor.getScaledShingledInput(dataWithKey.data[0], 0, null, null));\n        for (int i = 0; i < preprocessor.getStartNormalization() + 1; i++) {\n            long timestamp = i * 100 + random.nextInt(20);\n            PreprocessorMapper mapper = new PreprocessorMapper();\n            Preprocessor newPre = mapper.toModel(mapper.toState(preprocessor));\n            float[] shingle = preprocessor.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest);\n            assertArrayEquals(shingle, newPre.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest));\n            preprocessor.update(dataWithKey.data[i], shingle, timestamp, new int[] { 0 }, forest);\n        }\n        assertTrue(preprocessor.getInitialValues() == null);\n        assertTrue(preprocessor.getDeviationList()[0].getMean() == 0);\n        assertEquals(preprocessor.getScale().length, 2);\n        assertEquals(preprocessor.getShift().length, 2);\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.inverseMapTime(0, -(shingleSize + 1)));\n        assertEquals(preprocessor.inverseMapTimeValue(1L, 1L), 0);\n        assertEquals(preprocessor.getShingledInput().length, shingleSize);\n        
assertEquals(preprocessor.dataQuality(), 0);\n        assertTrue(preprocessor.normalize(0, 1) < 0);\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.invertForecastRange(new RangeVector(11),\n                preprocessor.internalTimeStamp + 1, new double[0], false, 0));\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.invertForecastRange(new RangeVector(10),\n                preprocessor.internalTimeStamp + 1, new double[0], false, 0));\n        long[] values = preprocessor.invertForecastRange(new RangeVector(10), preprocessor.internalTimeStamp - 1,\n                new double[1], false, -100).timeStamps;\n        long[] otherValues = preprocessor.invertForecastRange(new RangeVector(10), preprocessor.internalTimeStamp - 1,\n                new double[1], true, -100).timeStamps;\n        assertTrue(values[0] == otherValues[0]);\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.invertInPlace(new float[10], null, -1));\n        assertDoesNotThrow(() -> preprocessor.invertInPlace(new float[2], new double[1], -1));\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class, names = { \"NONE\", \"WEIGHTED\", \"DIFFERENCE\" })\n    public void basicPreProcessor(TransformMethod method) {\n        int dataSize = 1000;\n        int shingleSize = 2;\n        long seed = 0L;\n        MultiDimDataWithKey dataWithKey = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 50, 5, 0, 1,\n                false);\n        Preprocessor.Builder<?> builder = Preprocessor.builder().inputLength(1).dimensions(shingleSize)\n                .weights(new double[] { 1.0 }).shingleSize(shingleSize).transformMethod(method).randomSeed(seed + 1)\n                .forestMode(STANDARD).imputationMethod(ZERO);\n        RandomCutForest forest = new RandomCutForest.Builder().dimensions(shingleSize).internalShinglingEnabled(false) // not\n                // recommended\n                
.shingleSize(shingleSize).build();\n\n        Preprocessor preprocessor = builder.build();\n        Random random = new Random(seed + 4);\n\n        assertDoesNotThrow(() -> preprocessor.getScaledShingledInput(dataWithKey.data[0], 0, null, null));\n        for (int i = 0; i < preprocessor.getStartNormalization() + 1; i++) {\n            long timestamp = i * 100 + random.nextInt(20);\n            PreprocessorMapper mapper = new PreprocessorMapper();\n            Preprocessor newPre = mapper.toModel(mapper.toState(preprocessor));\n            float[] shingle = preprocessor.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest);\n            assertArrayEquals(shingle, newPre.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest));\n            preprocessor.update(dataWithKey.data[i], shingle, timestamp, new int[] { 0 }, forest);\n        }\n        assertTrue(preprocessor.getInitialValues() == null);\n        assertTrue(preprocessor.getDeviationList()[0].getMean() == 0);\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.inverseMapTime(0, -(shingleSize + 1)));\n        assertNotEquals(preprocessor.inverseMapTimeValue(1L, 1L), 0);\n        assertEquals(preprocessor.getShingledInput().length, shingleSize);\n        assertEquals(preprocessor.dataQuality(), 0);\n        assertTrue(preprocessor.normalize(0, 1) < 0);\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.getExpectedValue(-2, null, null, new float[1]));\n        assertThrows(IllegalArgumentException.class, () -> preprocessor.getExpectedValue(-2, null, null, new float[2]));\n        preprocessor.getExpectedValue(-1, null, null, new float[2]);\n        assertDoesNotThrow(() -> preprocessor.getExpectedValue(-1, null, null, new float[2]));\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = ImputationMethod.class)\n    public void streamingImputeLargeGap(ImputationMethod method) {\n        int dataSize = 1000;\n        int shingleSize = 
4;\n        long seed = 0L;\n        MultiDimDataWithKey dataWithKey = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 50, 5, 0, 1,\n                false);\n        Preprocessor.Builder<?> builder = Preprocessor.builder().inputLength(1).dimensions(shingleSize)\n                .weights(new double[] { 1.0 }).shingleSize(shingleSize).randomSeed(seed + 1)\n                .forestMode(STREAMING_IMPUTE).imputationMethod(method).transformMethod(NORMALIZE).fastForward(true);\n        if (method == FIXED_VALUES) {\n            builder.fillValues(new double[] { 0 });\n        }\n        RandomCutForest forest = new RandomCutForest.Builder().dimensions(shingleSize).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).build();\n\n        Preprocessor preprocessor = builder.build();\n        Random random = new Random(seed + 4);\n\n        assertDoesNotThrow(() -> preprocessor.getScaledShingledInput(dataWithKey.data[0], 0, null, null));\n        for (int i = 0; i < dataSize; i++) {\n            long timestamp = i * 100 + random.nextInt(20);\n            PreprocessorMapper mapper = new PreprocessorMapper();\n            Preprocessor newPre = mapper.toModel(mapper.toState(preprocessor));\n            float[] shingle = preprocessor.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest);\n            assertArrayEquals(shingle, newPre.getScaledShingledInput(dataWithKey.data[i], timestamp, null, forest));\n            preprocessor.update(dataWithKey.data[i], shingle, timestamp, null, forest);\n        }\n        long updates = forest.getTotalUpdates();\n        double[] newData = new double[] { -11.11 };\n        float[] shingle = preprocessor.getScaledShingledInput(newData, 100 * dataSize + 10000L, null, forest);\n        assertEquals(forest.getTotalUpdates(), updates);\n        preprocessor.update(newData, shingle, 100 * dataSize + 10000L, null, forest);\n        if (method == RCF) {\n            assertEquals(forest.getTotalUpdates(), 
updates + shingleSize);\n        } else {\n            assertEquals(forest.getTotalUpdates(), updates + 100 + shingleSize / 2);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/preprocessor/transform/WeightedTransformerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.preprocessor.transform;\n\nimport static com.amazon.randomcutforest.preprocessor.transform.WeightedTransformer.NUMBER_OF_STATS;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\npublic class WeightedTransformerTest {\n\n    public void checkTransformer(WeightedTransformer w, double value, double another) {\n        w.setWeights(new double[1]);\n        float[] test = new float[] { 1.0f };\n        w.invertInPlace(test, new double[] { 2.0 });\n        assertEquals(test[0], value, 1e-6);\n        assertEquals(w.getScale()[0], 0, 1e-6);\n        RangeVector r = new RangeVector(1);\n        r.shift(0, 10);\n        assertEquals(r.values[0], 10, 1e-6);\n        assertEquals(r.upper[0], 10, 1e-6);\n        assertEquals(r.lower[0], 10, 1e-6);\n        assertThrows(IllegalArgumentException.class,\n                () -> w.invertForecastRange(r, 1, new double[] { 1.0 }, new double[0]));\n        w.invertForecastRange(r, 1, new double[] { 1.0 }, new double[1]);\n     
   assertEquals(r.values[0], another, 1e-6);\n        assertEquals(r.upper[0], another, 1e-6);\n        assertEquals(r.lower[0], another, 1e-6);\n    }\n\n    @Test\n    void constructorTest() {\n        assertThrows(IllegalArgumentException.class, () -> new WeightedTransformer(new double[2], new Deviation[5]));\n        assertThrows(IllegalArgumentException.class,\n                () -> new WeightedTransformer(new double[2], new Deviation[2 * NUMBER_OF_STATS]));\n        Deviation[] deviations = new Deviation[NUMBER_OF_STATS];\n        for (int i = 0; i < NUMBER_OF_STATS; i++) {\n            deviations[i] = new Deviation(0);\n        }\n        WeightedTransformer w = new WeightedTransformer(new double[1], deviations);\n        assertThrows(IllegalArgumentException.class, () -> w.setWeights(new double[2]));\n        checkTransformer(w, 0, 0);\n        checkTransformer(new NormalizedDifferenceTransformer(new double[1], deviations), 2.0, 1.0);\n        assertThrows(IllegalArgumentException.class,\n                () -> new NormalizedDifferenceTransformer(new double[1], deviations).invertInPlace(new float[1],\n                        new double[2]));\n        checkTransformer(new DifferenceTransformer(new double[1], deviations), 2.0, 1.0);\n        assertThrows(IllegalArgumentException.class,\n                () -> new DifferenceTransformer(new double[1], deviations).invertInPlace(new float[1], new double[2]));\n    }\n\n    @Test\n    void updateDeviationsTest() {\n        Deviation[] deviations = new Deviation[2 * NUMBER_OF_STATS];\n        for (int y = 0; y < deviations.length; y++) {\n            deviations[y] = new Deviation(0);\n        }\n        WeightedTransformer transformer = new WeightedTransformer(new double[2], deviations);\n        assertThrows(IllegalArgumentException.class,\n                () -> transformer.updateDeviation(new double[1], new double[1], null));\n        assertThrows(IllegalArgumentException.class,\n                () -> 
transformer.updateDeviation(new double[2], new double[1], null));\n        assertDoesNotThrow(() -> transformer.updateDeviation(new double[2], new double[2], null));\n    }\n\n    @Test\n    void normalizeTest() {\n        Deviation[] deviations = new Deviation[2 * NUMBER_OF_STATS];\n        for (int y = 0; y < deviations.length; y++) {\n            deviations[y] = new Deviation(0);\n        }\n        WeightedTransformer transformer = new WeightedTransformer(new double[2], deviations);\n        assertThrows(IllegalArgumentException.class, () -> transformer.normalize(10, 5, 0, 10));\n        assertTrue(transformer.normalize(10, 5, 0.5, 9) == 9);\n        assertTrue(transformer.normalize(-10, -5, 0.5, 9) == -9);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/DensityOutputTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class DensityOutputTest {\n\n    private int dimensions;\n    private int sampleSize;\n    private DensityOutput output;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 3;\n        sampleSize = 99;\n        output = new DensityOutput(dimensions, sampleSize);\n    }\n\n    @Test\n    public void testNew() {\n        double[] zero = new double[3];\n        assertArrayEquals(zero, output.measure.high);\n        assertArrayEquals(zero, output.distances.high);\n        assertArrayEquals(zero, output.probMass.high);\n        assertArrayEquals(zero, output.measure.low);\n        assertArrayEquals(zero, output.distances.low);\n        assertArrayEquals(zero, output.probMass.low);\n    }\n\n    @Test\n    public void testAddToLeft() {\n        DensityOutput other1 = new DensityOutput(dimensions, sampleSize);\n        DensityOutput other2 = new DensityOutput(dimensions, sampleSize);\n\n        for (int i = 0; i < dimensions; i++) {\n            
output.probMass.high[i] = 2 * i;\n            output.probMass.low[i] = 2 * i + 1;\n            output.distances.high[i] = 4 * i;\n            output.distances.low[i] = 4 * i + 2;\n            output.measure.high[i] = 6 * i;\n            output.measure.low[i] = 6 * i + 3;\n\n            other1.probMass.high[i] = other2.probMass.high[i] = 8 * i;\n            other1.distances.high[i] = other2.distances.high[i] = 10 * i;\n            other1.measure.high[i] = other2.measure.high[i] = 12 * i;\n\n            other1.probMass.low[i] = other2.probMass.low[i] = 8 * i + 4;\n            other1.distances.low[i] = other2.distances.low[i] = 10 * i + 5;\n            other1.measure.low[i] = other2.measure.low[i] = 12 * i + 6;\n\n        }\n\n        assertArrayEquals(other1.probMass.high, other2.probMass.high);\n        assertArrayEquals(other1.distances.high, other2.distances.high);\n        assertArrayEquals(other1.measure.high, other2.measure.high);\n        assertArrayEquals(other1.probMass.low, other2.probMass.low);\n        assertArrayEquals(other1.distances.low, other2.distances.low);\n        assertArrayEquals(other1.measure.low, other2.measure.low);\n\n        DensityOutput.addToLeft(output, other1);\n\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(2 * i + 8 * i, output.probMass.high[i]);\n            assertEquals(4 * i + 10 * i, output.distances.high[i]);\n            assertEquals(6 * i + 12 * i, output.measure.high[i]);\n            assertEquals(2 * i + 8 * i + 5, output.probMass.low[i]);\n            assertEquals(4 * i + 10 * i + 7, output.distances.low[i]);\n            assertEquals(6 * i + 12 * i + 9, output.measure.low[i]);\n        }\n\n        assertArrayEquals(other1.probMass.high, other2.probMass.high);\n        assertArrayEquals(other1.distances.high, other2.distances.high);\n        assertArrayEquals(other1.measure.high, other2.measure.high);\n        assertArrayEquals(other1.probMass.low, other2.probMass.low);\n        
assertArrayEquals(other1.distances.low, other2.distances.low);\n        assertArrayEquals(other1.measure.low, other2.measure.low);\n\n    }\n\n    @Test\n    public void testGetDensity() {\n        assertTrue(output.getDensity(0.5, 3) == 0);\n        for (int i = 0; i < dimensions; i++) {\n            output.probMass.high[i] = 2 * i;\n            output.distances.high[i] = 4 * i;\n            output.measure.high[i] = 6 * i;\n            output.probMass.low[i] = 2 * i;\n            output.distances.low[i] = 4 * i + 2;\n            output.measure.low[i] = 6 * i + 3;\n        }\n\n        double q = 0.5;\n        double density = output.getDensity(q, 3);\n        DiVector densityVector = output.getDirectionalDensity(q, 3);\n\n        double sumOfPoints = output.measure.getHighLowSum() / sampleSize;\n        double sumOfFactors = 0.0;\n        for (int i = 0; i < dimensions; i++) {\n            double mass = output.probMass.getHighLowSum(i);\n            double distance = output.distances.getHighLowSum(i);\n            double t = (mass != 0) ? 
distance / mass : 0;\n            t = Math.pow(t, dimensions) * mass;\n            sumOfFactors += t;\n        }\n\n        assertEquals(sumOfPoints / (q * sumOfPoints + sumOfFactors), density, EPSILON);\n\n        // for contrib, do not scale sum of points by sample size\n        sumOfPoints = output.measure.getHighLowSum();\n\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(output.measure.high[i] * density / sumOfPoints, densityVector.high[i], EPSILON);\n            assertEquals(output.measure.low[i] * density / sumOfPoints, densityVector.low[i], EPSILON);\n        }\n\n        assertEquals(output.getDensity(DensityOutput.DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions),\n                output.getDensity());\n\n        densityVector = output.getDirectionalDensity(DensityOutput.DEFAULT_SUM_OF_POINTS_SCALING_FACTOR, dimensions);\n        DiVector defaultDensityVector = output.getDirectionalDensity();\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(densityVector.high[i], defaultDensityVector.high[i], EPSILON);\n            assertEquals(densityVector.low[i], defaultDensityVector.low[i], EPSILON);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/DiVectorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertSame;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport java.util.Arrays;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.state.returntypes.DiVectorMapper;\nimport com.amazon.randomcutforest.state.returntypes.DiVectorState;\n\npublic class DiVectorTest {\n\n    int dimensions;\n    private DiVector vector;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 3;\n        vector = new DiVector(dimensions);\n    }\n\n    @Test\n    public void testNew() {\n        double[] expected = new double[dimensions];\n        assertEquals(dimensions, vector.getDimensions());\n        assertArrayEquals(expected, vector.high);\n        assertArrayEquals(expected, vector.low);\n        assertThrows(IllegalArgumentException.class, () -> new DiVector(0));\n        assertThrows(IllegalArgumentException.class, () -> new DiVector(new double[10], 
new double[9]));\n        assertDoesNotThrow(() -> new DiVector(new double[10], new double[10]));\n    }\n\n    @Test\n    public void testAddToLeft() {\n        DiVector left = new DiVector(dimensions);\n        DiVector right = new DiVector(dimensions);\n        for (int i = 0; i < dimensions; i++) {\n            left.low[i] = Math.random();\n            left.high[i] = Math.random();\n            right.low[i] = Math.random();\n            right.high[i] = Math.random();\n        }\n        assertThrows(IllegalArgumentException.class, () -> DiVector.addToLeft(left, new DiVector(dimensions + 1)));\n        DiVector leftCopy = new DiVector(dimensions);\n        System.arraycopy(left.low, 0, leftCopy.low, 0, dimensions);\n        System.arraycopy(left.high, 0, leftCopy.high, 0, dimensions);\n\n        DiVector rightCopy = new DiVector(dimensions);\n        System.arraycopy(right.low, 0, rightCopy.low, 0, dimensions);\n        System.arraycopy(right.high, 0, rightCopy.high, 0, dimensions);\n\n        DiVector result = DiVector.addToLeft(left, right);\n\n        assertSame(result, left);\n        assertArrayEquals(rightCopy.low, right.low);\n        assertArrayEquals(rightCopy.high, right.high);\n\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(leftCopy.low[i] + right.low[i], left.low[i]);\n            assertEquals(leftCopy.high[i] + right.high[i], left.high[i]);\n        }\n    }\n\n    @Test\n    public void testScale() {\n        vector.high[0] = 1.1;\n        vector.high[2] = 3.1;\n        vector.low[1] = 2.2;\n\n        double z = 9.9;\n        DiVector result = vector.scale(z);\n\n        double[] expected = new double[] { 1.1 * 9.9, 0.0, 3.1 * 9.9 };\n        assertArrayEquals(expected, result.high);\n\n        expected = new double[] { 0.0, 2.2 * 9.9, 0.0 };\n        assertArrayEquals(expected, result.low);\n\n        DiVector emptyVector = new DiVector(dimensions);\n        emptyVector.scale(123.0);\n        expected = new 
double[dimensions];\n        assertArrayEquals(expected, emptyVector.low);\n        assertArrayEquals(expected, emptyVector.high);\n    }\n\n    @Test\n    public void testGetHighLowSum() {\n        vector.high[2] = 3.1;\n        vector.low[1] = 2.2;\n\n        assertEquals(3.1 + 2.2, vector.getHighLowSum());\n    }\n\n    @Test\n    public void testRenormalize() {\n\n        DiVector testVector = new DiVector(10);\n        // cannot renormalize really\n        testVector.renormalize(100);\n        assertEquals(testVector.getHighLowSum(), 0);\n        vector.high[0] = 1.1;\n        vector.high[2] = 3.1;\n        vector.low[1] = 2.2;\n\n        assertEquals(1.1 + 3.1 + 2.2, vector.getHighLowSum());\n\n        vector.renormalize(100.0);\n\n        assertEquals(100.0, vector.getHighLowSum());\n\n    }\n\n    @Test\n    public void testComponentwiseTransform() {\n        vector.high[0] = 1.1;\n        vector.high[1] = 2.1;\n        vector.high[2] = 3.1;\n        vector.low[0] = 101.1;\n        vector.low[1] = 202.1;\n        vector.low[2] = 303.1;\n\n        double[] highCopy = Arrays.copyOf(vector.high, dimensions);\n        double[] lowCopy = Arrays.copyOf(vector.low, dimensions);\n        vector.componentwiseTransform(x -> 2 * x - 1);\n\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(2 * highCopy[i] - 1, vector.high[i], EPSILON);\n            assertEquals(2 * lowCopy[i] - 1, vector.low[i], EPSILON);\n        }\n    }\n\n    @Test\n    public void testMapper() {\n        DiVector left = new DiVector(dimensions);\n        for (int i = 0; i < dimensions; i++) {\n            left.low[i] = Math.random();\n            left.high[i] = Math.random();\n        }\n        DiVectorMapper mapper = new DiVectorMapper();\n        DiVector another = mapper.toModel(mapper.toState(left));\n        assertArrayEquals(another.high, left.high, 1e-10);\n        assertArrayEquals(another.low, left.low, 1e-10);\n        
assertNull(mapper.toModel(mapper.toState(null)));\n        DiVectorState state = new DiVectorState();\n        state.setHigh(left.high);\n        assertNull(mapper.toModel(state));\n        state.setHigh(null);\n        state.setLow(left.low);\n        assertNull(mapper.toModel(state));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/InterpolationMeasureTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class InterpolationMeasureTest {\n\n    private int dimensions;\n    private int sampleSize;\n    private InterpolationMeasure output;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 3;\n        sampleSize = 99;\n        output = new InterpolationMeasure(dimensions, sampleSize);\n    }\n\n    @Test\n    public void testNew() {\n        double[] zero = new double[3];\n        assertArrayEquals(zero, output.measure.high);\n        assertArrayEquals(zero, output.distances.high);\n        assertArrayEquals(zero, output.probMass.high);\n        assertArrayEquals(zero, output.measure.low);\n        assertArrayEquals(zero, output.distances.low);\n        assertArrayEquals(zero, output.probMass.low);\n        assertEquals(output.getSampleSize(), sampleSize);\n        assertThrows(IllegalArgumentException.class, () -> new InterpolationMeasure(0, sampleSize));\n        assertThrows(IllegalArgumentException.class,\n                () -> new 
InterpolationMeasure(1, new DiVector(1), new DiVector(2), new DiVector(3)));\n        assertThrows(IllegalArgumentException.class,\n                () -> new InterpolationMeasure(1, new DiVector(2), new DiVector(2), new DiVector(3)));\n        assertDoesNotThrow(() -> new InterpolationMeasure(1, new DiVector(2), new DiVector(2), new DiVector(2)));\n    }\n\n    @Test\n    public void testAddToLeft() {\n        InterpolationMeasure other1 = new InterpolationMeasure(dimensions, sampleSize);\n        InterpolationMeasure other2 = new InterpolationMeasure(dimensions, sampleSize);\n\n        assertThrows(IllegalArgumentException.class,\n                () -> InterpolationMeasure.addToLeft(other1, new InterpolationMeasure(dimensions + 1, sampleSize)));\n        for (int i = 0; i < dimensions; i++) {\n            output.probMass.high[i] = 2 * i;\n            output.probMass.low[i] = 2 * i + 1;\n            output.distances.high[i] = 4 * i;\n            output.distances.low[i] = 4 * i + 2;\n            output.measure.high[i] = 6 * i;\n            output.measure.low[i] = 6 * i + 3;\n\n            other1.probMass.high[i] = other2.probMass.high[i] = 8 * i;\n            other1.distances.high[i] = other2.distances.high[i] = 10 * i;\n            other1.measure.high[i] = other2.measure.high[i] = 12 * i;\n\n            other1.probMass.low[i] = other2.probMass.low[i] = 8 * i + 4;\n            other1.distances.low[i] = other2.distances.low[i] = 10 * i + 5;\n            other1.measure.low[i] = other2.measure.low[i] = 12 * i + 6;\n\n        }\n\n        assertArrayEquals(other1.probMass.high, other2.probMass.high);\n        assertArrayEquals(other1.distances.high, other2.distances.high);\n        assertArrayEquals(other1.measure.high, other2.measure.high);\n        assertArrayEquals(other1.probMass.low, other2.probMass.low);\n        assertArrayEquals(other1.distances.low, other2.distances.low);\n        assertArrayEquals(other1.measure.low, other2.measure.low);\n\n        
InterpolationMeasure.addToLeft(output, other1);\n\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(2 * i + 8 * i, output.probMass.high[i]);\n            assertEquals(4 * i + 10 * i, output.distances.high[i]);\n            assertEquals(6 * i + 12 * i, output.measure.high[i]);\n            assertEquals(2 * i + 8 * i + 5, output.probMass.low[i]);\n            assertEquals(4 * i + 10 * i + 7, output.distances.low[i]);\n            assertEquals(6 * i + 12 * i + 9, output.measure.low[i]);\n        }\n\n        assertArrayEquals(other1.probMass.high, other2.probMass.high);\n        assertArrayEquals(other1.distances.high, other2.distances.high);\n        assertArrayEquals(other1.measure.high, other2.measure.high);\n        assertArrayEquals(other1.probMass.low, other2.probMass.low);\n        assertArrayEquals(other1.distances.low, other2.distances.low);\n        assertArrayEquals(other1.measure.low, other2.measure.low);\n\n    }\n\n    @Test\n    public void testScale() {\n        InterpolationMeasure copy = new InterpolationMeasure(dimensions, sampleSize);\n\n        for (int i = 0; i < dimensions; i++) {\n            output.probMass.high[i] = copy.probMass.high[i] = 2 * i;\n            output.distances.high[i] = copy.distances.high[i] = 4 * i;\n            output.measure.high[i] = copy.measure.high[i] = 6 * i;\n            output.probMass.low[i] = copy.probMass.low[i] = 2 * i + 1;\n            output.distances.low[i] = copy.distances.low[i] = 4 * i + 2;\n            output.measure.low[i] = copy.measure.low[i] = 6 * i + 3;\n        }\n\n        assertArrayEquals(copy.probMass.high, output.probMass.high);\n        assertArrayEquals(copy.distances.high, output.distances.high);\n        assertArrayEquals(copy.measure.high, output.measure.high);\n        assertArrayEquals(copy.probMass.low, output.probMass.low);\n        assertArrayEquals(copy.distances.low, output.distances.low);\n        assertArrayEquals(copy.measure.low, output.measure.low);\n\n  
      InterpolationMeasure result = output.scale(0.9);\n\n        assertArrayEquals(copy.probMass.low, output.probMass.low);\n        assertArrayEquals(copy.distances.low, output.distances.low);\n        assertArrayEquals(copy.measure.low, output.measure.low);\n        assertArrayEquals(copy.probMass.high, output.probMass.high);\n        assertArrayEquals(copy.distances.high, output.distances.high);\n        assertArrayEquals(copy.measure.high, output.measure.high);\n\n        for (int i = 0; i < dimensions; i++) {\n            assertEquals(2 * i * 0.9, result.probMass.high[i]);\n            assertEquals(4 * i * 0.9, result.distances.high[i]);\n            assertEquals(6 * i * 0.9, result.measure.high[i]);\n            assertEquals((2 * i + 1) * 0.9, result.probMass.low[i]);\n            assertEquals((4 * i + 2) * 0.9, result.distances.low[i]);\n            assertEquals((6 * i + 3) * 0.9, result.measure.low[i]);\n\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/NeighborTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.Matchers.containsInAnyOrder;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport org.junit.jupiter.api.Test;\n\npublic class NeighborTest {\n    @Test\n    public void testNew() {\n        float[] point = new float[] { 1.0f, -2.0f, 3.3f };\n        double distance = 1234.5;\n        List<Long> timestamps = new ArrayList<>();\n        timestamps.add(99999L);\n        timestamps.add(99L);\n        Neighbor neighbor = new Neighbor(point, distance, timestamps);\n\n        assertArrayEquals(point, neighbor.point);\n        assertEquals(distance, neighbor.distance);\n        assertThat(neighbor.sequenceIndexes, containsInAnyOrder(timestamps.toArray()));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDiVectorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class OneSidedConvergingDiVectorTest {\n\n    private boolean highIsCritical;\n    private double precision;\n    private int minValuesAccepted;\n    private int maxValuesAccepted;\n    private int dimensions;\n    private OneSidedConvergingDiVectorAccumulator accumulator;\n\n    @BeforeEach\n    public void setUp() {\n        highIsCritical = true;\n        precision = 0.1;\n        minValuesAccepted = 5;\n        maxValuesAccepted = 100;\n        dimensions = 2;\n        accumulator = new OneSidedConvergingDiVectorAccumulator(dimensions, highIsCritical, precision,\n                minValuesAccepted, maxValuesAccepted);\n    }\n\n    @Test\n    public void testGetConvergingValue() {\n        DiVector vector = new DiVector(dimensions);\n        vector.high[0] = 1.1;\n        vector.low[1] = 2.3;\n        vector.high[1] = 9.6;\n\n        assertEquals(1.1 + 2.3 + 9.6, accumulator.getConvergingValue(vector), EPSILON);\n    }\n\n    @Test\n    public void testAccumulateValue() {\n        
assertEquals(accumulator.getWitnesses(), 0);\n        assertEquals(accumulator.getMean(), 0);\n        assertEquals(accumulator.getDeviation(), 0);\n\n        DiVector vector1 = new DiVector(dimensions);\n        vector1.high[0] = 1.1;\n        vector1.low[1] = 2.3;\n        vector1.high[1] = 9.6;\n\n        accumulator.accept(vector1);\n        DiVector result = accumulator.getAccumulatedValue();\n        assertArrayEquals(vector1.high, result.high, EPSILON);\n        assertArrayEquals(vector1.low, result.low, EPSILON);\n\n        DiVector vector2 = new DiVector(dimensions);\n        vector2.high[0] = 1.1;\n        vector2.low[1] = 2.3;\n        vector2.high[1] = 9.6;\n\n        accumulator.accept(vector2);\n        result = accumulator.getAccumulatedValue();\n        DiVector.addToLeft(vector1, vector2);\n        assertArrayEquals(vector1.high, result.high, EPSILON);\n        assertArrayEquals(vector1.low, result.low, EPSILON);\n\n        for (int i = 0; i < 5; i++) {\n            accumulator.accept(vector2);\n        }\n        assertEquals(accumulator.getWitnesses(), 3);\n        assertEquals(accumulator.getDeviation(), 0, 1e-6f);\n        assertEquals(accumulator.getMean(), 13, 1e-6f);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/OneSidedConvergingDoubleAccumulatorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\n/**\n * This test doubles as a test of the abstract OneSidedStdDevAccumulator class\n */\npublic class OneSidedConvergingDoubleAccumulatorTest {\n\n    private boolean highIsCritical;\n    private double precision;\n    private int minValuesAccepted;\n    private int maxValuesAccepted;\n    private OneSidedConvergingDoubleAccumulator accumulator;\n\n    @BeforeEach\n    public void setUp() {\n        highIsCritical = true;\n        precision = 0.1;\n        minValuesAccepted = 5;\n        maxValuesAccepted = 100;\n        accumulator = new OneSidedConvergingDoubleAccumulator(highIsCritical, precision, minValuesAccepted,\n                maxValuesAccepted);\n    }\n\n    @Test\n    public void testGetConvergingValue() {\n        assertEquals(1.23, accumulator.getConvergingValue(1.23));\n        assertEquals(-1001.1001, accumulator.getConvergingValue(-1001.1001));\n    }\n\n    @Test\n    public void testAccumulateValue() {\n        double sum = 0.0;\n        for (int i = 0; i < 10; i++) {\n            double value = Math.random();\n     
       accumulator.accept(value);\n            sum += value;\n            assertEquals(sum, accumulator.getAccumulatedValue());\n        }\n    }\n\n    @Test\n    public void testConvergenceHighIsCritical() {\n        accumulator.accept(0.0);\n        accumulator.accept(10.0);\n        accumulator.accept(0.0);\n        accumulator.accept(10.0);\n\n        // less than minValuesAccepted\n        assertEquals(4, accumulator.getValuesAccepted());\n        assertFalse(accumulator.isConverged());\n        double expectedSum = 20.0;\n        assertEquals(expectedSum, accumulator.getAccumulatedValue());\n\n        // each high value should result in a witness to convergence\n        // we need 1.0 / precision witnesses in order to converge\n\n        for (int i = 0; i < 1.0 / precision - 1; i++) {\n            accumulator.accept(0.0);\n            accumulator.accept(10.0);\n            assertEquals(6 + 2 * i, accumulator.getValuesAccepted());\n            assertFalse(accumulator.isConverged());\n            expectedSum += 10.0;\n            assertEquals(expectedSum, accumulator.getAccumulatedValue());\n        }\n\n        accumulator.accept(0.0);\n        assertFalse(accumulator.isConverged());\n\n        // the last required high value\n        accumulator.accept(10.0);\n        assertTrue(accumulator.isConverged());\n\n        expectedSum += 10.0;\n        assertEquals(expectedSum, accumulator.getAccumulatedValue());\n    }\n\n    @Test\n    public void testConvergenceLowIsCritical() {\n        highIsCritical = false;\n        accumulator = new OneSidedConvergingDoubleAccumulator(highIsCritical, precision, minValuesAccepted,\n                maxValuesAccepted);\n\n        accumulator.accept(0.0);\n        accumulator.accept(10.0);\n        accumulator.accept(0.0);\n        accumulator.accept(10.0);\n\n        // less than minValuesAccepted\n        assertFalse(accumulator.isConverged());\n        double expectedSum = 20.0;\n        assertEquals(expectedSum, 
accumulator.getAccumulatedValue());\n\n        // each high value should result in a witness to convergence\n        // we need 1.0 / precision witnesses in order to converge\n\n        for (int i = 0; i < 1.0 / precision - 1; i++) {\n            accumulator.accept(0.0);\n            accumulator.accept(10.0);\n            assertFalse(accumulator.isConverged());\n            expectedSum += 10.0;\n            assertEquals(expectedSum, accumulator.getAccumulatedValue());\n        }\n\n        accumulator.accept(10.0);\n        assertFalse(accumulator.isConverged());\n\n        // the last required low value\n        accumulator.accept(0.0);\n        assertTrue(accumulator.isConverged());\n\n        expectedSum += 10.0;\n        assertEquals(expectedSum, accumulator.getAccumulatedValue());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/RangeVectorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class RangeVectorTest {\n\n    int dimensions;\n    private RangeVector vector;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 3;\n        vector = new RangeVector(dimensions);\n    }\n\n    @Test\n    public void testNew() {\n        assertThrows(IllegalArgumentException.class, () -> new RangeVector(0));\n        assertThrows(IllegalArgumentException.class, () -> new RangeVector(new float[0]));\n        float[] expected = new float[dimensions];\n        assertArrayEquals(expected, vector.values);\n        assertArrayEquals(expected, vector.upper);\n        assertArrayEquals(expected, vector.lower);\n\n        float[] another = new float[0];\n        assertThrows(IllegalArgumentException.class, () -> new RangeVector(another, another, another));\n        assertThrows(IllegalArgumentException.class,\n                () -> new RangeVector(expected, expected, new float[dimensions + 1]));\n        assertThrows(IllegalArgumentException.class,\n                () -> new RangeVector(expected, new 
float[dimensions + 1], expected));\n        assertThrows(IllegalArgumentException.class,\n                () -> new RangeVector(new float[dimensions + 1], expected, expected));\n        assertDoesNotThrow(() -> new RangeVector(expected, expected, expected));\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new RangeVector(expected, new float[] { -1f, 0f, 0f }, expected));\n        assertDoesNotThrow(() -> new RangeVector(expected, expected, new float[] { -1f, 0f, 0f }));\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new RangeVector(expected, new float[] { 1f, 0f, 0f }, new float[] { 1f, 0f, 0f }));\n        assertDoesNotThrow(() -> new RangeVector(expected, new float[] { 1f, 0f, 0f }, new float[] { -1f, 0f, 0f }));\n    }\n\n    @Test\n    public void testScale() {\n        vector.upper[0] = 1.1f;\n        vector.upper[2] = 3.1f;\n        vector.upper[1] = 3.1f;\n        vector.lower[1] = -2.2f;\n\n        float z = 9.9f;\n        assertThrows(IllegalArgumentException.class, () -> vector.scale(0, -1.0f));\n        assertThrows(IllegalArgumentException.class, () -> vector.scale(-1, 1.0f));\n        assertThrows(IllegalArgumentException.class, () -> vector.scale(dimensions + 1, 1.0f));\n        vector.scale(0, z);\n\n        float[] expected = new float[] { 1.1f * 9.9f, 3.1f, 3.1f };\n        assertArrayEquals(expected, vector.upper, 1e-6f);\n\n        expected = new float[] { 0.0f, -2.2f, 0.0f };\n        assertArrayEquals(expected, vector.lower);\n\n        vector.scale(1, 2 * z);\n        assertArrayEquals(new float[] { 1.1f * 9.9f, 3.1f * 2 * z, 3.1f }, vector.upper, 1e-6f);\n        assertArrayEquals(new float[] { 0f, -2.2f * 2 * z, 0f }, vector.lower, 1e-6f);\n    }\n\n    @Test\n    public void testShift() {\n        vector.upper[0] = 1.1f;\n        vector.upper[2] = 3.1f;\n        vector.lower[1] = -2.2f;\n\n        float z = -9.9f;\n        assertThrows(IllegalArgumentException.class, () -> 
vector.shift(-1, z));\n        assertThrows(IllegalArgumentException.class, () -> vector.shift(dimensions + 1, z));\n        vector.shift(0, z);\n\n        float[] expected = new float[] { 1.1f - 9.9f, 0.0f, 3.1f };\n        assertArrayEquals(expected, vector.upper, 1e-6f);\n\n        expected = new float[] { z, -2.2f, 0.0f };\n        assertArrayEquals(expected, vector.lower);\n\n        assertArrayEquals(new float[] { z, 0, 0 }, vector.values, 1e-6f);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/SampleSummaryTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.Random;\nimport java.util.function.BiFunction;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class SampleSummaryTest {\n\n    /**\n     * this class tests the return type data structure whereas\n     * randomcutforest.SampleSummaryTest tests the summarization algorithms.\n     */\n    int dataSize = 20000;\n    int newDimensions = 2;\n    Random random = new Random();\n\n    @Test\n    public void testConstructor() {\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(Collections.emptyList(), 0.6));\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();\n        for (float[] point : points) {\n            // testing 0 weight\n            weighted.add(new Weighted<>(point, 
0.0f));\n        }\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted, 0.1));\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted, 1.3));\n\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n        weighted.get(0).weight = Float.NaN;\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n        weighted.get(0).weight = Float.POSITIVE_INFINITY;\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n        weighted.get(0).weight = -1.0f;\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n        weighted.get(0).weight = 1.0f;\n        assertDoesNotThrow(() -> new SampleSummary(weighted));\n        weighted.get(1).index = new float[newDimensions + 1];\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n\n        weighted.get(1).index = new float[newDimensions];\n        weighted.get(1).index[0] = Float.NaN;\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n        weighted.get(1).index[0] = Float.NEGATIVE_INFINITY;\n        assertThrows(IllegalArgumentException.class, () -> new SampleSummary(weighted));\n        weighted.get(1).index[0] = -1.0f;\n        SampleSummary summary = new SampleSummary(weighted);\n    }\n\n    @Test\n    public void addTypicalTest() {\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n        ArrayList<Weighted<float[]>> weighted = new ArrayList<>();\n        for (float[] point : points) {\n            // testing 0 weight\n            weighted.add(new Weighted<>(point, 1.0f));\n        }\n        SampleSummary summary = new SampleSummary(weighted);\n        assertThrows(IllegalArgumentException.class,\n                () -> summary.addTypical(new float[1][2], new float[2], new 
float[2][2]));\n        assertDoesNotThrow(() -> summary.addTypical(new float[0][2], new float[0], new float[0][2]));\n        assertDoesNotThrow(() -> summary.addTypical(new float[2][4], new float[2], new float[2][4]));\n        assertThrows(IllegalArgumentException.class,\n                () -> summary.addTypical(new float[2][4], new float[2], new float[2][2]));\n        assertThrows(IllegalArgumentException.class,\n                () -> summary.addTypical(new float[][] { new float[2], new float[3] }, new float[2], new float[2][2]));\n        assertThrows(IllegalArgumentException.class,\n                () -> summary.addTypical(new float[][] { new float[2], new float[3] }, new float[2], new float[2][1]));\n        assertThrows(IllegalArgumentException.class,\n                () -> summary.addTypical(new float[2][4], new float[2], new float[1][4]));\n    }\n\n    public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {\n        double baseMu = 0.0;\n        double baseSigma = 1.0;\n        double anomalyMu = 0.0;\n        double anomalySigma = 1.0;\n        double transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        double transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);\n        float[][] floatData = new float[dataSize][];\n\n        float[] allZero = new float[newDimensions];\n        float[] sigma = new float[newDimensions];\n        Arrays.fill(sigma, 1f);\n        double scale = distance.apply(allZero, sigma);\n\n        for (int i = 0; i < dataSize; i++) {\n            // shrink, shift at random\n            int nextD = prg.nextInt(newDimensions);\n            for 
(int j = 0; j < newDimensions; j++) {\n                data[i][j] *= 1.0 / (3.0);\n                // standard deviation adds up across dimension; taking square root\n                // and using a 3 sigma ball\n                if (j == nextD) {\n                    if (prg.nextDouble() < 0.5)\n                        data[i][j] += 2.0 * scale;\n                    else\n                        data[i][j] -= 2.0 * scale;\n                }\n            }\n            floatData[i] = toFloatArray(data[i]);\n        }\n\n        return floatData;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/returntypes/TimedRangeVectorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.returntypes;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class TimedRangeVectorTest {\n\n    int dimensions;\n    int horizon;\n    private TimedRangeVector vector;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 4;\n        horizon = 2;\n        vector = new TimedRangeVector(dimensions, horizon);\n    }\n\n    @Test\n    public void testNew() {\n        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(2, -2));\n        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(-2, 2));\n        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(5, 2));\n        assertDoesNotThrow(() -> new TimedRangeVector(6, 2));\n        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(new RangeVector(8), 3));\n        assertDoesNotThrow(() -> new TimedRangeVector(new RangeVector(9), 3));\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new TimedRangeVector(new RangeVector(5), new long[2], new 
long[2], new long[2]));\n        assertThrows(IllegalArgumentException.class,\n                () -> new TimedRangeVector(new RangeVector(4), new long[2], new long[2], new long[1]));\n        assertThrows(IllegalArgumentException.class,\n                () -> new TimedRangeVector(new RangeVector(4), new long[2], new long[1], new long[1]));\n    }\n\n    @Test\n    public void testScale() {\n        assertTrue(vector.timeStamps.length == 2);\n        vector.timeStamps[0] = 100L;\n        vector.upperTimeStamps[0] = 120L;\n        vector.lowerTimeStamps[0] = -82L;\n        vector.lowerTimeStamps[1] = -100L;\n        assertThrows(IllegalArgumentException.class, () -> vector.scaleTime(-1, 1.0));\n        assertThrows(IllegalArgumentException.class, () -> vector.scaleTime(3, 1.0));\n        assertThrows(IllegalArgumentException.class, () -> vector.scaleTime(0, -1.0));\n\n        vector.scaleTime(0, 0.5);\n        assertArrayEquals(vector.timeStamps, new long[] { 50, 0 });\n        assertArrayEquals(vector.upperTimeStamps, new long[] { 60, 0 });\n        assertArrayEquals(vector.lowerTimeStamps, new long[] { -41, -100 });\n    }\n\n    @Test\n    public void testShift() {\n        vector.timeStamps[0] = 100L;\n        vector.upperTimeStamps[0] = 120L;\n        vector.lowerTimeStamps[0] = -82L;\n        vector.lowerTimeStamps[1] = -100L;\n        assertThrows(IllegalArgumentException.class, () -> vector.shiftTime(-1, 1L));\n        assertThrows(IllegalArgumentException.class, () -> vector.shiftTime(3, 1L));\n\n        vector.shiftTime(1, 13);\n\n        TimedRangeVector newVector = new TimedRangeVector(vector);\n        assertArrayEquals(newVector.timeStamps, new long[] { 100, 13 });\n        assertArrayEquals(newVector.upperTimeStamps, new long[] { 120, 13 });\n        assertArrayEquals(newVector.lowerTimeStamps, new long[] { -82, -87 });\n\n        newVector.shiftTime(1, -130);\n        assertArrayEquals(vector.timeStamps, new long[] { 100, 13 });\n        
assertArrayEquals(vector.upperTimeStamps, new long[] { 120, 13 });\n        assertArrayEquals(vector.lowerTimeStamps, new long[] { -82, -87 });\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new TimedRangeVector(new RangeVector(4), newVector.timeStamps, new long[2], new long[2]));\n\n        assertThrows(IllegalArgumentException.class, () -> new TimedRangeVector(new RangeVector(4),\n                newVector.timeStamps, new long[] { 101L, 0L }, new long[2]));\n\n        TimedRangeVector another = new TimedRangeVector(new RangeVector(4), newVector.timeStamps,\n                new long[] { 101L, 0L }, newVector.lowerTimeStamps);\n        assertArrayEquals(another.timeStamps, new long[] { 100, -117 });\n        assertArrayEquals(another.upperTimeStamps, new long[] { 101, 0 });\n        assertArrayEquals(another.lowerTimeStamps, new long[] { -82, -217 });\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/runner/AnomalyAttributionRunnerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Arrays;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.returntypes.DiVector;\n\npublic class AnomalyAttributionRunnerTest {\n\n    private int numberOfTrees;\n    private int sampleSize;\n    private int shingleSize;\n    private int windowSize;\n    private String delimiter;\n    private boolean headerRow;\n    private AnomalyAttributionRunner runner;\n\n    private BufferedReader in;\n    private PrintWriter out;\n\n    @BeforeEach\n    public void setUp() {\n        numberOfTrees = 50;\n        sampleSize = 100;\n        shingleSize = 1;\n        windowSize = 10;\n        delimiter = \",\";\n        headerRow = true;\n        runner = new AnomalyAttributionRunner();\n\n        runner.parse(\"--number-of-trees\", Integer.toString(numberOfTrees), \"--sample-size\",\n                Integer.toString(sampleSize), \"--shingle-size\", Integer.toString(shingleSize), \"--window-size\",\n         
       Integer.toString(windowSize), \"--delimiter\", delimiter, \"--header-row\", Boolean.toString(headerRow));\n\n        in = mock(BufferedReader.class);\n        out = mock(PrintWriter.class);\n    }\n\n    @Test\n    public void testRun() throws IOException {\n        when(in.readLine()).thenReturn(\"a,b\").thenReturn(\"1.0,2.0\").thenReturn(\"4.0,5.0\").thenReturn(null);\n        runner.run(in, out);\n        verify(out).println(\"a,b,anomaly_low_0,anomaly_high_0,anomaly_low_1,anomaly_high_1\");\n        verify(out).println(\"1.0,2.0,0.0,0.0,0.0,0.0\");\n        verify(out).println(\"4.0,5.0,0.0,0.0,0.0,0.0\");\n    }\n\n    @Test\n    public void testWriteHeader() {\n        String[] line = new String[] { \"a\", \"b\" };\n        runner.prepareAlgorithm(2);\n        runner.writeHeader(line, out);\n        verify(out).println(\"a,b,anomaly_low_0,anomaly_high_0,anomaly_low_1,anomaly_high_1\");\n    }\n\n    @Test\n    public void testProcessLine() {\n        String[] line = new String[] { \"1.0\", \"2.0\" };\n        runner.prepareAlgorithm(2);\n        runner.processLine(line, out);\n        verify(out).println(\"1.0,2.0,0.0,0.0,0.0,0.0\");\n    }\n\n    @Test\n    public void testAnomalyAttributionTransformer() {\n        RandomCutForest forest = mock(RandomCutForest.class);\n        when(forest.getDimensions()).thenReturn(2);\n        AnomalyAttributionRunner.AnomalyAttributionTransformer transformer = new AnomalyAttributionRunner.AnomalyAttributionTransformer(\n                forest);\n\n        DiVector vector = new DiVector(2);\n        vector.low[0] = 1.1;\n        vector.high[1] = 2.2;\n\n        when(forest.getAnomalyAttribution(new double[] { 1.0, 2.0 })).thenReturn(vector);\n        assertEquals(Arrays.asList(\"1.1\", \"0.0\", \"0.0\", \"2.2\"), transformer.getResultValues(1.0, 2.0));\n        assertEquals(Arrays.asList(\"anomaly_low_0\", \"anomaly_high_0\", \"anomaly_low_1\", \"anomaly_high_1\"),\n                
transformer.getResultColumnNames());\n        assertEquals(Arrays.asList(\"NA\", \"NA\", \"NA\", \"NA\"), transformer.getEmptyResultValue());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/runner/AnomalyScoreRunnerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Collections;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.RandomCutForest;\n\npublic class AnomalyScoreRunnerTest {\n\n    private int numberOfTrees;\n    private int sampleSize;\n    private int shingleSize;\n    private int windowSize;\n    private String delimiter;\n    private boolean headerRow;\n    private AnomalyScoreRunner runner;\n\n    private BufferedReader in;\n    private PrintWriter out;\n\n    @BeforeEach\n    public void setUp() {\n        numberOfTrees = 50;\n        sampleSize = 100;\n        shingleSize = 1;\n        windowSize = 10;\n        delimiter = \",\";\n        headerRow = true;\n        runner = new AnomalyScoreRunner();\n\n        runner.parse(\"--number-of-trees\", Integer.toString(numberOfTrees), \"--sample-size\",\n                Integer.toString(sampleSize), \"--shingle-size\", Integer.toString(shingleSize), \"--window-size\",\n                Integer.toString(windowSize), \"--delimiter\", delimiter, 
\"--header-row\", Boolean.toString(headerRow));\n\n        in = mock(BufferedReader.class);\n        out = mock(PrintWriter.class);\n    }\n\n    @Test\n    public void testRun() throws IOException {\n        when(in.readLine()).thenReturn(\"a,b,c\").thenReturn(\"1.0,2.0,3.0\").thenReturn(\"4.0,5.0,6.0\").thenReturn(null);\n        runner.run(in, out);\n        verify(out).println(\"a,b,c,anomaly_score\");\n        verify(out).println(\"1.0,2.0,3.0,0.0\");\n        verify(out).println(\"4.0,5.0,6.0,0.0\");\n    }\n\n    @Test\n    public void testWriteHeader() {\n        String[] line = new String[] { \"a\", \"b\", \"c\" };\n        runner.prepareAlgorithm(3);\n        runner.writeHeader(line, out);\n        verify(out).println(\"a,b,c,anomaly_score\");\n    }\n\n    @Test\n    public void testProcessLine() {\n        String[] line = new String[] { \"1.0\", \"2.0\", \"3.0\" };\n        runner.prepareAlgorithm(3);\n        runner.processLine(line, out);\n        verify(out).println(\"1.0,2.0,3.0,0.0\");\n    }\n\n    @Test\n    public void testAnomalyScoreTransformer() {\n        RandomCutForest forest = mock(RandomCutForest.class);\n        AnomalyScoreRunner.AnomalyScoreTransformer transformer = new AnomalyScoreRunner.AnomalyScoreTransformer(forest);\n\n        when(forest.getAnomalyScore(new double[] { 1.0, 2.0, 3.0 })).thenReturn(11.0);\n        assertEquals(Collections.singletonList(\"11.0\"), transformer.getResultValues(1.0, 2.0, 3.0));\n        assertEquals(Collections.singletonList(\"anomaly_score\"), transformer.getResultColumnNames());\n        assertEquals(Collections.singletonList(\"NA\"), transformer.getEmptyResultValue());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/runner/ArgumentParserTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class ArgumentParserTest {\n\n    private ArgumentParser parser;\n\n    @BeforeEach\n    public void setUp() {\n        parser = new ArgumentParser(\"runner-class\", \"runner-description\");\n    }\n\n    @Test\n    public void testNew() {\n        assertEquals(100, parser.getNumberOfTrees());\n        assertEquals(256, parser.getSampleSize());\n        assertEquals(0, parser.getWindowSize());\n        assertEquals(0.0, parser.getTimeDecay());\n        assertEquals(1, parser.getShingleSize());\n        assertFalse(parser.getShingleCyclic());\n        assertEquals(\",\", parser.getDelimiter());\n        assertFalse(parser.getHeaderRow());\n    }\n\n    @Test\n    public void testParse() {\n        parser.parse(\"--number-of-trees\", \"222\", \"--sample-size\", \"123\", \"--window-size\", \"50\", \"--shingle-size\", \"4\",\n                \"--shingle-cyclic\", \"true\", \"--delimiter\", \"\\t\", \"--header-row\", \"true\");\n\n        assertEquals(222, parser.getNumberOfTrees());\n        assertEquals(123, parser.getSampleSize());\n   
     assertEquals(50, parser.getWindowSize());\n        assertEquals(0.02, parser.getTimeDecay());\n        assertEquals(4, parser.getShingleSize());\n        assertTrue(parser.getShingleCyclic());\n        assertEquals(\"\\t\", parser.getDelimiter());\n        assertTrue(parser.getHeaderRow());\n    }\n\n    @Test\n    public void testParseShortFlags() {\n        parser.parse(\"-n\", \"222\", \"-s\", \"123\", \"-w\", \"50\", \"-g\", \"4\", \"-c\", \"true\", \"-d\", \"\\t\");\n\n        assertEquals(222, parser.getNumberOfTrees());\n        assertEquals(123, parser.getSampleSize());\n        assertEquals(50, parser.getWindowSize());\n        assertEquals(0.02, parser.getTimeDecay());\n        assertEquals(4, parser.getShingleSize());\n        assertEquals(\"\\t\", parser.getDelimiter());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/runner/ImputeRunnerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.io.BufferedReader;\nimport java.io.PrintWriter;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class ImputeRunnerTest {\n\n    private int numberOfTrees;\n    private int sampleSize;\n    private int windowSize;\n    private String delimiter;\n    private boolean headerRow;\n    private String missingValueMarker;\n    private ImputeRunner runner;\n\n    private BufferedReader in;\n    private PrintWriter out;\n\n    @BeforeEach\n    public void setUp() {\n        numberOfTrees = 50;\n        sampleSize = 100;\n        windowSize = 10;\n        delimiter = \",\";\n        missingValueMarker = \"X\";\n        headerRow = true;\n        runner = new ImputeRunner();\n\n        runner.parse(\"--number-of-trees\", Integer.toString(numberOfTrees), \"--sample-size\",\n                Integer.toString(sampleSize), \"--window-size\", Integer.toString(windowSize), \"--delimiter\", delimiter,\n                \"--missing-value-marker\", missingValueMarker, \"--header-row\", Boolean.toString(headerRow));\n\n        in = mock(BufferedReader.class);\n        out = mock(PrintWriter.class);\n    }\n\n    @Test\n    public void testRun() 
throws Exception {\n        when(in.readLine()).thenReturn(\"a,b\").thenReturn(\"1.0,2.0\").thenReturn(\"4.0,X\").thenReturn(null);\n        runner.run(in, out);\n        verify(out).println(\"a,b\");\n        verify(out).println(\"1.0,2.0\");\n        verify(out).println(\"0.0,0.0\");\n    }\n\n    @Test\n    public void testWriteHeader() {\n        String[] line = new String[] { \"a\", \"b\" };\n        runner.prepareAlgorithm(2);\n        runner.writeHeader(line, out);\n        verify(out).println(\"a,b\");\n    }\n\n    @Test\n    public void testProcessLine() {\n        String[] line = new String[] { \"1.0\", \"2.0\" };\n        runner.prepareAlgorithm(2);\n        runner.processLine(line, out);\n        verify(out).println(\"1.0,2.0\");\n\n        line = new String[] { missingValueMarker, \"2.0\" };\n        runner.processLine(line, out);\n        verify(out).println(\"0.0,0.0\");\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/runner/SimpleDensityRunnerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Arrays;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\n\npublic class SimpleDensityRunnerTest {\n    private int numberOfTrees;\n    private int sampleSize;\n    private int shingleSize;\n    private int windowSize;\n    private String delimiter;\n    private boolean headerRow;\n    private SimpleDensityRunner runner;\n\n    private BufferedReader in;\n    private PrintWriter out;\n\n    @BeforeEach\n    public void setUp() {\n        numberOfTrees = 50;\n        sampleSize = 100;\n        shingleSize = 1;\n        windowSize = 10;\n        delimiter = \",\";\n        headerRow = true;\n        runner = new SimpleDensityRunner();\n\n        runner.parse(\"--number-of-trees\", Integer.toString(numberOfTrees), \"--sample-size\",\n                Integer.toString(sampleSize), \"--shingle-size\", Integer.toString(shingleSize), \"--window-size\",\n                
Integer.toString(windowSize), \"--delimiter\", delimiter, \"--header-row\", Boolean.toString(headerRow));\n\n        in = mock(BufferedReader.class);\n        out = mock(PrintWriter.class);\n    }\n\n    @Test\n    public void testRun() throws IOException {\n        when(in.readLine()).thenReturn(\"a,b\").thenReturn(\"1.0,2.0\").thenReturn(\"4.0,5.0\").thenReturn(null);\n        runner.run(in, out);\n\n        verify(out).println(\"a,b,prob_mass_0_up,prob_mass_0_down,prob_mass_1_up,prob_mass_1_down\");\n        verify(out).println(\"1.0,2.0,0.000000,0.000000,0.000000,0.000000\");\n        verify(out).println(\"4.0,5.0,0.000000,0.000000,0.000000,0.000000\");\n    }\n\n    @Test\n    public void testWriteHeader() {\n        String[] line = new String[] { \"a\", \"b\" };\n        runner.prepareAlgorithm(2);\n        runner.writeHeader(line, out);\n        verify(out).println(\"a,b,prob_mass_0_up,prob_mass_0_down,prob_mass_1_up,prob_mass_1_down\");\n    }\n\n    @Test\n    public void testProcessLine() {\n        String[] line = new String[] { \"1.0\", \"2.0\" };\n        runner.prepareAlgorithm(2);\n        runner.processLine(line, out);\n\n        verify(out).println(\"1.0,2.0,0.000000,0.000000,0.000000,0.000000\");\n    }\n\n    @Test\n    public void testSimpleDensityTransformer() {\n        RandomCutForest forest = mock(RandomCutForest.class);\n        when(forest.getDimensions()).thenReturn(2);\n        SimpleDensityRunner.SimpleDensityTransformer transformer = new SimpleDensityRunner.SimpleDensityTransformer(\n                forest);\n\n        DensityOutput expected = new DensityOutput(2, 1);\n        expected.probMass.high[0] = 0.0;\n        expected.probMass.low[0] = 0.5;\n        expected.probMass.high[1] = 0.25;\n        expected.probMass.low[1] = 0.25;\n        expected.measure.high[0] = 0.0;\n        expected.measure.low[0] = 8.0;\n        expected.measure.high[1] = 8.0;\n        expected.measure.low[1] = 4.0;\n\n        when(forest.getSimpleDensity(new 
double[] { 1.0, 2.0 })).thenReturn(expected);\n        assertEquals(Arrays.asList(\"0.000000\", \"400.000000\", \"400.000000\", \"200.000000\"),\n                transformer.getResultValues(1.0, 2.0));\n        assertEquals(Arrays.asList(\"prob_mass_0_up\", \"prob_mass_0_down\", \"prob_mass_1_up\", \"prob_mass_1_down\"),\n                transformer.getResultColumnNames());\n        assertEquals(Arrays.asList(\"NA\", \"NA\", \"NA\", \"NA\"), transformer.getEmptyResultValue());\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/runner/UpdateOnlyTransformerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.runner;\n\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.verify;\n\nimport java.util.List;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.RandomCutForest;\n\npublic class UpdateOnlyTransformerTest {\n\n    private RandomCutForest forest;\n    private UpdateOnlyTransformer transformer;\n\n    @BeforeEach\n    public void setUp() {\n        forest = mock(RandomCutForest.class);\n        transformer = new UpdateOnlyTransformer(forest);\n    }\n\n    @Test\n    public void testGetResultValues() {\n        List<String> result = transformer.getResultValues(1.0, 2.0, 3.0);\n        assertTrue(result.isEmpty());\n        verify(forest).update(new double[] { 1.0, 2.0, 3.0 });\n    }\n\n    @Test\n    public void testGetEmptyResultValue() {\n        assertTrue(transformer.getEmptyResultValue().isEmpty());\n    }\n\n    @Test\n    public void testGetResultColumnNames() {\n        assertTrue(transformer.getResultColumnNames().isEmpty());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/sampler/CompactSamplerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.sampler;\n\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.Mockito.reset;\nimport static org.mockito.Mockito.spy;\nimport static org.mockito.Mockito.times;\nimport static org.mockito.Mockito.verify;\nimport static org.mockito.Mockito.when;\n\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.api.extension.ExtensionContext;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.ArgumentsProvider;\nimport org.junit.jupiter.params.provider.ArgumentsSource;\n\nimport 
com.amazon.randomcutforest.config.Config;\n\npublic class CompactSamplerTest {\n\n    private static int sampleSize = 256;\n    private static double lambda = 0.01;\n    private static long seed = 42L;\n\n    private static class SamplerProvider implements ArgumentsProvider {\n        @Override\n        public Stream<? extends Arguments> provideArguments(ExtensionContext context) throws Exception {\n            Random random1 = spy(new Random(seed));\n            CompactSampler sampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).random(random1)\n                    .initialAcceptFraction(0.1).storeSequenceIndexesEnabled(false).build();\n\n            Random random2 = spy(new Random(seed));\n            CompactSampler sampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).random(random2)\n                    .initialAcceptFraction(0.1).storeSequenceIndexesEnabled(true).build();\n\n            CompactSampler sampler3 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).random(random1)\n                    .initialAcceptFraction(1.0).storeSequenceIndexesEnabled(false).build();\n            return Stream.of(Arguments.of(random1, sampler1), Arguments.of(random2, sampler2),\n                    Arguments.of(random1, sampler3));\n        }\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(SamplerProvider.class)\n    public void testNew(Random random, CompactSampler sampler) {\n        // test CompactSampler fields not defined in the IStreamSampler interface\n        assertEquals(lambda, sampler.getTimeDecay());\n        assertNotNull(sampler.getWeightArray());\n        assertNotNull(sampler.getPointIndexArray());\n        long seq = new Random().nextLong();\n        sampler.setMaxSequenceIndex(seq);\n        assertEquals(sampler.getMaxSequenceIndex(), seq);\n        assertFalse(sampler.isFull());\n        assertFalse(sampler.isReady());\n\n        double newLambda = new Random().nextDouble();\n        
sampler.setTimeDecay(newLambda);\n        assertEquals(sampler.getConfig(Config.TIME_DECAY), newLambda);\n        sampler.setConfig(Config.TIME_DECAY, lambda + newLambda);\n        assertEquals(sampler.getTimeDecay(), lambda + newLambda, 1e-10);\n        assertEquals(sampler.getMostRecentTimeDecayUpdate(), seq);\n        sampler.setMostRecentTimeDecayUpdate(0L);\n        assertEquals(sampler.getMostRecentTimeDecayUpdate(), 0L);\n        assertThrows(IllegalArgumentException.class, () -> sampler.getConfig(\"foo\"));\n        assertThrows(IllegalArgumentException.class, () -> sampler.setConfig(\"bar\", 0L));\n        if (sampler.isStoreSequenceIndexesEnabled()) {\n            assertNotNull(sampler.getSequenceIndexArray());\n        } else {\n            assertNull(sampler.getSequenceIndexArray());\n        }\n        assertThrows(IllegalStateException.class, () -> sampler.addPoint(1));\n        assertDoesNotThrow(() -> sampler.addPoint(null));\n    }\n\n    @Test\n    public void testNewFromExistingWeightsParameters() {\n        int sampleSize = 3;\n        double lambda = 0.1;\n\n        // weight array is valid heap\n        float[] weight = { 0.4f, 0.3f, 0.2f };\n        int[] pointIndex = { 1, 2, 3 };\n        assertThrows(IllegalArgumentException.class,\n                () -> new CompactSampler.Builder<>().capacity(sampleSize).size(weight.length).timeDecay(lambda)\n                        .random(new Random()).weight(weight).pointIndex(pointIndex).sequenceIndex(null)\n                        .storeSequenceIndexesEnabled(true).validateHeap(true).build());\n    }\n\n    @Test\n    public void testNewFromExistingWeights() {\n        int sampleSize = 3;\n        double lambda = 0.1;\n\n        // weight array is valid heap\n        float[] weight = { 0.4f, 0.3f, 0.2f };\n        int[] pointIndex = { 1, 2, 3 };\n\n        CompactSampler sampler = new CompactSampler.Builder<>().capacity(sampleSize).size(weight.length)\n                .timeDecay(lambda).random(new 
Random()).weight(weight).pointIndex(pointIndex).sequenceIndex(null)\n                .validateHeap(true).build();\n\n        assertFalse(sampler.getEvictedPoint().isPresent());\n        assertFalse(sampler.isStoreSequenceIndexesEnabled());\n        assertEquals(3, sampler.size());\n        assertNull(sampler.getSequenceIndexArray());\n\n        for (int i = 0; i < 3; i++) {\n            assertEquals(weight[i], sampler.weight[i]);\n            assertEquals(pointIndex[i], sampler.pointIndex[i]);\n        }\n        sampler.setMaxSequenceIndex(10L);\n        sampler.setTimeDecay(lambda * 2);\n        assertNotEquals(sampler.accumuluatedTimeDecay, 0);\n        sampler.getWeightedSample();\n        assertEquals(sampler.accumuluatedTimeDecay, 0);\n    }\n\n    @Test\n    public void testUniformSampler() {\n        CompactSampler uniformSampler = CompactSampler.uniformSampler(sampleSize, seed, false);\n        assertFalse(uniformSampler.getEvictedPoint().isPresent());\n        assertFalse(uniformSampler.isReady());\n        assertFalse(uniformSampler.isFull());\n        assertEquals(sampleSize, uniformSampler.getCapacity());\n        assertEquals(0, uniformSampler.size());\n        assertEquals(0.0, uniformSampler.getTimeDecay());\n    }\n\n    @Test\n    public void testBuilderClass() {\n        assertThrows(IllegalArgumentException.class,\n                () -> new CompactSampler.Builder<>().capacity(0).initialAcceptFraction(0.5).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new CompactSampler.Builder<>().capacity(1).initialAcceptFraction(0).build());\n        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1).size(1).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new CompactSampler.Builder<>().capacity(1).validateHeap(true).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new 
CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 }).build());\n        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1)\n                .sequenceIndex(new long[] { 0 }).storeSequenceIndexesEnabled(true).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new CompactSampler.Builder<>().capacity(1).pointIndex(new int[] { 0 }).build());\n        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1)\n                .weight(new float[0]).pointIndex(new int[] { 0 }).build());\n        assertThrows(IllegalArgumentException.class, () -> new CompactSampler.Builder<>().capacity(1)\n                .weight(new float[] { 0 }).pointIndex(new int[0]).build());\n        assertThrows(IllegalArgumentException.class,\n                () -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 }).pointIndex(new int[] { 0 })\n                        .sequenceIndex(new long[0]).storeSequenceIndexesEnabled(true).build());\n        assertDoesNotThrow(() -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 })\n                .pointIndex(new int[] { 0 }).sequenceIndex(new long[] { 0 }).storeSequenceIndexesEnabled(true).build());\n        assertDoesNotThrow(() -> new CompactSampler.Builder<>().capacity(1).weight(new float[] { 0 })\n                .pointIndex(new int[] { 0 }).build());\n\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(SamplerProvider.class)\n    public void testAddPoint(Random random, CompactSampler sampler) {\n        when(random.nextDouble()).thenReturn(0.0).thenReturn(0.5).thenReturn(0.0).thenReturn(0.01).thenReturn(0.0)\n                .thenReturn(0.99);\n\n        sampler.acceptPoint(10L);\n        double weight1 = sampler.acceptPointState.getWeight();\n        sampler.addPoint(1);\n        sampler.acceptPoint(11L);\n        double weight2 = sampler.acceptPointState.getWeight();\n        // 
acceptstate is non-null\n        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(12, 2.0f, 0L));\n        sampler.addPoint(12);\n        assertThrows(IllegalArgumentException.class, () -> sampler.acceptPoint(12L, -1f));\n        sampler.acceptPoint(12L, 0f);\n        assertNull(sampler.acceptPointState);\n        sampler.acceptPoint(12L);\n        double weight3 = sampler.acceptPointState.getWeight();\n        sampler.addPoint(123);\n\n        assertEquals(3, sampler.size());\n        assertEquals(sampleSize, sampler.getCapacity());\n\n        List<Weighted<Integer>> samples = sampler.getWeightedSample();\n        samples.sort(Comparator.comparing(Weighted<Integer>::getWeight));\n        assertEquals(3, samples.size());\n\n        assertEquals(123, samples.get(0).getValue());\n        assertEquals(weight3, samples.get(0).getWeight());\n\n        assertEquals(1, samples.get(1).getValue());\n        assertEquals(weight1, samples.get(1).getWeight());\n\n        assertEquals(12, samples.get(2).getValue());\n        assertEquals(weight2, samples.get(2).getWeight());\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(SamplerProvider.class)\n    public void testAcceptPoint(Random random, CompactSampler sampler) {\n\n        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(null, 1, 0L));\n        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(null, -1, 0L));\n        assertDoesNotThrow(() -> sampler.addPoint(0, 0f, 0L));\n        assertEquals(sampler.size, 1);\n\n        // The sampler should accept all samples until initial fraction\n        for (int i = 0; i < sampleSize * sampler.initialAcceptFraction; i++) {\n            assertTrue(sampler.acceptPoint(i));\n            assertNotNull(sampler.acceptPointState);\n            sampler.addPoint(i);\n        }\n        assertTrue(sampler.initialAcceptProbability(sampler.size) < 1.0);\n        for (int i = 0; i < sampleSize * 10; i++) {\n            if 
(sampler.acceptPoint(i)) {\n                sampler.addPoint(i);\n            }\n        }\n\n        assertTrue(sampler.isFull());\n        assertTrue(sampler.isReady());\n        assertThrows(IllegalStateException.class, () -> sampler.addPoint(sampleSize));\n        assertThrows(IllegalArgumentException.class, () -> sampler.addPoint(sampleSize, 1.0f, 0L));\n        sampler.setTimeDecay(0);\n        // we should only accept sequences of value samplesize - 1 or higher\n        assertThrows(IllegalStateException.class, () -> sampler.acceptPoint(sampleSize - 2));\n        // In subsequent calls to sample, either the result is empty or else\n        // the new weight is smaller than the evicted weight\n\n        int numAccepted = 0;\n        for (int i = 10 * sampleSize; i < 12 * sampleSize; i++) {\n            if (sampler.acceptPoint(i)) {\n                numAccepted++;\n                assertTrue(sampler.getEvictedPoint().isPresent());\n                assertNotNull(sampler.acceptPointState);\n                Weighted<Integer> evictedPoint = (Weighted<Integer>) sampler.getEvictedPoint().get();\n                assertTrue(sampler.acceptPointState.getWeight() < evictedPoint.getWeight());\n                sampler.addPoint(i);\n            }\n        }\n        assertTrue(numAccepted > 0, \"the sampler did not accept any points\");\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(SamplerProvider.class)\n    public void testUpdate(Random random, CompactSampler compactSampler) {\n        CompactSampler sampler = spy(compactSampler);\n        for (int i = 0; i < sampleSize * sampler.initialAcceptFraction; i++) {\n            assertTrue(sampler.update(i, i));\n        }\n\n        int num = (int) Math.ceil(sampleSize * sampler.initialAcceptFraction);\n        // all points should be added to the sampler until the sampler is full\n        assertEquals(num, sampler.size());\n        verify(sampler, times(num)).addPoint(any());\n\n        reset(sampler);\n\n        int 
numSampled = 0;\n        for (int i = num; i < 2 * sampleSize; i++) {\n            if (sampler.update(i, i)) {\n                numSampled++;\n            }\n        }\n        assertTrue(numSampled > 0, \"no new values were sampled\");\n        assertTrue(sampler.initialAcceptFraction > 0.5 || numSampled < 2 * sampleSize - num, \"all values were sampled\");\n\n        verify(sampler, times(numSampled)).addPoint(any());\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(SamplerProvider.class)\n    public void testGetScore(Random random, CompactSampler sampler) {\n        when(random.nextDouble()).thenReturn(0.0).thenReturn(0.25).thenReturn(0.0).thenReturn(0.75).thenReturn(0.0)\n                .thenReturn(0.50).thenReturn(0.5).thenReturn(0.1).thenReturn(1.3);\n\n        sampler.update(1, 101);\n        sampler.update(2, 102);\n        sampler.update(3, 103);\n\n        double[] expectedScores = new double[3];\n        expectedScores[0] = -lambda * 101L + Math.log(-Math.log(0.25));\n        expectedScores[1] = -lambda * 102L + Math.log(-Math.log(0.75));\n        expectedScores[2] = -lambda * 103L + Math.log(-Math.log(0.50));\n        Arrays.sort(expectedScores);\n\n        assertFalse(sampler.acceptPoint(104));\n        List<Weighted<Integer>> samples = sampler.getWeightedSample();\n        samples.sort(Comparator.comparing(Weighted<Integer>::getWeight));\n\n        for (int i = 0; i < 3; i++) {\n            assertEquals(expectedScores[i], samples.get(i).getWeight(), EPSILON);\n        }\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(SamplerProvider.class)\n    public void testValidateHeap(Random random, CompactSampler sampler) {\n        // populate the heap\n        for (int i = 0; i < 2 * sampleSize; i++) {\n            sampler.update(i, i);\n        }\n\n        float[] weightArray = sampler.getWeightArray();\n\n        // swapping a weight value with one of its children will break the heap property\n        int i = sampleSize / 4;\n        float f = 
weightArray[i];\n        weightArray[i] = weightArray[2 * i + 1];\n        weightArray[2 * i + 1] = f;\n\n        assertThrows(IllegalStateException.class,\n                () -> new CompactSampler.Builder<>().capacity(sampleSize).size(sampleSize).timeDecay(lambda)\n                        .random(random).weight(weightArray).pointIndex(sampler.getPointIndexArray())\n                        .sequenceIndex(sampler.getSequenceIndexArray()).validateHeap(true).build());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/state/RandomCutForestMapperTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.fail;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\nimport org.junit.jupiter.params.provider.MethodSource;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.executor.PointStoreCoordinator;\nimport com.amazon.randomcutforest.executor.SamplerPlusTree;\nimport com.amazon.randomcutforest.preprocessor.IPreprocessor;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport 
com.amazon.randomcutforest.state.preprocessor.PreprocessorMapper;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorState;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.fasterxml.jackson.core.JsonProcessingException;\nimport com.fasterxml.jackson.databind.MapperFeature;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\npublic class RandomCutForestMapperTest {\n\n    private static int dimensions = 5;\n    private static int sampleSize = 128;\n\n    private Version version = new Version();\n\n    private static Stream<RandomCutForest> compactForestProvider() {\n        RandomCutForest.Builder<?> builder = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                .sampleSize(sampleSize);\n\n        RandomCutForest cachedFloat = builder.boundingBoxCacheFraction(new Random().nextDouble()).build();\n        RandomCutForest uncachedFloat = builder.boundingBoxCacheFraction(0.0).build();\n\n        return Stream.of(cachedFloat, uncachedFloat);\n    }\n\n    private RandomCutForestMapper mapper;\n\n    @BeforeEach\n    public void setUp() {\n        mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n    }\n\n    public void assertCompactForestEquals(RandomCutForest forest, RandomCutForest forest2, boolean saveTree) {\n        assertEquals(forest.getDimensions(), forest2.getDimensions());\n        assertEquals(forest.getSampleSize(), forest2.getSampleSize());\n        assertEquals(forest.getOutputAfter(), forest2.getOutputAfter());\n        assertEquals(forest.getNumberOfTrees(), forest2.getNumberOfTrees());\n        assertEquals(forest.getTimeDecay(), forest2.getTimeDecay());\n        assertEquals(forest.isStoreSequenceIndexesEnabled(), forest2.isStoreSequenceIndexesEnabled());\n        assertEquals(forest.isCompact(), forest2.isCompact());\n        assertEquals(forest.getPrecision(), 
forest2.getPrecision());\n        assertEquals(forest.getBoundingBoxCacheFraction(), forest2.getBoundingBoxCacheFraction());\n        assertEquals(forest.isCenterOfMassEnabled(), forest2.isCenterOfMassEnabled());\n        assertEquals(forest.isParallelExecutionEnabled(), forest2.isParallelExecutionEnabled());\n        assertEquals(forest.getThreadPoolSize(), forest2.getThreadPoolSize());\n\n        PointStoreCoordinator coordinator = (PointStoreCoordinator) forest.getUpdateCoordinator();\n        PointStoreCoordinator coordinator2 = (PointStoreCoordinator) forest2.getUpdateCoordinator();\n\n        PointStore store = (PointStore) coordinator.getStore();\n        PointStore store2 = (PointStore) coordinator2.getStore();\n        assertArrayEquals(store.getRefCount(), store2.getRefCount());\n        assertArrayEquals(store.getStore(), store2.getStore());\n        assertEquals(store.getCapacity(), store2.getCapacity());\n        assertEquals(store.size(), store2.size());\n\n        ComponentList<?, ?> components = forest.getComponents();\n        ComponentList<?, ?> otherComponents = new ComponentList(forest2.getComponents());\n        for (int i = 0; i < components.size(); i++) {\n            SamplerPlusTree first = (SamplerPlusTree<?, ?>) components.get(i);\n            SamplerPlusTree second = (SamplerPlusTree<?, ?>) otherComponents.get(i);\n            if (saveTree) {\n                assertEquals(first.getTree().getRandomSeed(), second.getTree().getRandomSeed());\n            }\n            assertEquals(((CompactSampler) first.getSampler()).getRandomSeed(),\n                    ((CompactSampler) second.getSampler()).getRandomSeed());\n        }\n    }\n\n    void testForest(RandomCutForest forest, Boolean saveTree) {\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        for (double[] point : testData.generateTestData(sampleSize, dimensions)) {\n            forest.update(point);\n        }\n        RandomCutForest forest2 = 
mapper.toModel(mapper.toState(forest));\n        assertCompactForestEquals(forest, forest2, saveTree);\n\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"compactForestProvider\")\n    public void testRoundTripForCompactForest(RandomCutForest forest) {\n        testForest(forest, false);\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"compactForestProvider\")\n    public void testRoundTripForCompactForestSaveTreeState(RandomCutForest forest) {\n        mapper.setSaveTreeStateEnabled(true);\n        testForest(forest, true);\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"compactForestProvider\")\n    public void testRoundTripForCompactForestSaveTreeStatePartial(RandomCutForest forest) {\n        mapper.setSaveTreeStateEnabled(true);\n        mapper.setPartialTreeStateEnabled(true);\n        testRoundTripForCompactForest(forest);\n    }\n\n    @Test\n    void testSaveSamplers() {\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).sampleSize(sampleSize)\n                .numberOfTrees(1).build();\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        for (double[] point : testData.generateTestData(sampleSize, dimensions)) {\n            forest.update(point);\n        }\n        mapper.setSaveSamplerStateEnabled(false);\n        assertThrows(IllegalArgumentException.class, () -> mapper.toModel(mapper.toState(forest), 10));\n        mapper.setSaveSamplerStateEnabled(true);\n    }\n\n    @Test\n    void executionContext() {\n        ExecutionContext ec = new ExecutionContext();\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).sampleSize(sampleSize)\n                .parallelExecutionEnabled(true).threadPoolSize(23).numberOfTrees(1).build();\n        RandomCutForest forest2 = mapper.toModel(mapper.toState(forest), ec);\n        assertFalse(forest2.isParallelExecutionEnabled());\n        assertEquals(0, forest2.getThreadPoolSize());\n    
}\n\n    @Test\n    void testVersion() {\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).sampleSize(sampleSize)\n                .parallelExecutionEnabled(true).threadPoolSize(23).numberOfTrees(1).build();\n        assertEquals(mapper.toState(forest).getVersion(), version.V4_0);\n    }\n\n    @Test\n    void testPrecisionException() {\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).sampleSize(sampleSize)\n                .parallelExecutionEnabled(true).threadPoolSize(23).numberOfTrees(1).build();\n        RandomCutForestState state = mapper.toState(forest);\n        assertDoesNotThrow(() -> mapper.toModel(state, 0L));\n        state.setPrecision(Precision.FLOAT_64.name());\n        assertThrows(IllegalStateException.class, () -> mapper.toModel(state, 0));\n    }\n\n    @Test\n    public void testRoundTripForEmptyForest() {\n        Precision precision = Precision.FLOAT_64;\n\n        RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).sampleSize(sampleSize)\n                .numberOfTrees(1).build();\n\n        mapper.setSaveTreeStateEnabled(true);\n        RandomCutForest forest2 = mapper.toModel(mapper.toState(forest));\n\n        assertCompactForestEquals(forest, forest2, true);\n    }\n\n    @Test\n    public void testRoundTripForSingleNodeForest() {\n        int dimensions = 10;\n        long seed = new Random().nextLong();\n        System.out.println(\" Seed \" + seed);\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).numberOfTrees(1)\n                .precision(Precision.FLOAT_32).internalShinglingEnabled(false).randomSeed(seed).build();\n        Random r = new Random(seed + 1);\n        double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n        for (int i = 0; i < new Random().nextInt(1000); i++) {\n            forest.update(point);\n        }\n        
RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n        mapper.setSaveTreeStateEnabled(true);\n        mapper.setPartialTreeStateEnabled(true);\n        RandomCutForest copyForest = mapper.toModel(mapper.toState(forest));\n\n        for (int i = 0; i < new Random(seed + 2).nextInt(1000); i++) {\n            double[] anotherPoint = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n            assertEquals(forest.getAnomalyScore(anotherPoint), copyForest.getAnomalyScore(anotherPoint), 1e-10);\n            forest.update(anotherPoint);\n            copyForest.update(anotherPoint);\n        }\n    }\n\n    private static float[] generate(int input) {\n        return new float[] { (float) (20 * Math.sin(input / 10.0)), (float) (20 * Math.cos(input / 10.0)) };\n    }\n\n    @Test\n    void benchmarkMappers() {\n        long seed = new Random().nextLong();\n        System.out.println(\" Seed \" + seed);\n        Random random = new Random(seed);\n\n        RandomCutForest rcf = RandomCutForest.builder().dimensions(2 * 10).shingleSize(10).sampleSize(628)\n                .internalShinglingEnabled(true).randomSeed(random.nextLong()).build();\n        for (int i = 0; i < 10000; i++) {\n            rcf.update(generate(i));\n        }\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n        mapper.setSaveTreeStateEnabled(true);\n        for (int j = 0; j < 1000; j++) {\n            RandomCutForest newRCF = mapper.toModel(mapper.toState(rcf));\n            float[] test = generate(10000 + j);\n            assertEquals(newRCF.getAnomalyScore(test), rcf.getAnomalyScore(test), 1e-6);\n            rcf.update(test);\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(V2RCFJsonResource.class)\n    public void testJson(V2RCFJsonResource jsonResource) throws JsonProcessingException {\n        RandomCutForestMapper rcfMapper = new 
RandomCutForestMapper();\n        String json = getStateFromFile(jsonResource.getResource());\n        assertNotNull(json);\n        ObjectMapper mapper = new ObjectMapper();\n        mapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);\n        RandomCutForestState state = mapper.readValue(json, RandomCutForestState.class);\n        RandomCutForest forest = rcfMapper.toModel(state);\n        Random r = new Random(0);\n        for (int i = 0; i < 20000; i++) {\n            double[] point = r.ints(forest.getDimensions(), 0, 50).asDoubleStream().toArray();\n            forest.getAnomalyScore(point);\n            forest.update(point, 0L);\n        }\n        assertNotNull(forest);\n    }\n\n    @ParameterizedTest\n    @EnumSource(V2PreProcessorJsonResource.class)\n    public void testPreprocessorJson(V2PreProcessorJsonResource jsonResource) throws JsonProcessingException {\n        PreprocessorMapper preMapper = new PreprocessorMapper();\n        String json = getStateFromFile(jsonResource.getResource());\n        assertNotNull(json);\n        ObjectMapper mapper = new ObjectMapper();\n        mapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);\n        PreprocessorState state = mapper.readValue(json, PreprocessorState.class);\n        IPreprocessor preprocessor = preMapper.toModel(state);\n        assertNotNull(preprocessor);\n    }\n\n    private String getStateFromFile(String resourceFile) {\n        try (InputStream is = RandomCutForestMapperTest.class.getResourceAsStream(resourceFile);\n                BufferedReader rr = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {\n            StringBuilder b = new StringBuilder();\n            String line;\n            while ((line = rr.readLine()) != null) {\n                b.append(line);\n            }\n            return b.toString();\n        } catch (IOException e) {\n            fail(\"Unable to load resource\");\n        }\n        return null;\n   
 }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/state/V2PreProcessorJsonResource.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport lombok.Getter;\n\n@Getter\npublic enum V2PreProcessorJsonResource {\n\n    Preprocessor_1(\"Preprocessor_1.json\"), Preprocessor_2(\"Preprocessor_2.json\"), Preprocessor_3(\"Preprocessor_3.json\");\n\n    private final String resource;\n\n    V2PreProcessorJsonResource(String resource) {\n        this.resource = resource;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/state/V2RCFJsonResource.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state;\n\nimport lombok.Getter;\n\n@Getter\npublic enum V2RCFJsonResource {\n\n    RCF_1(\"state_1.json\"), RCF_2(\"state_2.json\"), RCF_3(\"state_3.json\");\n\n    private final String resource;\n\n    V2RCFJsonResource(String resource) {\n        this.resource = resource;\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/state/sampler/CompactSamplerMapperTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.sampler;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.MethodSource;\n\nimport com.amazon.randomcutforest.sampler.CompactSampler;\n\npublic class CompactSamplerMapperTest {\n\n    private static int sampleSize = 20;\n    private static double lambda = 0.01;\n    private static long seed = 4444;\n\n    public static Stream<Arguments> nonemptySamplerProvider() {\n        CompactSampler fullSampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)\n                .storeSequenceIndexesEnabled(false).build();\n        CompactSampler fullSampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)\n                .storeSequenceIndexesEnabled(true).build();\n\n        Random 
random = new Random();\n        long baseIndex = 10_000;\n        for (int i = 0; i < 100; i++) {\n            int pointReference = random.nextInt();\n            fullSampler1.update(pointReference, baseIndex + i);\n            fullSampler2.update(pointReference, baseIndex + i);\n        }\n\n        CompactSampler partiallyFullSampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda)\n                .randomSeed(seed).storeSequenceIndexesEnabled(false).build();\n        CompactSampler partiallyFullSampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda)\n                .randomSeed(seed).storeSequenceIndexesEnabled(true).build();\n\n        for (int i = 0; i < sampleSize / 2; i++) {\n            int pointReference = random.nextInt();\n            partiallyFullSampler1.update(pointReference, baseIndex + i);\n            partiallyFullSampler2.update(pointReference, baseIndex + i);\n        }\n\n        return Stream.of(Arguments.of(\"full sampler without sequence indexes\", fullSampler1),\n                Arguments.of(\"full sampler with sequence indexes\", fullSampler2),\n                Arguments.of(\"partially full sampler without sequence indexes\", partiallyFullSampler1),\n                Arguments.of(\"partially full sampler with sequence indexes\", partiallyFullSampler2));\n    }\n\n    public static Stream<Arguments> samplerProvider() {\n        CompactSampler emptySampler1 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)\n                .storeSequenceIndexesEnabled(false).build();\n        CompactSampler emptySampler2 = CompactSampler.builder().capacity(sampleSize).timeDecay(lambda).randomSeed(seed)\n                .storeSequenceIndexesEnabled(true).build();\n\n        return Stream.concat(nonemptySamplerProvider(),\n                Stream.of(Arguments.of(\"empty sampler without sequence indexes\", emptySampler1),\n                        Arguments.of(\"empty sampler with sequence 
indexes\", emptySampler2)));\n    }\n\n    private CompactSamplerMapper mapper;\n\n    @BeforeEach\n    public void setUp() {\n        mapper = new CompactSamplerMapper();\n        mapper.setValidateHeapEnabled(false);\n    }\n\n    private void assertValidMapping(CompactSampler original, CompactSampler mapped) {\n        assertArrayEquals(original.getWeightArray(), mapped.getWeightArray(), \"different weight arrays\");\n        assertArrayEquals(original.getPointIndexArray(), mapped.getPointIndexArray(), \"different point index arrays\");\n        assertEquals(original.getCapacity(), mapped.getCapacity());\n        assertEquals(original.size(), mapped.size());\n        assertEquals(original.getTimeDecay(), mapped.getTimeDecay());\n        assertFalse(mapped.getEvictedPoint().isPresent());\n\n        if (original.isStoreSequenceIndexesEnabled()) {\n            assertTrue(mapped.isStoreSequenceIndexesEnabled());\n            assertArrayEquals(original.getSequenceIndexArray(), mapped.getSequenceIndexArray(),\n                    \"different sequence index arrays\");\n        } else {\n            assertFalse(mapped.isStoreSequenceIndexesEnabled());\n            assertNull(mapped.getSequenceIndexArray());\n        }\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"nonemptySamplerProvider\")\n    public void testRoundTripInvalidHeap(String description, CompactSampler sampler) {\n        mapper.setValidateHeapEnabled(true);\n        CompactSamplerState state = mapper.toState(sampler);\n\n        // swap to weights in the weight array in order to violate the heap property\n        float[] weights = state.getWeight();\n        int index = state.getSize() / 4;\n        float temp = weights[index];\n        weights[index] = weights[2 * index + 1];\n        weights[2 * index + 1] = temp;\n\n        assertThrows(IllegalStateException.class, () -> mapper.toModel(state));\n\n        mapper.setValidateHeapEnabled(false);\n        CompactSampler sampler2 = 
mapper.toModel(state);\n        assertArrayEquals(sampler.getWeightArray(), sampler2.getWeightArray());\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/state/store/PointStoreMapperTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.state.store;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.store.PointStoreSmall;\n\npublic class PointStoreMapperTest {\n    private PointStoreMapper mapper;\n\n    @BeforeEach\n    public void setUp() {\n        mapper = new PointStoreMapper();\n    }\n\n    @Test\n    public void testRoundTrip() {\n        int dimensions = 2;\n        int capacity = 4;\n        PointStore store = new PointStoreSmall(dimensions, capacity);\n\n        float[] point1 = { 1.1f, -22.2f };\n        int index1 = store.add(point1, 1);\n        float[] point2 = { 3.3f, -4.4f };\n        int index2 = store.add(point2, 2);\n        float[] point3 = { 10.1f, 100.1f };\n        int index3 = store.add(point3, 3);\n\n        PointStore store2 = mapper.toModel(mapper.toState(store));\n\n        assertEquals(capacity, store2.getCapacity());\n        assertEquals(3, store2.size());\n        
assertEquals(dimensions, store2.getDimensions());\n        assertArrayEquals(store.getStore(), store2.getStore());\n\n        PointStoreState state = mapper.toState(store);\n        state.setDuplicateRefs(null);\n        assertDoesNotThrow(() -> mapper.toModel(state));\n        state.setDuplicateRefs(new int[1]);\n        assertThrows(IllegalArgumentException.class, () -> mapper.toModel(state));\n        state.setDuplicateRefs(new int[2]);\n        assertDoesNotThrow(() -> mapper.toModel(state));\n        state.setPrecision(Precision.FLOAT_64.name());\n        assertThrows(IllegalArgumentException.class, () -> mapper.toModel(state));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/statistics/StatisticsTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.statistics;\n\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\n\npublic class StatisticsTest {\n\n    @Test\n    void constructorTest() {\n        assertThrows(IllegalArgumentException.class, () -> new Deviation(-1));\n\n        assertThrows(IllegalArgumentException.class, () -> new Deviation(2));\n        Deviation newDeviation = new Deviation(new Random().nextDouble());\n        assertThrows(IllegalArgumentException.class, () -> newDeviation.setDiscount(-1));\n        assertThrows(IllegalArgumentException.class, () -> newDeviation.setDiscount(2));\n        assertDoesNotThrow(() -> newDeviation.setDiscount(0.519));\n        assertEquals(newDeviation.getDiscount(), 0.519);\n    }\n\n    @Test\n    void getMeanTest() {\n        double discount = new Random().nextDouble();\n        Deviation deviation = new Deviation(discount);\n        assertEquals(deviation.getMean(), 0);\n        assertTrue(deviation.isEmpty());\n        deviation.setCount(100);\n        
assertTrue(deviation.isEmpty());\n        assertTrue(deviation.count == 100);\n        deviation.update(-0);\n        assertEquals(101, deviation.count);\n        assertEquals(deviation.getMean(), 0);\n        assertFalse(deviation.isEmpty());\n        deviation.reset();\n        assertEquals(deviation.getDiscount(), discount);\n        assertTrue(deviation.isEmpty());\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/store/PointStoreTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotSame;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class PointStoreTest {\n\n    private int dimensions;\n    private int capacity;\n    private PointStore pointStore;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 2;\n        capacity = 4;\n        pointStore = new PointStoreSmall(dimensions, capacity);\n    }\n\n    @Test\n    public void testNew() {\n        assertEquals(dimensions, pointStore.getDimensions());\n        assertEquals(capacity, pointStore.getCapacity());\n        assertEquals(0, pointStore.size());\n\n        for (int i = 0; i < pointStore.getIndexCapacity(); i++) {\n            assertEquals(0, pointStore.getRefCount(i));\n        }\n        pointStore.add(new float[2], 
0);\n        int index = pointStore.add(new float[2], 0);\n        assertEquals(index, 1);\n        for (int y = 0; y < 1000; y++) {\n            pointStore.incrementRefCount(index);\n        }\n        assertEquals(pointStore.getRefCount(index), 1001);\n        int[] counts = pointStore.getRefCount();\n        assertEquals(counts[0], 1);\n        assertEquals(counts[index], 1001);\n        assertThrows(AssertionError.class, () -> pointStore.setLocation(0, 13));\n        assertThrows(AssertionError.class, () -> pointStore.extendLocationList(-10));\n    }\n\n    @Test\n    public void testConstructors() {\n        PointStore.Builder builder = new PointStore.Builder().dynamicResizingEnabled(true);\n        assertThrows(IllegalArgumentException.class, () -> new PointStoreSmall(builder));\n        builder.dimensions(1000);\n        assertThrows(IllegalArgumentException.class, () -> new PointStoreSmall(builder));\n        builder.capacity(100000);\n        assertThrows(IllegalArgumentException.class, () -> new PointStoreSmall(builder));\n        assertDoesNotThrow(() -> new PointStoreLarge(builder));\n        builder.shingleSize(3);\n        assertThrows(IllegalArgumentException.class, () -> new PointStoreLarge(builder));\n        builder.shingleSize(1);\n        builder.dimensions(2);\n        PointStoreLarge large = new PointStoreLarge(builder);\n        assertThrows(IllegalArgumentException.class, () -> large.checkFeasible(0));\n        assertEquals(large.size(), 0);\n        large.add(new float[2], 0L);\n        assertEquals(large.size(), 1);\n    }\n\n    @Test\n    public void testAdd() {\n        float[] point1 = { 1.2f, -3.4f };\n        int offset1 = pointStore.add(point1, 1);\n        assertTrue(offset1 >= 0 && offset1 < capacity);\n        assertEquals(1, pointStore.getRefCount(offset1));\n        assertEquals(1, pointStore.size());\n\n        float[] retrievedPoint1 = pointStore.getNumericVector(offset1);\n        assertNotSame(point1, retrievedPoint1);\n    
    assertArrayEquals(point1, retrievedPoint1);\n\n        float[] point2 = { 111.2f, -333.4f };\n        int offset2 = pointStore.add(point2, 2);\n        assertTrue(offset2 >= 0 && offset2 < capacity);\n        assertEquals(1, pointStore.getRefCount(offset2));\n        assertEquals(2, pointStore.size());\n        assertNotEquals(offset1, offset2);\n\n        float[] retrievedPoint2 = pointStore.getNumericVector(offset2);\n        assertNotSame(point2, retrievedPoint2);\n        assertArrayEquals(point2, retrievedPoint2);\n\n        // check that adding a second point didn't change the first stored point's value\n        retrievedPoint1 = pointStore.getNumericVector(offset1);\n        assertNotSame(point1, retrievedPoint1);\n        assertArrayEquals(point1, retrievedPoint1);\n    }\n\n    @Test\n    public void testAddInvalid() {\n        assertThrows(IllegalArgumentException.class, () -> pointStore.add(new float[] { 1.1f, -2.2f, 3.0f }, 0));\n        for (int i = 0; i < capacity; i++) {\n            float[] point = new float[dimensions];\n            point[0] = (float) Math.random();\n            point[1] = (float) Math.random();\n            pointStore.add(point, i + 2);\n        }\n        // point store is full\n        assertThrows(IllegalStateException.class, () -> pointStore.add(new float[] { 1.1f, -2.2f }, 0));\n    }\n\n    @Test\n    public void testGetInvalid() {\n        assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(-1));\n        assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(capacity));\n    }\n\n    @Test\n    public void testIncrementRefCount() {\n        float[] point = { 1.2f, -3.4f };\n        int offset = pointStore.add(point, 0);\n        assertEquals(1, pointStore.getRefCount(offset));\n\n        pointStore.incrementRefCount(offset);\n        assertEquals(2, pointStore.getRefCount(offset));\n    }\n\n    @Test\n    public void testIncrementRefCountInvalid() {\n        
assertThrows(IllegalArgumentException.class, () -> pointStore.incrementRefCount(-1));\n        assertThrows(IllegalArgumentException.class, () -> pointStore.incrementRefCount(0));\n    }\n\n    @Test\n    public void testDecrementRefCount() {\n        float[] point = { 1.2f, -3.4f };\n        int offset = pointStore.add(point, 0);\n        pointStore.incrementRefCount(offset);\n        assertEquals(2, pointStore.getRefCount(offset));\n        assertEquals(1, pointStore.size());\n\n        pointStore.decrementRefCount(offset);\n        assertEquals(1, pointStore.getRefCount(offset));\n        assertEquals(1, pointStore.size());\n\n        pointStore.decrementRefCount(offset);\n        assertEquals(0, pointStore.getRefCount(offset));\n        assertEquals(0, pointStore.size());\n    }\n\n    @Test\n    public void testDecrementRefCountInvalid() {\n        assertThrows(IllegalArgumentException.class, () -> pointStore.decrementRefCount(-1));\n        assertThrows(IllegalArgumentException.class, () -> pointStore.decrementRefCount(0));\n    }\n\n    @Test\n    public void testPointEquals() {\n        float[] point = { 1.2f, -3.4f };\n        int offset = pointStore.add(point, 0);\n        assertArrayEquals(pointStore.getNumericVector(offset), point);\n        assertNotEquals(pointStore.getNumericVector(offset), new float[] { 5.6f, -7.8f });\n    }\n\n    @Test\n    public void testPointEqualsInvalid() {\n        float[] point = { 1.2f, -3.4f };\n        assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(-1));\n        assertThrows(IllegalArgumentException.class, () -> pointStore.getNumericVector(0));\n    }\n\n    @Test\n    public void internalShinglingTestNoRotation() {\n        int shinglesize = 10;\n        PointStore store = new PointStore.Builder().capacity(20 * shinglesize).dimensions(shinglesize)\n                .shingleSize(shinglesize).indexCapacity(shinglesize).internalShinglingEnabled(true)\n                
.currentStoreCapacity(1).build();\n        assertFalse(store.isInternalRotationEnabled());\n        Random random = new Random(0);\n        float[] shingle = new float[shinglesize];\n        for (int i = 0; i < 10 * shinglesize - 3; i++) {\n            shingle[(i + 3) % shinglesize] = (float) random.nextDouble();\n            store.add(new float[] { shingle[(i + 3) % shinglesize] }, i);\n        }\n        assertArrayEquals(store.getNumericVector(9 * shinglesize - 3), shingle, (float) 1e-6);\n        assertArrayEquals(store.getInternalShingle(), shingle, (float) 1e-6);\n        assertArrayEquals(store.transformIndices(new int[] { 0 }), new int[] { shinglesize - 1 });\n        assertThrows(IllegalArgumentException.class, () -> store.transformIndices(new int[] { 1 }));\n        assertThrows(IllegalArgumentException.class, () -> store.transformIndices(new int[] { 0, 0 }));\n        assertArrayEquals(store.transformToShingledPoint(new float[] { 0.0f }),\n                store.transformToShingledPoint(new float[] { -0.0f }), (float) 1e-6);\n        assertThrows(IllegalArgumentException.class, () -> store.add(new float[] { 0, 0 }, 0));\n    }\n\n    @Test\n    public void internalShinglingTestWithRotation() {\n        int shinglesize = 10;\n        PointStore store = new PointStore.Builder().capacity(20 * shinglesize).dimensions(shinglesize)\n                .shingleSize(shinglesize).indexCapacity(shinglesize).internalShinglingEnabled(true)\n                .internalRotationEnabled(true).currentStoreCapacity(1).build();\n        assertTrue(store.isInternalRotationEnabled());\n        Random random = new Random(0);\n        float[] shingle = new float[shinglesize];\n        float[] temp = null;\n        for (int i = 0; i < 10 * shinglesize + 5; i++) {\n            shingle[i % shinglesize] = (float) random.nextDouble();\n            temp = store.transformToShingledPoint(new float[] { shingle[i % shinglesize] });\n            store.add(new float[] { shingle[i % shinglesize] 
}, i);\n        }\n        assertEquals(store.getNextSequenceIndex(), 10 * shinglesize + 5);\n        assertArrayEquals(temp, shingle, (float) 1e-6);\n        assertArrayEquals(store.getNumericVector(9 * shinglesize + 5), shingle, (float) 1e-6);\n        assertNotEquals(store.internalShingle, store.getInternalShingle());\n        assertArrayEquals(store.getNumericVector(9 * shinglesize + 5), shingle);\n        assertNotEquals(store.getNumericVector(9 * shinglesize + 4), shingle);\n        assertArrayEquals(store.getInternalShingle(), shingle, (float) 1e-6);\n        assertArrayEquals(store.transformIndices(new int[] { 0 }), new int[] { 5 });\n        assertThrows(IllegalArgumentException.class, () -> store.transformIndices(new int[] { 1 }));\n        assertEquals(store.transformToShingledPoint(new float[] { 1, 2 }).length, 2);\n        assertArrayEquals(store.transformToShingledPoint(new float[] { 0.0f }),\n                store.transformToShingledPoint(new float[] { -0.0f }), (float) 1e-6);\n    }\n\n    @Test\n    public void checkRotationAndCompact() {\n        int shinglesize = 4;\n        PointStore store = new PointStore.Builder().capacity(2 * shinglesize).dimensions(shinglesize)\n                .shingleSize(shinglesize).indexCapacity(shinglesize).internalShinglingEnabled(true)\n                .internalRotationEnabled(true).currentStoreCapacity(1).build();\n        for (int i = 0; i < 2 * shinglesize; i++) {\n            store.add(new float[] { -i - 1 }, i);\n        }\n        for (int i = 0; i < 2 * shinglesize - shinglesize + 1; i++) {\n            if (i != shinglesize - 1) {\n                store.decrementRefCount(i);\n            }\n        }\n        assertThrows(IllegalArgumentException.class, () -> store.getNumericVector(0));\n        float[] test = new float[shinglesize];\n        for (int i = 0; i < shinglesize; i++) {\n            test[i] = -(i + shinglesize + 1);\n        }\n        test[shinglesize - 1] = -shinglesize;\n        
assertArrayEquals(store.getNumericVector(shinglesize - 1), test, 1e-6f);\n        store.compact();\n        for (int i = 2 * shinglesize; i < 4 * shinglesize - 1; i++) {\n            store.add(new float[] { -i - 1 }, i);\n        }\n        assertThrows(IllegalStateException.class, () -> store.add(new float[] { -4 * shinglesize }, 0));\n        for (int i = 0; i < 2 * shinglesize; i++) {\n            if (i != shinglesize - 1) {\n                store.decrementRefCount(i);\n            }\n        }\n        assertEquals(store.toString(shinglesize - 1), Arrays.toString(test));\n        for (int i = 4 * shinglesize; i < 6 * shinglesize - 1; i++) {\n            store.add(new float[] { -i - 1 }, i);\n        }\n        assertThrows(IllegalStateException.class,\n                () -> store.add(new float[] { -6 * shinglesize }, 6 * shinglesize - 1));\n        store.decrementRefCount(shinglesize - 1);\n        store.add(new float[] { -6 * shinglesize }, 6 * shinglesize - 1);\n        store.decrementRefCount(shinglesize);\n        store.compact();\n    }\n\n    @Test\n    void CompactionTest() {\n        int shinglesize = 2;\n        PointStore store = new PointStore.Builder().capacity(6).dimensions(shinglesize).shingleSize(shinglesize)\n                .indexCapacity(6).directLocationEnabled(false).internalShinglingEnabled(true).build();\n\n        store.add(new float[] { 0 }, 0L);\n        for (int i = 0; i < 5; i++) {\n            store.add(new float[] { i + 1 }, 0L);\n        }\n        int finalIndex = store.add(new float[] { 4 + 2 }, 0L);\n        assertArrayEquals(store.getNumericVector(finalIndex), new float[] { 5, 6 });\n        store.decrementRefCount(1);\n        store.decrementRefCount(2);\n        int index = store.add(new float[] { 7 }, 0L);\n        assertArrayEquals(store.getNumericVector(index), new float[] { 6, 7 });\n        store.decrementRefCount(index);\n        assertTrue(store.size() < store.capacity);\n        index = store.add(new float[] { 8 }, 
0L);\n        assertArrayEquals(store.getNumericVector(index), new float[] { 7, 8 });\n    }\n\n    @Test\n    public void indexIntervalTest() {\n        assertThrows(IllegalArgumentException.class, () -> new IndexIntervalManager(0));\n        assertThrows(IllegalArgumentException.class, () -> new IndexIntervalManager(1, 0, null));\n        assertThrows(IllegalArgumentException.class, () -> IndexIntervalManager.toBits(null));\n        IndexIntervalManager a = new IndexIntervalManager(new int[] { 0, 1 }, 2);\n        IndexIntervalManager manager = new IndexIntervalManager(1);\n        manager.takeIndex();\n        assertThrows(IllegalStateException.class, () -> manager.takeIndex());\n        assertThrows(IllegalArgumentException.class, () -> manager.extendCapacity(1));\n        manager.extendCapacity(2);\n        manager.extendCapacity(3);\n        assertEquals(manager.getCapacity(), 3);\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/store/StreamSamplerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.store;\n\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.Test;\n\npublic class StreamSamplerTest {\n\n    @Test\n    void testBuilder() {\n        StreamSampler.Builder builder = StreamSampler.builder().capacity(10).timeDecay(0).randomSeed(0);\n        assertTrue(builder.getCapacity() == 10);\n        assertTrue(builder.getRandomSeed() == 0);\n        assertTrue(builder.getTimeDecay() == 0);\n    }\n\n    @Test\n    void testConstructor() {\n        StreamSampler<float[]> sampler = StreamSampler.builder().initialAcceptFraction(1.0)\n                .storeSequenceIndexesEnabled(true).build();\n        assertEquals(sampler.getEntriesSeen(), 0);\n        assertEquals(sampler.getSequenceNumber(), -1L);\n        sampler.sample(new float[] {}, 1f);\n        StreamSampler<float[]> second = StreamSampler.builder().initialAcceptFraction(0.5)\n                .storeSequenceIndexesEnabled(false).build();\n        second.sample(new float[] {}, 0.5f);\n        second.sample(new float[] {}, 2f);\n        
assertThrows(IllegalArgumentException.class, () -> new StreamSampler(sampler, second, 0, 0, 0L));\n        StreamSampler<float[]> merged = new StreamSampler(sampler, second, 10, 0, 0L);\n        assertEquals(merged.entriesSeen, 3);\n        assertEquals(merged.sampler.getInitialAcceptFraction(), 1.0);\n        assertEquals(merged.getSequenceNumber(), 1);\n    }\n\n    @Test\n    public void testConfig() {\n        StreamSampler<float[]> sampler = StreamSampler.builder().initialAcceptFraction(1.0).build();\n        assertTrue(sampler.isCurrentlySampling());\n        assertTrue(sampler.getEntriesSeen() == 0);\n        sampler.pauseSampling();\n        assertFalse(sampler.isCurrentlySampling());\n        sampler.sample(new float[] {}, 0.1f);\n        assertTrue(sampler.getEntriesSeen() == 1);\n        assertTrue(sampler.getObjectList().size() == 0);\n        sampler.resumeSampling();\n        assertTrue(sampler.isCurrentlySampling());\n        sampler.sample(new float[] { 1.0f, 1.0f }, 0.2f);\n        assertTrue(sampler.getEntriesSeen() == 2);\n        assertTrue(sampler.getObjectList().size() == 1);\n        sampler.pauseSampling();\n        assertFalse(sampler.isCurrentlySampling());\n        sampler.sample(new float[] { 1.0f, 1.0f }, 0.2f);\n        assertTrue(sampler.getEntriesSeen() == 3);\n        assertTrue(sampler.getObjectList().size() == 1);\n        assertEquals(sampler.getCapacity(), DEFAULT_SAMPLE_SIZE);\n        sampler.resumeSampling();\n        for (int i = 0; i < 10000; i++) {\n            sampler.sample(new float[] { 1.0f, 1.0f }, 1f);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/tree/BoundingBoxTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.defaultRCFgVecFunction;\nimport static com.amazon.randomcutforest.CommonUtils.getProbabilityOfSeparation;\nimport static com.amazon.randomcutforest.TestUtils.EPSILON;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.Matchers.closeTo;\nimport static org.hamcrest.Matchers.is;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class BoundingBoxTest {\n\n    private float[] point1;\n    private float[] point2;\n    private BoundingBox box1;\n    private BoundingBox box2;\n\n    @BeforeEach\n    public void setUp() {\n        point1 = new float[] { 1.5f, 2.7f };\n        point2 = new float[] { 3.0f, 1.2f };\n        box1 = new BoundingBox(point1);\n        box2 = new BoundingBox(point2);\n    }\n\n    @Test\n    public void dimensionTest() {\n        assertThrows(IllegalArgumentException.class, () -> new 
BoundingBox(point1, new float[1]));\n        assertThrows(IllegalArgumentException.class, () -> box1.getMergedBox(new float[1]));\n        assertThrows(IllegalArgumentException.class, () -> box1.contains(new float[1]));\n        assertThrows(IllegalArgumentException.class, () -> box1.contains(new BoundingBox(new float[1])));\n    }\n\n    @Test\n    public void equalsTest() {\n        assertFalse(box1.equals(point1));\n        assertFalse(box1.equals(box2));\n        assertFalse(box1.equals(new BoundingBox(point1, new float[] { 3.0f, 2.7f })));\n        assertTrue(box1.equals(box1.copy()));\n    }\n\n    @Test\n    public void testNewFromSinglePoint() {\n        assertThat(box1.getDimensions(), is(2));\n        assertThat((float) box1.getMinValue(0), is(point1[0]));\n        assertThat((float) box1.getMaxValue(0), is(point1[0]));\n        assertThat(box1.getRange(0), is(0.0));\n        assertThat((float) box1.getMinValue(1), is(point1[1]));\n        assertThat((float) box1.getMaxValue(1), is(point1[1]));\n        assertThat(box1.getRange(1), is(0.0));\n        assertThat(box1.getRangeSum(), is(0.0));\n\n        assertThat(box2.getDimensions(), is(2));\n        assertThat((float) box2.getMinValue(0), is(point2[0]));\n        assertThat((float) box2.getMaxValue(0), is(point2[0]));\n        assertThat(box2.getRange(0), is(0.0));\n        assertThat((float) box2.getMinValue(1), is(point2[1]));\n        assertThat((float) box2.getMaxValue(1), is(point2[1]));\n        assertThat(box2.getRange(1), is(0.0));\n        assertThat(box2.getRangeSum(), is(0.0));\n\n        assertTrue(box1.probabilityOfCut(point2) == 1.0);\n        assertTrue(box1.probabilityOfCut(point1) == 0.0);\n    }\n\n    @Test\n    public void testGetMergedBoxWithOtherBox() {\n\n        assertThrows(IllegalStateException.class, () -> box1.addBox(box2));\n        assertThrows(IllegalArgumentException.class, () -> box1.addPoint(new float[1]));\n        assertThrows(IllegalArgumentException.class, () -> 
box1.addPoint(new float[2]));\n        assertDoesNotThrow(() -> box1.copy().addPoint(new float[2]));\n\n        BoundingBox mergedBox = box1.getMergedBox(box2);\n\n        assertThat(mergedBox.getDimensions(), is(2));\n        assertThat((float) mergedBox.getMinValue(0), is(1.5f));\n        assertThat((float) mergedBox.getMaxValue(0), is(3.0f));\n        assertThat(mergedBox.getRange(0), closeTo(3.0 - 1.5, EPSILON));\n        assertThat((float) mergedBox.getMinValue(1), is(1.2f));\n        assertThat((float) mergedBox.getMaxValue(1), is(2.7f));\n        assertThat(mergedBox.getRange(1), closeTo(2.7 - 1.2, EPSILON));\n\n        double rangeSum = (3.0 - 1.5) + (2.7 - 1.2);\n        assertThat(mergedBox.getRangeSum(), closeTo(rangeSum, EPSILON));\n\n        // check that box1 and box2 were not changed\n\n        assertThat(box1.getDimensions(), is(2));\n        assertThat((float) box1.getMinValue(0), is(point1[0]));\n        assertThat((float) box1.getMaxValue(0), is(point1[0]));\n        assertThat(box1.getRange(0), is(0.0));\n        assertThat((float) box1.getMinValue(1), is(point1[1]));\n        assertThat((float) box1.getMaxValue(1), is(point1[1]));\n        assertThat(box1.getRange(1), is(0.0));\n        assertThat(box1.getRangeSum(), is(0.0));\n\n        assertThat(box2.getDimensions(), is(2));\n        assertThat((float) box2.getMinValue(0), is(point2[0]));\n        assertThat((float) box2.getMaxValue(0), is(point2[0]));\n        assertThat(box2.getRange(0), is(0.0));\n        assertThat((float) box2.getMinValue(1), is(point2[1]));\n        assertThat((float) box2.getMaxValue(1), is(point2[1]));\n        assertThat(box2.getRange(1), is(0.0));\n        assertThat(box2.getRangeSum(), is(0.0));\n    }\n\n    @Test\n    public void testContainsBoundingBox() {\n        BoundingBox box1 = new BoundingBox(new float[] { 0.0f, 0.0f })\n                .getMergedBox(new BoundingBox(new float[] { 10.0f, 10.0f }));\n\n        BoundingBox box2 = new BoundingBox(new float[] 
{ 2.0f, 2.0f })\n                .getMergedBox(new BoundingBox(new float[] { 8.0f, 8.0f }));\n\n        BoundingBox box3 = new BoundingBox(new float[] { -4.0f, -4.0f })\n                .getMergedBox(new BoundingBox(new float[] { -1.0f, -1.0f }));\n\n        BoundingBox box4 = new BoundingBox(new float[] { -1.0f, -1.0f })\n                .getMergedBox(new BoundingBox(new float[] { 5.0f, 5.0f }));\n\n        // completely contains\n        assertTrue(box1.contains(box2));\n        assertFalse(box2.contains(box1));\n\n        // completely disjoint\n        assertFalse(box1.contains(box3));\n        assertFalse(box3.contains(box1));\n\n        // partially intersect\n        assertFalse(box1.contains(box4));\n        assertFalse(box4.contains(box1));\n    }\n\n    @Test\n    public void testContainsPoint() {\n        BoundingBox box1 = new BoundingBox(new float[] { 0.0f, 0.0f })\n                .getMergedBox(new BoundingBox(new float[] { 10.0f, 10.0f }));\n\n        assertTrue(box1.contains(new float[] { 0.0f, 0.1f }));\n        assertTrue(box1.contains(new float[] { 5.5f, 6.5f }));\n        assertFalse(box1.contains(new float[] { -0.7f, -4.5f }));\n        assertFalse(box1.contains(new float[] { 5.0f, 11.0f }));\n        assertFalse(box1.contains(new float[] { -5.0f, 10.0f }));\n    }\n\n    @Test\n    public void probability() {\n        IBoundingBoxView box = new BoundingBox(new float[1], new float[1]);\n        assertEquals(0, getProbabilityOfSeparation(box, new float[1]));\n        assertArrayEquals(new double[1], defaultRCFgVecFunction(box));\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/tree/BoxCacheTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.RandomCutForestTest;\nimport com.amazon.randomcutforest.config.Precision;\n\npublic class BoxCacheTest {\n\n    @Test\n    public void testChangingBoundingBoxFloat32() {\n        int dimensions = 4;\n        int numberOfTrees = 1;\n        int sampleSize = 64;\n        int dataSize = 1000 * sampleSize;\n        Random random = new Random();\n        long seed = random.nextLong();\n        double[][] big = RandomCutForestTest.generateShingledData(dataSize, dimensions, 2);\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(Precision.FLOAT_32).randomSeed(seed)\n                .boundingBoxCacheFraction(0).build();\n        RandomCutForest otherForest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(Precision.FLOAT_32).randomSeed(seed)\n                .boundingBoxCacheFraction(1).build();\n        int num = 0;\n        for (double[] point : big) {\n            ++num;\n        
    if (num % sampleSize == 0) {\n                forest.setBoundingBoxCacheFraction(random.nextDouble());\n            }\n            assertEquals(forest.getAnomalyScore(point), otherForest.getAnomalyScore(point));\n            forest.update(point);\n            otherForest.update(point);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/tree/CutTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.Matchers.is;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class CutTest {\n\n    private int splitDimension;\n    private double splitValue;\n    private Cut cut;\n\n    @BeforeEach\n    public void setUp() {\n        splitDimension = 2;\n        splitValue = 3.4;\n        cut = new Cut(splitDimension, splitValue);\n    }\n\n    @Test\n    public void testNew() {\n        assertThat(cut.getDimension(), is(splitDimension));\n        assertThat(cut.getValue(), is(splitValue));\n    }\n\n    @Test\n    public void testIsLeftOf() {\n        double[] point = new double[] { 1.0, 2.0, 3.0, 4.0 };\n        assertTrue(Cut.isLeftOf(point, cut));\n\n        point[2] = 99.9;\n        assertFalse(Cut.isLeftOf(point, cut));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/tree/HyperTreeTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\nimport java.util.function.BinaryOperator;\nimport java.util.function.Function;\n\nimport org.junit.jupiter.api.BeforeAll;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.CommonUtils;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.VisitorFactory;\nimport com.amazon.randomcutforest.anomalydetection.TransductiveScalarScoreVisitor;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class HyperTreeTest {\n\n    private static int numberOfTrees;\n    private static int sampleSize;\n    private static int dimensions;\n    private static int randomSeed;\n\n    private static double baseMu;\n    private static double baseSigma;\n    private static double anomalyMu;\n    private static double anomalySigma;\n    private static double transitionToAnomalyProbability;\n    private static double transitionToBaseProbability;\n    private static int dataSize;\n    private static NormalMixtureTestData generator;\n    private 
static int numTrials = 5;\n    private static int numTest = 5;\n\n    public static Function<IBoundingBoxView, double[]> LAlphaSeparation(final double alpha) {\n\n        return (IBoundingBoxView boundingBox) -> {\n            double[] answer = new double[boundingBox.getDimensions()];\n\n            for (int i = 0; i < boundingBox.getDimensions(); ++i) {\n                double maxVal = boundingBox.getMaxValue(i);\n                double minVal = boundingBox.getMinValue(i);\n                double oldRange = maxVal - minVal;\n\n                if (oldRange > 0) {\n                    if (alpha == 0)\n                        answer[i] = 1.0;\n                    else\n                        answer[i] = Math.pow(oldRange, alpha);\n                }\n            }\n\n            return answer;\n        };\n    }\n\n    public static Function<IBoundingBoxView, double[]> GTFSeparation(final double gauge) {\n\n        return (IBoundingBoxView boundingBox) -> {\n            double[] answer = new double[boundingBox.getDimensions()];\n\n            for (int i = 0; i < boundingBox.getDimensions(); ++i) {\n                double maxVal = boundingBox.getMaxValue(i);\n                double minVal = boundingBox.getMinValue(i);\n                double oldRange = maxVal - minVal;\n\n                if (oldRange > 0) {\n                    answer[i] = Math.log(1 + oldRange / gauge);\n                }\n            }\n\n            return answer;\n        };\n    }\n\n    class HyperForest {\n        int dimensions;\n        int seed;\n        Random random;\n        int sampleSize;\n        int numberOfTrees;\n\n        PointStore pointStore;\n        ArrayList<HyperTree> trees;\n\n        public HyperForest(int dimensions, int numberOfTrees, int sampleSize, int seed,\n                Function<IBoundingBoxView, double[]> vecSeparation) {\n            this.numberOfTrees = numberOfTrees;\n            this.seed = seed;\n            this.sampleSize = sampleSize;\n            
this.dimensions = dimensions;\n            pointStore = PointStore.builder().capacity(numberOfTrees * sampleSize).dimensions(dimensions).shingleSize(1)\n                    .build();\n            trees = new ArrayList<>();\n            random = new Random(seed);\n            for (int i = 0; i < numberOfTrees; i++) {\n                trees.add(new HyperTree.Builder().pointStoreView(pointStore).dimension(dimensions)\n                        .buildGVec(vecSeparation).randomSeed(random.nextInt()).build());\n            }\n        }\n\n        // displacement scoring (multiplied by the normalizer log_2(treesize)) on the\n        // fly !!\n        // as introduced in Robust Random Cut Forest Based Anomaly Detection in Streams\n        // @ICML 2016. This does not address co-displacement (duplicity).\n        // seen function is (x,y) -> 1 which basically ignores everything\n        // unseen function is (x,y) -> y which corresponds to mass of sibling\n        // damp function is (x,y) -> 1 which is no dampening\n\n        public double getDisplacementScore(float[] point) {\n            return getDynamicScore(point, (x, y) -> 1.0, (x, y) -> y, (x, y) -> 1.0);\n        }\n\n        // Expected height (multiplied by the normalizer log_2(treesize) ) scoring on\n        // the fly !!\n        // seen function is (x,y) -> x+log(Y)/log(2) which depth + duplicity converted\n        // to depth\n        // unseen function is (x,y) -> x which is depth\n        // damp function is (x,y) -> 1 which is no dampening\n        // note that this is *NOT* anything like the expected height in\n        // Isolation Forest/Random Forest algorithms, because here\n        // the Expected height takes into account the contrafactual\n        // that \"what would have happened had the point been available during\n        // the construction of the forest\"\n\n        public double getHeightScore(float[] point) {\n            return getDynamicScore(point, (x, y) -> 1.0 * (x + Math.log(y)), (x, y) 
-> 1.0 * x, (x, y) -> 1.0);\n        }\n\n        public double getAnomalyScore(float[] point) {\n            return getDynamicScore(point, CommonUtils::defaultScoreSeenFunction,\n                    CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction);\n        }\n\n        public double getDynamicScore(float[] point, BiFunction<Double, Double, Double> seen,\n                BiFunction<Double, Double, Double> unseen, BiFunction<Double, Double, Double> newDamp) {\n\n            checkArgument(dimensions == point.length, \"incorrect dimensions\");\n\n            VisitorFactory<Double> visitorFactory = new VisitorFactory<>(\n                    (tree, y) -> new TransductiveScalarScoreVisitor(tree.projectToTree(y), tree.getMass(), seen, unseen,\n                            newDamp, ((HyperTree) tree).getgVec()));\n            BinaryOperator<Double> accumulator = Double::sum;\n\n            Function<Double, Double> finisher = sum -> sum / numberOfTrees;\n\n            return trees.parallelStream().map(tree -> tree.traverse(point, visitorFactory)).reduce(accumulator)\n                    .map(finisher).orElseThrow(() -> new IllegalStateException(\"accumulator returned an empty result\"));\n\n        }\n\n        void makeForest(double[][] pointList, int prefix) {\n            for (int i = 0; i < pointList.length; i++) {\n                if (pointList[i].length != dimensions) {\n                    throw new IllegalArgumentException(\"Points have incorrect dimensions\");\n                }\n            }\n            boolean[][] status = new boolean[numberOfTrees + 1][pointList.length];\n            for (int i = 0; i < numberOfTrees; i++) {\n                int y = 0;\n                while (y < sampleSize) {\n                    int z = random.nextInt(prefix);\n                    if (!status[i][z]) {\n                        status[i + 1][z] = true;\n                        status[0][z] = true; // will compute union across trees\n                   
     ++y;\n                    }\n                }\n            }\n            int[] reference = new int[pointList.length];\n            List<Integer>[] lists = new List[numberOfTrees];\n            for (int i = 0; i < numberOfTrees; i++) {\n                lists[i] = new ArrayList<>();\n            }\n            for (int i = 0; i < pointList.length; i++) {\n                if (status[0][i]) {\n                    reference[i] = pointStore.add(toFloatArray(pointList[i]), 0L);\n                    for (int j = 0; j < numberOfTrees; j++) {\n                        if (status[j + 1][i]) {\n                            lists[j].add(reference[i]);\n                        }\n                    }\n                }\n                ;\n            }\n            for (int i = 0; i < numberOfTrees; i++) {\n                trees.get(i).makeTree(lists[i], random.nextInt());\n            }\n        }\n\n    }\n\n    // ===========================================================\n\n    public static double getSimulatedAnomalyScore(RandomCutForest forest, float[] point,\n            Function<IBoundingBoxView, double[]> gVec) {\n        return forest.getDynamicSimulatedScore(point, CommonUtils::defaultScoreSeenFunction,\n                CommonUtils::defaultScoreUnseenFunction, CommonUtils::defaultDampFunction, gVec);\n    }\n\n    public static double getSimulatedHeightScore(RandomCutForest forest, float[] point,\n            Function<IBoundingBoxView, double[]> gvec) {\n        return forest.getDynamicSimulatedScore(point, (x, y) -> 1.0 * (x + Math.log(y)), (x, y) -> 1.0 * x,\n                (x, y) -> 1.0, gvec);\n    }\n\n    public static double getSimulatedDisplacementScore(RandomCutForest forest, float[] point,\n            Function<IBoundingBoxView, double[]> gvec) {\n        return forest.getDynamicSimulatedScore(point, (x, y) -> 1.0, (x, y) -> y, (x, y) -> 1.0, gvec);\n    }\n\n    // ===========================================================\n    @BeforeAll\n    
public static void setup() {\n        dataSize = 2000;\n        numberOfTrees = 1; // this is a tree test\n        sampleSize = 256;\n        dimensions = 30;\n\n        baseMu = 0.0;\n        baseSigma = 1.0;\n        anomalyMu = 0.0;\n        anomalySigma = 1.5;\n        transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        transitionToBaseProbability = 1.0;\n        generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n    }\n\n    private class TestScores {\n        double sumCenterScore = 0;\n        double sumCenterDisp = 0;\n        double sumCenterHeight = 0;\n        double sumLeftScore = 0;\n        double sumRightScore = 0;\n        double sumLeftDisp = 0;\n        double sumRightDisp = 0;\n        double sumLeftHeight = 0;\n        double sumRightHeight = 0;\n    }\n\n    public static void runRCF(TestScores testScore, Function<IBoundingBoxView, double[]> gVec) {\n        Random prg = new Random(randomSeed);\n        for (int trials = 0; trials < numTrials; trials++) {\n            double[][] data = generator.generateTestData(dataSize + numTest, dimensions, 100 + trials);\n\n            RandomCutForest newForest = RandomCutForest.builder().dimensions(dimensions).numberOfTrees(numberOfTrees)\n                    .sampleSize(sampleSize).randomSeed(prg.nextInt()).build();\n\n            for (int i = 0; i < dataSize; i++) {\n                // shrink, shift at random\n                for (int j = 0; j < dimensions; j++)\n                    data[i][j] *= 0.01;\n                if (prg.nextDouble() < 0.5)\n                    data[i][0] += 5.0;\n                else\n                    data[i][0] -= 5.0;\n                newForest.update(data[i]);\n                // the points are streamed\n            }\n\n            for (int i = dataSize; i < dataSize + numTest; i++) {\n                for (int j = 0; j < 
dimensions; j++)\n                    data[i][j] *= 0.01;\n                testScore.sumCenterScore += getSimulatedAnomalyScore(newForest, toFloatArray(data[i]), gVec);\n                testScore.sumCenterHeight += getSimulatedHeightScore(newForest, toFloatArray(data[i]), gVec);\n                testScore.sumCenterDisp += getSimulatedDisplacementScore(newForest, toFloatArray(data[i]), gVec);\n\n                data[i][0] += 5; // move to right cluster\n\n                testScore.sumRightScore += getSimulatedAnomalyScore(newForest, toFloatArray(data[i]), gVec);\n                testScore.sumRightHeight += getSimulatedHeightScore(newForest, toFloatArray(data[i]), gVec);\n                testScore.sumRightDisp += getSimulatedDisplacementScore(newForest, toFloatArray(data[i]), gVec);\n\n                data[i][0] -= 10; // move to left cluster\n\n                testScore.sumLeftScore += getSimulatedAnomalyScore(newForest, toFloatArray(data[i]), gVec);\n                testScore.sumLeftHeight += getSimulatedHeightScore(newForest, toFloatArray(data[i]), gVec);\n                testScore.sumLeftDisp += getSimulatedDisplacementScore(newForest, toFloatArray(data[i]), gVec);\n            }\n        }\n        assert (testScore.sumCenterScore > 2 * testScore.sumLeftScore);\n        assert (testScore.sumCenterScore > 2 * testScore.sumRightScore);\n\n        assert (testScore.sumCenterDisp > 10 * testScore.sumLeftDisp);\n        assert (testScore.sumCenterDisp > 10 * testScore.sumRightDisp);\n\n        assert (2 * testScore.sumCenterHeight < testScore.sumLeftHeight);\n        assert (2 * testScore.sumCenterHeight < testScore.sumRightHeight);\n\n    }\n\n    public void runGTFLAlpha(TestScores testScore, boolean flag, double gaugeOrAlpha) {\n        Random prg = new Random(randomSeed);\n        for (int trials = 0; trials < numTrials; trials++) {\n            double[][] data = generator.generateTestData(dataSize + numTest, dimensions, 100 + trials);\n\n            HyperForest 
newForest;\n            if (flag)\n                newForest = new HyperForest(dimensions, numberOfTrees, sampleSize, prg.nextInt(),\n                        GTFSeparation(gaugeOrAlpha));\n            else\n                newForest = new HyperForest(dimensions, numberOfTrees, sampleSize, prg.nextInt(),\n                        LAlphaSeparation(gaugeOrAlpha));\n\n            for (int i = 0; i < dataSize; i++) {\n                // shrink, shift at random\n                for (int j = 0; j < dimensions; j++)\n                    data[i][j] *= 0.01;\n                if (prg.nextDouble() < 0.5)\n                    data[i][0] += 5.0;\n                else\n                    data[i][0] -= 5.0;\n            }\n            newForest.makeForest(data, dataSize);\n\n            for (int i = dataSize; i < dataSize + numTest; i++) {\n                for (int j = 0; j < dimensions; j++)\n                    data[i][j] *= 0.01;\n                testScore.sumCenterScore += newForest.getAnomalyScore(toFloatArray(data[i]));\n                testScore.sumCenterHeight += newForest.getHeightScore(toFloatArray(data[i]));\n                testScore.sumCenterDisp += newForest.getDisplacementScore(toFloatArray(data[i]));\n\n                data[i][0] += 5; // move to right cluster\n\n                testScore.sumRightScore += newForest.getAnomalyScore(toFloatArray(data[i]));\n                testScore.sumRightHeight += newForest.getHeightScore(toFloatArray(data[i]));\n                testScore.sumRightDisp += newForest.getDisplacementScore(toFloatArray(data[i]));\n\n                data[i][0] -= 10; // move to left cluster\n\n                testScore.sumLeftScore += newForest.getAnomalyScore(toFloatArray(data[i]));\n                testScore.sumLeftHeight += newForest.getHeightScore(toFloatArray(data[i]));\n                testScore.sumLeftDisp += newForest.getDisplacementScore(toFloatArray(data[i]));\n\n            }\n        }\n\n        assert (testScore.sumCenterScore > 1.5 * 
testScore.sumLeftScore);\n        assert (testScore.sumCenterScore > 1.5 * testScore.sumRightScore);\n\n        assert (testScore.sumCenterDisp > 10 * testScore.sumLeftDisp);\n        assert (testScore.sumCenterDisp > 10 * testScore.sumRightDisp);\n\n        assert (1.5 * testScore.sumCenterHeight < testScore.sumLeftHeight);\n        assert (1.5 * testScore.sumCenterHeight < testScore.sumRightHeight);\n    }\n\n    public void simulateGTFLAlpha(TestScores testScore, boolean flag, double gaugeOrAlpha) {\n        Function<IBoundingBoxView, double[]> gVec = LAlphaSeparation(gaugeOrAlpha);\n        if (flag)\n            gVec = GTFSeparation(gaugeOrAlpha);\n        runRCF(testScore, gVec);\n    }\n\n    @Test\n    public void GaugeTransductiveForestTest() {\n\n        TestScores testScoreA = new TestScores();\n        runGTFLAlpha(testScoreA, true, 1);\n        TestScores testScoreB = new TestScores();\n        simulateGTFLAlpha(testScoreB, true, 1);\n\n    }\n\n    @Test\n    public void LAlphaForestTest() {\n\n        TestScores testScoreA = new TestScores();\n        runGTFLAlpha(testScoreA, false, 0.5);\n        TestScores testScoreB = new TestScores();\n        simulateGTFLAlpha(testScoreB, false, 0.5);\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/tree/RandomCutTreeTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.tree;\n\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.CommonUtils.validateInternalState;\nimport static com.amazon.randomcutforest.tree.AbstractNodeStore.Null;\nimport static java.lang.Math.max;\nimport static org.hamcrest.MatcherAssert.assertThat;\nimport static org.hamcrest.Matchers.closeTo;\nimport static org.hamcrest.Matchers.is;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.mockito.ArgumentMatchers.any;\nimport static org.mockito.ArgumentMatchers.anyInt;\nimport static org.mockito.Mockito.mock;\nimport static org.mockito.Mockito.when;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport 
org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.MultiVisitor;\nimport com.amazon.randomcutforest.MultiVisitorFactory;\nimport com.amazon.randomcutforest.config.Config;\nimport com.amazon.randomcutforest.sampler.Weighted;\nimport com.amazon.randomcutforest.state.tree.CompactRandomCutTreeContext;\nimport com.amazon.randomcutforest.state.tree.CompactRandomCutTreeState;\nimport com.amazon.randomcutforest.state.tree.RandomCutTreeMapper;\nimport com.amazon.randomcutforest.store.PointStore;\n\npublic class RandomCutTreeTest {\n\n    private static final double EPSILON = 1e-8;\n\n    private Random rng;\n    private RandomCutTree tree;\n\n    private PointStore pointStoreFloat;\n\n    @BeforeEach\n    public void setUp() {\n        rng = mock(Random.class);\n        pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100).dimensions(2)\n                .build();\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)\n                .storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();\n\n        // Create the following tree structure (in the second diagram., backticks denote\n        // cuts)\n        // The leaf point 0,1 has mass 2, all other nodes have mass 1.\n        //\n        // /\\\n        // / \\\n        // -1,-1 / \\\n        // / \\\n        // /\\ 1,1\n        // / \\\n        // -1,0 0,1\n        //\n        //\n        // 0,1 1,1\n        // ----------*---------*\n        // | ` | ` |\n        // | ` | ` |\n        // | ` | ` |\n        // -1,0 *-------------------|\n        // | |\n        // |```````````````````|\n        // | |\n        // -1,-1 *--------------------\n        //\n        // We choose the insertion order and random draws carefully so that each split\n        // divides its parent in half.\n        // The random values are used to set the cut dimensions and values.\n\n        
assertEquals(pointStoreFloat.add(new float[] { -1, -1 }, 1), 0);\n        assertEquals(pointStoreFloat.add(new float[] { 1, 1 }, 2), 1);\n        assertEquals(pointStoreFloat.add(new float[] { -1, 0 }, 3), 2);\n        assertEquals(pointStoreFloat.add(new float[] { 0, 1 }, 4), 3);\n        assertEquals(pointStoreFloat.add(new float[] { 0, 1 }, 5), 4);\n        assertEquals(pointStoreFloat.add(new float[] { 0, 0 }, 6), 5);\n\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 1));\n\n        tree.addPoint(0, 1);\n\n        tree.deletePoint(0, 1);\n        assertTrue(tree.root == Null);\n\n        tree.addPoint(0, 1);\n        when(rng.nextDouble()).thenReturn(0.625);\n        tree.addPoint(1, 2);\n\n        when(rng.nextDouble()).thenReturn(0.5);\n        tree.addPoint(2, 3);\n\n        when(rng.nextDouble()).thenReturn(0.25);\n        tree.addPoint(3, 4);\n\n        // add mass to 0,1\n        tree.addPoint(4, 5);\n        assertArrayEquals(tree.liftFromTree(new float[] { 17, 18 }), new float[] { 17, 18 });\n    }\n\n    @Test\n    public void testConfig() {\n        Config config = new Config();\n        assertThrows(IllegalArgumentException.class, () -> tree.setBoundingBoxCacheFraction(-0.5));\n        assertThrows(IllegalArgumentException.class, () -> tree.setBoundingBoxCacheFraction(2.0));\n        assertThrows(IllegalArgumentException.class, () -> tree.setConfig(\"foo\", 0));\n        assertThrows(IllegalArgumentException.class, () -> tree.getConfig(\"bar\"));\n        assertEquals(tree.getConfig(Config.BOUNDING_BOX_CACHE_FRACTION), 1.0);\n        assertThrows(IllegalArgumentException.class, () -> tree.setConfig(config.BOUNDING_BOX_CACHE_FRACTION, true));\n        assertThrows(IllegalArgumentException.class,\n                () -> tree.getConfig(Config.BOUNDING_BOX_CACHE_FRACTION, boolean.class));\n        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.2);\n    }\n\n    @Test\n    public void testConfigStore() {\n        
assertEquals(tree.nodeStore.isLeaf(-1), tree.isLeaf(-1));\n        assertEquals(tree.nodeStore.isLeaf(256), tree.isLeaf(256));\n        assertEquals(tree.nodeStore.isInternal(-1), tree.isInternal(-1));\n        assertEquals(tree.nodeStore.isInternal(0), tree.isInternal(0));\n        assertEquals(tree.nodeStore.isInternal(255), tree.isInternal(255));\n        assertEquals(tree.nodeStore.isInternal(256), tree.isInternal(256));\n    }\n\n    @Test\n    public void testParent() {\n        PointStore pointStore = mock(PointStore.class);\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)\n                .storeSequenceIndexesEnabled(true).storeParent(false).dimension(3).build();\n        assertThrows(IllegalArgumentException.class, () -> tree.nodeStore.getParentIndex(tree.root));\n    }\n\n    @Test\n    public void testConfigDelete() {\n        PointStore pointStore = mock(PointStore.class);\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)\n                .storeSequenceIndexesEnabled(true).storeParent(true).dimension(3).build();\n        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[] { 0 }).thenReturn(new float[3])\n                .thenReturn(new float[3]);\n        tree.addPoint(0, 1);\n        // fails for dimension\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 1));\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(2, 1));\n        // wrong sequence index\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 2));\n        // state is corrupted\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(0, 1));\n    }\n\n    @Test\n    public void testConfigAdd() {\n        PointStore pointStore = mock(PointStore.class);\n        float[] test = new float[] { 1.119f, 0f, -3.11f, 100f };\n        float[] copies = new float[] { 0, 
17, 0, 0 };\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)\n                .centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).storeParent(true).dimension(4).build();\n        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[0]).thenReturn(test)\n                .thenReturn(new float[62]).thenReturn(new float[4]).thenReturn(new float[17]).thenReturn(new float[4])\n                .thenReturn(new float[4]).thenReturn(new float[5]).thenReturn(copies).thenReturn(test)\n                .thenReturn(copies).thenReturn(copies).thenReturn(test);\n\n        // cannot have partial addition to empty tree\n        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(0, 1));\n        // the following does not consume any points\n        tree.addPoint(0, 1);\n        // consumes from pointstore but gets 0 length vector\n        assertThrows(IllegalArgumentException.class, () -> tree.getPointSum(tree.getRoot()));\n        // passes, consumes pointstore\n        assertArrayEquals(tree.getPointSum(tree.getRoot()), test);\n        // sequel fails because dimension is 62\n        assertThrows(IllegalArgumentException.class, () -> tree.getBox(tree.root));\n        // in the sequel point is [0,0,0,0] fails because old point appears to have 17\n        // dimensions\n        assertThrows(IllegalArgumentException.class, () -> tree.addPoint(1, 1));\n        // this invocation succeeds, but points are same\n        tree.addPoint(1, 1);\n        assertTrue(tree.isLeaf(tree.getRoot()));\n        // dimension = 5\n        assertThrows(IllegalArgumentException.class, () -> tree.addPoint(2, 1));\n        // switch the vector\n        assertArrayEquals(tree.getPointSum(tree.getRoot()), new float[] { 0, 34, 0, 0 });\n        // adding test, consumes the copy\n        tree.addPoint(2, 1);\n        assertEquals(tree.getMass(), 3);\n        
assertArrayEquals(tree.getPointSum(tree.getRoot()), new float[] { 1.119f, 34, -3.11f, 100 }, 1e-3f);\n\n        // bounding boxes are incorrect they are minvalues = test, maxvalues = test\n        assertThrows(IllegalStateException.class, () -> tree.validateAndReconstruct(tree.root));\n        assertTrue(tree.getCutDimension(tree.root) == 3);\n        // cut cannot be the same as right minvalue\n        tree.nodeStore.cutValue[tree.root] = 100;\n        assertThrows(IllegalStateException.class, () -> tree.validateAndReconstruct(tree.root));\n\n    }\n\n    @Test\n    public void testConfigPartialAdd() {\n        PointStore pointStore = mock(PointStore.class);\n        float[] test = new float[] { 1.119f, 0f, -3.11f, 100f };\n        float[] copies = new float[] { 0, 17, 0, 0 };\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore)\n                .centerOfMassEnabled(true).storeSequenceIndexesEnabled(true).storeParent(true).dimension(4).build();\n        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[0]).thenReturn(test)\n                .thenReturn(new float[0]).thenReturn(test).thenReturn(new float[4]).thenReturn(new float[5])\n                .thenReturn(copies).thenReturn(test).thenReturn(copies).thenReturn(copies).thenReturn(test);\n\n        // the following does not consume any points\n        tree.addPoint(0, 1);\n        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(1, 1));\n        // fails at check of dimension of retrieved point\n        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(1, 1));\n        // fails at equality check\n        assertThrows(IllegalArgumentException.class, () -> tree.addPointToPartialTree(1, 1));\n    }\n\n    @Test\n    public void testCut() {\n        PointStore pointStore = mock(PointStore.class);\n        Random random = mock(Random.class);\n        tree = 
RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStore).random(random)\n                .storeSequenceIndexesEnabled(true).storeParent(true).dimension(1).build();\n        when(pointStore.getNumericVector(any(Integer.class))).thenReturn(new float[1]).thenReturn(new float[] { 1 })\n                .thenReturn(new float[] { 0 }).thenReturn(new float[] { 0 }).thenReturn(new float[] { 2 })\n                .thenReturn(new float[] { 1 }).thenReturn(new float[0]).thenReturn(new float[] { 2 })\n                .thenReturn(new float[] { 1 }).thenReturn(new float[1]);\n        // testing the cut assumptions -- the values should not be 1 or larger, but is\n        // useful for testing\n        when(random.nextDouble()).thenReturn(1.2).thenReturn(1.5).thenReturn(1.5).thenReturn(0.0);\n        // following does not query pointstore\n        tree.addPoint(0, 1);\n        // following tries to add [0.0], and discovers point index 0 is [1.0]\n        tree.addPoint(1, 1);\n        assertTrue(tree.getCutValue(tree.getRoot()) == (double) Math.nextAfter(1.0f, 0.0));\n\n        assertThrows(IllegalArgumentException.class, () -> tree.addPoint(1, 2)); // copy\n        tree.addPoint(1, 2); // passes\n        assertTrue(tree.getRoot() == 0);\n        assertTrue(tree.getCutValue(0) == (double) Math.nextAfter(1.0f, 0.0));\n        assertTrue(tree.getCutValue(1) == (double) Math.nextAfter(2.0f, 1.0));\n        assertFalse(tree.checkStrictlyContains(1, new float[] { 2 }));\n        assertTrue(tree.checkStrictlyContains(1, new float[] { 1.001f }));\n    }\n\n    /**\n     * Verify that the tree has the form described in the setUp method.\n     */\n    @Test\n    public void testInitialTreeState() {\n        int node = tree.getRoot();\n        // the second double[] is intentional\n        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 1, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        
assertThat(tree.getCutDimension(node), is(1));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n        assertThat(tree.getMass(), is(5));\n        assertArrayEquals(new double[] { -1, 2 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, -1 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);\n        // testing inappropriate\n        assertThrows(IllegalArgumentException.class, () -> tree.getLeftChild(Integer.MAX_VALUE));\n        assertThrows(IllegalArgumentException.class, () -> tree.getRightChild(500));\n        assertThrows(IllegalArgumentException.class, () -> tree.getCutValue(-1));\n        assertThrows(IllegalArgumentException.class, () -> tree.getCutDimension(-1));\n        // pointIndex should have a value at least as large as number of leaves\n        assertThrows(IllegalArgumentException.class, () -> tree.getPointIndex(0));\n\n        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;\n        assert (nodeStoreSmall.getParentIndex(tree.getRightChild(node)) == node);\n        node = tree.getRightChild(node);\n        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new BoundingBox(new float[] { 1, 1 }));\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertThat(tree.getCutDimension(node), is(0));\n        assertThat(tree.getCutValue(node), closeTo(0.5, EPSILON));\n        assertThat(tree.getMass(node), is(4));\n        assertArrayEquals(new double[] { 0.0, 3.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n\n        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));\n        
assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),\n                is(new float[] { 1, 1 }));\n        assertThat(tree.getMass(tree.getRightChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(2L), 1);\n\n        assert (nodeStoreSmall.getParentIndex(tree.getLeftChild(node)) == node);\n        node = tree.getLeftChild(node);\n        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 0, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n\n        assertThat(tree.getCutDimension(node), is(0));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n        assertThat(tree.getMass(node), is(3));\n        assertArrayEquals(new double[] { -1.0, 2.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, 0 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(3L), 1);\n\n        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),\n                is(new float[] { 0, 1 }));\n        assertThat(tree.getMass(tree.getRightChild(node)), is(2));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(4L), 1);\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(5L), 1);\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(5, 6));\n    }\n\n    @Test\n    public void testTreeMapper() {\n        RandomCutTreeMapper mapper = new RandomCutTreeMapper();\n        CompactRandomCutTreeState state = 
mapper.toState(tree);\n        CompactRandomCutTreeContext context = new CompactRandomCutTreeContext();\n        context.setPointStore(pointStoreFloat);\n        context.setDimension(tree.getDimension());\n        state.setDimensions(0);\n        RandomCutTree newTree = mapper.toModel(state, context);\n        assertEquals(newTree.getDimension(), 2);\n    }\n\n    @Test\n    public void treeTraversal() {\n        class DepthCounter implements MultiVisitor<Integer> {\n\n            int depth = 0;\n\n            DepthCounter(int num) {\n                depth = 0;\n            }\n\n            @Override\n            public boolean trigger(INodeView node) {\n                return true;\n            }\n\n            @Override\n            public MultiVisitor<Integer> newPartialCopy() {\n                return new DepthCounter(depth);\n            }\n\n            @Override\n            public void combine(MultiVisitor<Integer> other) {\n                depth = max(depth, other.getResult());\n            }\n\n            @Override\n            public void accept(INodeView node, int depthOfNode) {\n                validateInternalState(!isConverged(), \"error\");\n                depth++;\n            }\n\n            @Override\n            public Integer getResult() {\n                return depth;\n            }\n        }\n        MultiVisitorFactory<Integer> factory = new MultiVisitorFactory<>((tree, x) -> new DepthCounter(0));\n        assertEquals((int) tree.traverseMulti(new float[2], factory), 4);\n\n    }\n\n    @Test\n    public void testDeletePointWithLeafSibling() {\n        tree.deletePoint(2, 3);\n\n        // root node bounding box and cut remains unchanged, mass and centerOfMass are\n        // updated\n\n        int node = tree.getRoot();\n        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 1, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        
assertThat(tree.getCutDimension(node), is(1));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n        assertThat(tree.getMass(), is(4));\n\n        assertArrayEquals(new double[] { 0.0, 2.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, -1 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);\n        // sibling node moves up and bounding box recomputed\n\n        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;\n        assert (nodeStoreSmall.getParentIndex(tree.getRightChild(node)) == node);\n        node = tree.getRightChild(node);\n        expectedBox = new BoundingBox(new float[] { 0, 1 }).getMergedBox(new float[] { 1, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertThat(tree.getCutDimension(node), is(0));\n        assertThat(tree.getCutValue(node), closeTo(0.5, EPSILON));\n        assertThat(tree.getMass(node), is(3));\n        assertArrayEquals(new double[] { 1.0, 3.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { 0, 1 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(2));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(4L), 1);\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(5L), 1);\n\n        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));\n        
assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),\n                is(new float[] { 1, 1 }));\n        assertThat(tree.getMass(tree.getRightChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(2L), 1);\n    }\n\n    @Test\n    public void testDeletePointWithNonLeafSibling() {\n        tree.deletePoint(1, 2);\n\n        // root node bounding box recomputed\n\n        int node = tree.getRoot();\n        IBoundingBoxView expectedBox = new BoundingBox(new float[] { -1, -1 }).getMergedBox(new float[] { 0, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertThat(tree.getCutDimension(node), is(1));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n        assertThat(tree.getMass(), is(4));\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, -1 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);\n\n        // sibling node moves up and bounding box stays the same\n        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;\n        assert (nodeStoreSmall.getParentIndex(tree.getRightChild(node)) == node);\n        node = tree.getRightChild(node);\n        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 0, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertThat(tree.getCutDimension(node), is(0));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, 0 }));\n  
      assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(3L), 1);\n\n        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),\n                is(new float[] { 0, 1 }));\n        assertThat(tree.getMass(tree.getRightChild(node)), is(2));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(4L), 1);\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(5L), 1);\n    }\n\n    @Test\n    public void testDeletePointWithMassGreaterThan1() {\n\n        assertTrue(tree.boundingBoxCacheFraction == 1.0);\n        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.5);\n        assertTrue(tree.boundingBoxData != null);\n        assertTrue(tree.boundingBoxData.length == ((tree.numberOfLeaves - 1) / 2) * 4);\n        assertTrue(tree.rangeSumData != null);\n        assertTrue(tree.rangeSumData.length == (tree.numberOfLeaves - 1) / 2);\n\n        int root = tree.getRoot();\n        assertTrue(tree.checkStrictlyContains(root, new float[2]));\n\n        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.0);\n        assertTrue(tree.boundingBoxData == null);\n        assertTrue(tree.rangeSumData == null);\n        assertFalse(tree.checkStrictlyContains(root, new float[2]));\n\n        tree.deletePoint(3, 4);\n        tree.setConfig(Config.BOUNDING_BOX_CACHE_FRACTION, 0.5);\n        assertTrue(tree.boundingBoxData != null);\n        assertTrue(tree.boundingBoxData.length == ((tree.numberOfLeaves - 1) / 2) * 4);\n        assertTrue(tree.rangeSumData != null);\n        assertTrue(tree.rangeSumData.length == (tree.numberOfLeaves - 1) / 2);\n\n        // same as initial state except mass at 0,1 is 1\n\n        int node = tree.getRoot();\n        IBoundingBoxView expectedBox = new BoundingBox(new 
float[] { -1, -1 }).getMergedBox(new float[] { 1, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertThat(tree.getCutDimension(node), is(1));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n        assertThat(tree.getMass(), is(4));\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, -1 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);\n        assertArrayEquals(new double[] { -1.0, 1.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, -1 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(1L), 1);\n\n        node = tree.getRightChild(node);\n        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 1, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertThat(tree.getCutDimension(node), is(0));\n        assertThat(tree.getCutValue(node), closeTo(0.5, EPSILON));\n        assertThat(tree.getMass(node), is(3));\n        NodeView nodeView = new NodeView(tree, tree.pointStoreView, node);\n        assertTrue(nodeView.getCutDimension() == 0);\n        assertTrue(nodeView.getCutValue() == 0.5);\n\n        assertArrayEquals(new double[] { 0.0, 2.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n\n        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),\n     
           is(new float[] { 1, 1 }));\n        assertThat(tree.getMass(tree.getRightChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(2L), 1);\n\n        NodeStoreSmall nodeStoreSmall = (NodeStoreSmall) tree.nodeStore;\n        assert (nodeStoreSmall.getParentIndex(tree.getLeftChild(node)) == node);\n        node = tree.getLeftChild(node);\n        expectedBox = new BoundingBox(new float[] { -1, 0 }).getMergedBox(new float[] { 0, 1 });\n        assertThat(tree.getBox(node), is(expectedBox));\n        assertEquals(expectedBox.toString(), tree.getBox(node).toString());\n        assertThat(tree.getCutDimension(node), is(0));\n        assertThat(tree.getCutValue(node), closeTo(-0.5, EPSILON));\n        assertThat(tree.getMass(), is(4));\n\n        assertArrayEquals(new double[] { -1.0, 1.0 }, toDoubleArray(tree.getPointSum(node)), EPSILON);\n\n        assertThat(tree.isLeaf(tree.getLeftChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getLeftChild(node))),\n                is(new float[] { -1, 0 }));\n        assertThat(tree.getMass(tree.getLeftChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getLeftChild(node))).get(3L), 1);\n\n        assertThat(tree.isLeaf(tree.getRightChild(node)), is(true));\n        assertThat(tree.pointStoreView.getNumericVector(tree.getPointIndex(tree.getRightChild(node))),\n                is(new float[] { 0, 1 }));\n        assertThat(tree.getMass(tree.getRightChild(node)), is(1));\n        assertEquals(tree.getSequenceMap(tree.getPointIndex(tree.getRightChild(node))).get(5L), 1);\n    }\n\n    @Test\n    public void testDeletePointInvalid() {\n        // specified sequence index does not exist\n        assertThrows(IllegalArgumentException.class, () -> tree.deletePoint(2, 99));\n\n        // point does not exist in tree\n        assertThrows(IllegalArgumentException.class, () -> 
tree.deletePoint(7, 3));\n    }\n\n    @Test\n    public void testUpdatesOnSmallBoundingBox() {\n        // verifies on small bounding boxes random cuts and tree updates are functional\n        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(10).capacity(10).currentStoreCapacity(10)\n                .dimensions(1).build();\n        RandomCutTree tree = RandomCutTree.builder().random(rng).pointStoreView(pointStoreFloat).build();\n\n        List<Weighted<double[]>> points = new ArrayList<>();\n        points.add(new Weighted<>(new double[] { 48.08 }, 0, 1L));\n        points.add(new Weighted<>(new double[] { 48.08001 }, 0, 2L));\n\n        pointStoreFloat.add(toFloatArray(points.get(0).getValue()), 0);\n        pointStoreFloat.add(toFloatArray(points.get(1).getValue()), 1);\n        tree.addPoint(0, points.get(0).getSequenceIndex());\n        tree.addPoint(1, points.get(1).getSequenceIndex());\n        assertNotEquals(pointStoreFloat.getNumericVector(0)[0], pointStoreFloat.getNumericVector(1)[0]);\n\n        for (int i = 0; i < 10000; i++) {\n            Weighted<double[]> point = points.get(i % points.size());\n            tree.deletePoint(i % points.size(), point.getSequenceIndex());\n            tree.addPoint(i % points.size(), point.getSequenceIndex());\n        }\n    }\n\n    @Test\n    public void testfloat() {\n        float x = 110.13f;\n        double sum = 0;\n        int trials = 230000;\n        for (int i = 0; i < trials; i++) {\n            float z = (x * (trials - i + 1) - x);\n            sum += z;\n        }\n        System.out.println(sum);\n        for (int i = 0; i < trials - 1; i++) {\n            float z = (x * (trials - i + 1) - x);\n            sum -= z;\n        }\n        System.out.println(sum + \" \" + (double) x + \" \" + (sum <= (double) x));\n        float[] possible = new float[trials];\n        float[] alsoPossible = new float[trials];\n        for (int i = 0; i < trials; i++) {\n            possible[i] = x;\n     
       alsoPossible[i] = (trials - i + 1) * x;\n        }\n        BoundingBox box = new BoundingBox(possible, alsoPossible);\n        System.out.println(\"rangesum \" + box.getRangeSum());\n        double factor = 1.0 - 1e-16;\n        System.out.println(factor);\n        RandomCutTree tree = RandomCutTree.builder().dimension(trials).build();\n        // tries both path\n        tree.randomCut(factor, possible, box);\n        tree.randomCut(1.0 - 1e-17, possible, box);\n\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 100, 10000, 100000 })\n    void testNodeStore(int size) {\n        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)\n                .dimensions(2).build();\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)\n                .capacity(size).storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();\n        long seed = new Random().nextLong();\n        System.out.println(\"seed :\" + seed);\n        Random rng = new Random(seed);\n        for (int i = 0; i < 100; i++) {\n            pointStoreFloat.add(new double[] { rng.nextDouble(), rng.nextDouble() }, 0L);\n        }\n        ArrayList<Weighted<Integer>> list = new ArrayList<>();\n        for (int i = 0; i < 100; i++) {\n            tree.addPoint(i, 0L);\n            list.add(new Weighted<>(i, rng.nextFloat(), 0));\n        }\n        list.sort((o1, o2) -> Float.compare(o1.getWeight(), o2.getWeight()));\n        for (int i = 0; i < 50; i++) {\n            tree.deletePoint(list.remove(0).getValue(), 0L);\n        }\n\n        AbstractNodeStore nodeStore = tree.getNodeStore();\n\n        for (int i = 0; i < 25; i++) {\n            if (!tree.isLeaf(tree.getLeftChild(tree.getRoot()))) {\n                assert (nodeStore.getParentIndex(tree.getLeftChild(tree.getRoot())) == tree.root);\n            }\n            if 
(!tree.isLeaf(tree.getRightChild(tree.getRoot()))) {\n                assert (nodeStore.getParentIndex(tree.getRightChild(tree.getRoot())) == tree.root);\n            }\n            tree.deletePoint(list.remove(0).getValue(), 0L);\n        }\n    }\n\n    // spoofs the cut (using a changing box) to hit illegal state\n    @Test\n    public void cutTest1() {\n        BoundingBox box1 = mock(BoundingBox.class);\n        when(box1.getMinValue(anyInt())).thenReturn(0.0).thenReturn(0.0).thenReturn(1.0);\n        assertThrows(IllegalStateException.class, () -> tree.randomCut(1.2, new float[] { 1.0f }, box1));\n    }\n\n    // spoofs the cut (using a changing box) to hit illegal state\n    @Test\n    public void cutTest2() {\n        BoundingBox box1 = mock(BoundingBox.class);\n        when(box1.getMinValue(anyInt())).thenReturn(0.0).thenReturn(0.0).thenReturn(1.0);\n        assertThrows(IllegalStateException.class, () -> tree.randomCut(1.5, new float[] { 1.0f }, box1));\n    }\n\n    @Test\n    public void cutTestMultiD() {\n        float[] point = new float[2];\n        float[] newPoint = new float[] { 0.1f + new Random().nextFloat(), 0.1f + new Random().nextFloat() };\n        float[] testPoint = new float[] { point[0], newPoint[1] };\n        float[] testPoint2 = new float[] { newPoint[0], point[1] };\n        BoundingBox box1 = new BoundingBox(point, point);\n        BoundingBox box2 = new BoundingBox(newPoint, newPoint);\n\n        assertThrows(IllegalArgumentException.class, () -> tree.randomCut(new Random().nextDouble(), point, box1));\n        assertDoesNotThrow(() -> tree.randomCut(new Random().nextDouble(), point, box2));\n        assertDoesNotThrow(() -> tree.randomCut(new Random().nextDouble(), newPoint, box1));\n\n        Cut cut1 = tree.randomCut(0, new float[] { 0, 1.0f }, box1);\n        // first dimension is identical\n        assertTrue(cut1.getDimension() == 1);\n        assertTrue(cut1.getValue() == 0f);\n        assertEquals(cut1.toString(), \"Cut(1, 
0.000000)\");\n\n        Cut cut2 = tree.randomCut(1.2, point, box2);\n        assertTrue(cut2.getDimension() == 0);\n        assertTrue(cut2.getValue() == Math.nextAfter(newPoint[0], point[0]));\n        Cut largeCut = tree.randomCut(1.2, newPoint, box1);\n        assertTrue(largeCut.getDimension() == 0);\n        assertTrue(largeCut.getValue() == Math.nextAfter(newPoint[0], point[0]));\n        Cut testCut = tree.randomCut(1.2, testPoint, box2);\n        assertTrue(testCut.getDimension() == 0);\n        assertTrue(testCut.getValue() == Math.nextAfter(newPoint[0], testPoint[0]));\n        Cut testCut2 = tree.randomCut(1.2, testPoint2, box2);\n        assertTrue(testCut2.getDimension() == 1);\n        assertTrue(testCut2.getValue() == Math.nextAfter(newPoint[1], point[1]));\n\n        Cut another = tree.randomCut(1.5, point, box2);\n        assertTrue(another.getDimension() == 1);\n        assertTrue(another.getValue() == Math.nextAfter(newPoint[1], point[1]));\n        Cut anotherLargeCut = tree.randomCut(1.5, newPoint, box1);\n        assertTrue(anotherLargeCut.getDimension() == 1);\n        assertTrue(anotherLargeCut.getValue() == Math.nextAfter(newPoint[1], point[1]));\n        Cut anotherTestCut = tree.randomCut(1.5, testPoint, box1);\n        assertTrue(testCut.getDimension() == 0);\n        assertTrue(testCut.getValue() == Math.nextAfter(newPoint[0], point[0]));\n        Cut anotherTestCut2 = tree.randomCut(1.5, testPoint2, box1);\n        assertTrue(testCut2.getDimension() == 1);\n        assertTrue(testCut2.getValue() == Math.nextAfter(newPoint[1], point[1]));\n\n    }\n\n    // the following are tested directly since they are unreachable\n    @Test\n    public void traverseTest() {\n        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)\n                .dimensions(2).build();\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)\n              
  .capacity(188).storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();\n        assertDoesNotThrow(() -> tree.validateAndReconstruct());\n        assertThrows(IllegalArgumentException.class, () -> tree.traverse(null, null));\n        assertThrows(IllegalArgumentException.class, () -> tree.traverseMulti(null, null));\n    }\n\n    @Test\n    public void invalidNodeTest() {\n        PointStore pointStoreFloat = new PointStore.Builder().indexCapacity(100).capacity(100).initialSize(100)\n                .dimensions(2).build();\n        tree = RandomCutTree.builder().random(rng).centerOfMassEnabled(true).pointStoreView(pointStoreFloat)\n                .capacity(188).storeSequenceIndexesEnabled(true).storeParent(true).dimension(2).build();\n        tree.root = 187;\n        assertThrows(IllegalStateException.class, () -> tree.validateAndReconstruct());\n        assertThrows(IllegalStateException.class,\n                () -> tree.traversePathToLeafAndVisitNodes(null, null, null, tree.root, 0));\n        assertThrows(IllegalStateException.class, () -> tree.traverseTreeMulti(null, null, null, tree.root, 0));\n\n        assertThrows(IllegalStateException.class, () -> tree.growNodeBox(null, pointStoreFloat, 0, 187));\n        assertThrows(IllegalStateException.class, () -> tree.getBox(187));\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/util/ArrayPackingTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static com.amazon.randomcutforest.util.ArrayPacking.pack;\nimport static com.amazon.randomcutforest.util.ArrayPacking.unpackDoubles;\nimport static com.amazon.randomcutforest.util.ArrayPacking.unpackFloats;\nimport static com.amazon.randomcutforest.util.ArrayPacking.unpackInts;\nimport static com.amazon.randomcutforest.util.ArrayPacking.unpackShorts;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.ValueSource;\n\npublic class ArrayPackingTest {\n    private Random rng;\n\n    @BeforeEach\n    public void setUp() {\n        rng = new Random();\n        ArrayPacking arrayPacking = new ArrayPacking();\n    }\n\n    @Test\n    public void testLogMax() {\n        long[] bases = new long[] { 2, 101, 3_456_789 };\n        Arrays.stream(bases).forEach(base -> {\n            int log = ArrayPacking.logMax(base);\n   
         assertTrue(Math.pow(base, log + 1) >= Integer.MAX_VALUE);\n            assertTrue(Math.pow(base, log) < Integer.MAX_VALUE);\n        });\n    }\n\n    @Test\n    public void testLogMaxInvalid() {\n        assertThrows(IllegalArgumentException.class, () -> ArrayPacking.logMax(1));\n        assertThrows(IllegalArgumentException.class, () -> ArrayPacking.logMax(0));\n        assertThrows(IllegalArgumentException.class, () -> ArrayPacking.logMax(-123467890));\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 0, 1, 2, 3, 11, 100 })\n    public void testIntsPackRoundTrip(int inputLength) {\n        int[] inputArray = rng.ints().limit(inputLength).toArray();\n        assertArrayEquals(inputArray, ArrayPacking.unpackInts(ArrayPacking.pack(inputArray, false), false));\n        assertArrayEquals(inputArray, ArrayPacking.unpackInts(ArrayPacking.pack(inputArray, true), true));\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 0, 1, 2, 3, 17, 100 })\n    public void testShortsPackRoundTrip(int inputLength) {\n        short[] inputArray = new short[inputLength];\n        for (int i = 0; i < inputLength; i++) {\n            inputArray[i] = (short) (rng.nextInt() % 100);\n        }\n        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(ArrayPacking.pack(inputArray, false), false));\n        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(ArrayPacking.pack(inputArray, true), true));\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 0, 1, 2, 3, 11, 100 })\n    public void testIdenticalInts(int inputLength) {\n        int[] inputArray = new int[inputLength];\n        Arrays.fill(inputArray, rng.nextInt());\n        assertArrayEquals(inputArray, ArrayPacking.unpackInts(ArrayPacking.pack(inputArray, false), false));\n        int[] result = ArrayPacking.pack(inputArray, true);\n        assertTrue(result.length == 3 || inputLength < 3 && result.length == inputLength);\n        assertArrayEquals(inputArray, 
ArrayPacking.unpackInts(result, true));\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 0, 1, 2, 3, 17, 100 })\n    public void testIdenticalShorts(int inputLength) {\n        short item = (short) (rng.nextInt() % 100);\n        short[] inputArray = new short[inputLength];\n        for (int i = 0; i < inputLength; i++) {\n            inputArray[i] = item;\n        }\n        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(ArrayPacking.pack(inputArray, false), false));\n        int[] result = ArrayPacking.pack(inputArray, true);\n        assertTrue(result.length == 3 || inputLength < 3 && result.length == inputLength);\n        assertArrayEquals(inputArray, ArrayPacking.unpackShorts(result, true));\n    }\n\n    @Test\n    public void testUnpackIntsWithLengthGiven() {\n        int inputLength = 100;\n        int[] inputArray = rng.ints().limit(inputLength).toArray();\n\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 1, false));\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 1, true));\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, -1, false));\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, -1, true));\n        assertDoesNotThrow(() -> pack(inputArray, 0, true));\n        assertDoesNotThrow(() -> pack(inputArray, 0, false));\n\n        int[] uncompressed = ArrayPacking.pack(inputArray, false);\n        int[] compressed = ArrayPacking.pack(inputArray, true);\n\n        int[] result = ArrayPacking.unpackInts(uncompressed, 50, false);\n        assertThrows(IllegalArgumentException.class, () -> unpackInts(compressed, -1, true));\n        assertEquals(50, result.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, 50), result);\n\n        result = ArrayPacking.unpackInts(compressed, 50, true);\n        assertEquals(50, result.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, 50), 
result);\n\n        result = ArrayPacking.unpackInts(uncompressed, 200, false);\n        assertEquals(200, result.length);\n        assertArrayEquals(inputArray, Arrays.copyOf(result, 100));\n        for (int i = 100; i < 200; i++) {\n            assertEquals(0, result[i]);\n        }\n\n        result = ArrayPacking.unpackInts(compressed, 200, true);\n        assertEquals(200, result.length);\n        assertArrayEquals(inputArray, Arrays.copyOf(result, 100));\n        for (int i = 100; i < 200; i++) {\n            assertEquals(0, result[i]);\n        }\n    }\n\n    @Test\n    public void testUnpackShortsWithLengthGiven() {\n        int inputLength = 100;\n        short[] inputArray = new short[50];\n        Arrays.fill(inputArray, (short) 2);\n        short[] test = new short[2];\n        short[] test2 = new short[3];\n        int[] uncompressed = ArrayPacking.pack(inputArray, false);\n        int[] compressed = ArrayPacking.pack(inputArray, true);\n\n        assertArrayEquals(test, unpackShorts(new int[2], true));\n        assertArrayEquals(test, unpackShorts(new int[2], false));\n        assertArrayEquals(test2, unpackShorts(new int[3], false));\n\n        assertThrows(IllegalArgumentException.class, () -> unpackShorts(uncompressed, -1, false));\n\n        short[] result = ArrayPacking.unpackShorts(uncompressed, 50, false);\n        assertEquals(50, result.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, 50), result);\n\n        result = ArrayPacking.unpackShorts(compressed, 100, true);\n        assertEquals(100, result.length);\n        for (int y = 0; y < 50; y++) {\n            assertTrue(result[y] == 2);\n        }\n        for (int y = 50; y < 100; y++) {\n            assertTrue(result[y] == 0);\n        }\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 0, 1, 2, 3, 17, 100 })\n    public void testPackDoublesRoundTrip(int inputLength) {\n        double[] inputArray = rng.doubles().limit(inputLength).toArray();\n        
assertArrayEquals(inputArray, ArrayPacking.unpackDoubles(ArrayPacking.pack(inputArray)));\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 0, 1, 2, 3, 5, 100 })\n    public void testPackFloatsRoundTrip(int inputLength) {\n        float[] inputArray = new float[inputLength];\n        for (int i = 0; i < inputLength; i++) {\n            inputArray[i] = rng.nextFloat();\n        }\n        assertArrayEquals(inputArray, unpackFloats(ArrayPacking.pack(inputArray)));\n    }\n\n    @ParameterizedTest\n    @ValueSource(booleans = { true, false })\n    public void testPackShortsWithLength(boolean compress) {\n        int inputLength = 100;\n        int packLength = 76;\n        short[] inputArray = new short[inputLength];\n        for (int i = 0; i < inputLength; i++) {\n            inputArray[i] = (short) (rng.nextInt() % 100);\n        }\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 10, compress));\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, -10, compress));\n        int[] array = ArrayPacking.pack(inputArray, packLength, compress);\n        short[] outputArray = ArrayPacking.unpackShorts(array, compress);\n\n        assertEquals(packLength, outputArray.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, packLength), outputArray);\n    }\n\n    @Test\n    public void testPackDoublesWithLength() {\n        int inputLength = 100;\n        int packLength = 76;\n        double[] inputArray = rng.doubles().limit(inputLength).toArray();\n        byte[] bytes = ArrayPacking.pack(inputArray, packLength);\n        double[] outputArray = ArrayPacking.unpackDoubles(bytes);\n\n        assertEquals(packLength, outputArray.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, packLength), outputArray);\n        assertDoesNotThrow(() -> pack(new double[0], 0));\n        assertThrows(IllegalArgumentException.class, () -> pack(new double[10], 11));\n        
assertThrows(IllegalArgumentException.class, () -> pack(new double[10], -1));\n    }\n\n    @Test\n    public void testPackFloatsWithLength() {\n        int inputLength = 100;\n        int packLength = 76;\n        float[] inputArray = new float[inputLength];\n        for (int i = 0; i < inputLength; i++) {\n            inputArray[i] = rng.nextFloat();\n        }\n        byte[] bytes = ArrayPacking.pack(inputArray, packLength);\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, inputLength + 10));\n        float[] outputArray = unpackFloats(bytes);\n\n        assertEquals(packLength, outputArray.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, packLength), outputArray);\n        assertDoesNotThrow(() -> pack(new float[0], 0));\n        assertThrows(IllegalArgumentException.class, () -> pack(new float[10], -1));\n    }\n\n    @Test\n    public void testUnpackDoublesWithLength() {\n        int inputLength = 100;\n        double[] inputArray = rng.doubles().limit(inputLength).toArray();\n        byte[] bytes = ArrayPacking.pack(inputArray);\n\n        int unpackLength1 = 25;\n        double[] outputArray1 = ArrayPacking.unpackDoubles(bytes, unpackLength1);\n        assertEquals(unpackLength1, outputArray1.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, unpackLength1), outputArray1);\n\n        int unpackLength2 = 123;\n        assertThrows(IllegalArgumentException.class, () -> pack(inputArray, unpackLength2));\n        double[] outputArray2 = ArrayPacking.unpackDoubles(bytes, unpackLength2);\n        assertEquals(unpackLength2, outputArray2.length);\n        assertArrayEquals(inputArray, Arrays.copyOf(outputArray2, inputLength));\n        for (int i = inputLength; i < unpackLength2; i++) {\n            assertEquals(0.0, outputArray2[i]);\n        }\n    }\n\n    @Test\n    public void testUnpackFloatWithLength() {\n        int inputLength = 100;\n        float[] inputArray = new float[inputLength];\n        for 
(int i = 0; i < inputLength; i++) {\n            inputArray[i] = rng.nextFloat();\n        }\n        byte[] bytes = ArrayPacking.pack(inputArray);\n\n        int unpackLength1 = 25;\n        float[] outputArray1 = unpackFloats(bytes, unpackLength1);\n        assertEquals(unpackLength1, outputArray1.length);\n        assertArrayEquals(Arrays.copyOf(inputArray, unpackLength1), outputArray1);\n\n        int unpackLength2 = 123;\n        float[] outputArray2 = unpackFloats(bytes, unpackLength2);\n        assertEquals(unpackLength2, outputArray2.length);\n        assertArrayEquals(inputArray, Arrays.copyOf(outputArray2, inputLength));\n        for (int i = inputLength; i < unpackLength2; i++) {\n            assertEquals(0.0, outputArray2[i]);\n        }\n    }\n\n    @Test\n    public void testConfig() {\n        byte[] array = new byte[1];\n        assertThrows(IllegalArgumentException.class, () -> unpackFloats(array, 1));\n        assertThrows(IllegalArgumentException.class, () -> unpackDoubles(array, 1));\n\n        byte[] newArray = new byte[Double.BYTES];\n        assertDoesNotThrow(() -> unpackDoubles(newArray, 1));\n        assertDoesNotThrow(() -> unpackFloats(newArray, 1));\n        assertThrows(IllegalArgumentException.class, () -> unpackFloats(newArray, -1));\n        assertThrows(IllegalArgumentException.class, () -> unpackDoubles(newArray, -1));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/util/ArrayUtilsTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArrayNullable;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArrayNullable;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotSame;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.CsvSource;\n\npublic class ArrayUtilsTest {\n\n    ArrayUtils utils = new ArrayUtils();\n\n    @ParameterizedTest\n    @CsvSource({ \"-0.0,0.0\", \"0.0,0.0\", \"-0.0:0.0:1.0,0.0:0.0:1.0\" })\n    public void cleanCopy(String input, String expected) {\n        double[] inputArray = array(input);\n        double[] cleanCopy = ArrayUtils.cleanCopy(inputArray);\n        assertNotSame(inputArray, cleanCopy);\n        assertArrayEquals(array(expected), cleanCopy);\n    }\n\n    private double[] array(String arrayString) {\n        return 
Arrays.stream(arrayString.split(\":\")).mapToDouble(Double::valueOf).toArray();\n    }\n\n    @Test\n    void testNullable() {\n        assertNull(toDoubleArrayNullable(null));\n        assertNull(toFloatArrayNullable(null));\n        float random = new Random().nextFloat();\n        assertArrayEquals(toFloatArrayNullable(new double[] { random }), toFloatArray(new double[] { random }));\n        assertArrayEquals(toDoubleArrayNullable(new float[] { random }), toDoubleArray(new float[] { random }));\n        assertThrows(NullPointerException.class, () -> toDoubleArray(null));\n        assertThrows(NullPointerException.class, () -> toFloatArray(null));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/util/ShingleBuilderTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class ShingleBuilderTest {\n\n    private int dimensions;\n    private int shingleSize;\n    private ShingleBuilder builder;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 2;\n        shingleSize = 3;\n        builder = new ShingleBuilder(dimensions, shingleSize);\n    }\n\n    @Test\n    public void testNew() {\n        assertEquals(dimensions, builder.getInputPointSize());\n        assertEquals(dimensions * shingleSize, builder.getShingledPointSize());\n        assertFalse(builder.isCyclic());\n    }\n\n    @Test\n    public void testNewWithInvalidArguments() {\n        assertThrows(IllegalArgumentException.class, () -> new ShingleBuilder(0, shingleSize));\n        assertThrows(IllegalArgumentException.class, () -> new ShingleBuilder(dimensions, 0));\n    }\n\n    @Test\n    public void testAddPoint() {\n        double[] shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 0, 0, 0, 0, 0, 0 }, shingle);\n\n        
builder.addPoint(new double[] { 9, 10 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 0, 0, 0, 0, 9, 10 }, shingle);\n\n        builder.addPoint(new double[] { 7, 8 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 0, 0, 9, 10, 7, 8 }, shingle);\n\n        builder.addPoint(new double[] { 5, 6 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 9, 10, 7, 8, 5, 6 }, shingle);\n\n        builder.addPoint(new double[] { 3, 4 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 7, 8, 5, 6, 3, 4 }, shingle);\n    }\n\n    @Test\n    public void testAddPointCyclic() {\n        builder = new ShingleBuilder(dimensions, shingleSize, true);\n        double[] shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 0, 0, 0, 0, 0, 0 }, shingle);\n\n        builder.addPoint(new double[] { 9, 10 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 9, 10, 0, 0, 0, 0 }, shingle);\n\n        builder.addPoint(new double[] { 7, 8 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 9, 10, 7, 8, 0, 0 }, shingle);\n\n        builder.addPoint(new double[] { 5, 6 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 9, 10, 7, 8, 5, 6 }, shingle);\n\n        builder.addPoint(new double[] { 3, 4 });\n        shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 3, 4, 7, 8, 5, 6 }, shingle);\n    }\n\n    @Test\n    public void testAddPointWithInvalidArguments() {\n        assertThrows(NullPointerException.class, () -> builder.addPoint(null));\n\n        double[] point = new double[9]; // wrong size of array\n        assertThrows(IllegalArgumentException.class, () -> builder.addPoint(point));\n    }\n\n    @Test\n    public void testShingleCopy() {\n        double[] buffer = new double[dimensions * 
shingleSize];\n\n        builder.addPoint(new double[] { 2, 1 });\n        builder.addPoint(new double[] { 4, 3 });\n        builder.addPoint(new double[] { 6, 5 });\n\n        double[] shingle = builder.getShingle();\n        assertArrayEquals(new double[] { 2, 1, 4, 3, 6, 5 }, shingle);\n        assertArrayEquals(new double[] { 0, 0, 0, 0, 0, 0 }, buffer);\n\n        builder.getShingle(buffer);\n        assertArrayEquals(shingle, buffer);\n\n        buffer[0] = 0;\n        assertEquals(2, shingle[0]);\n    }\n\n    @Test\n    public void testGetShingleWithInvalidArguments() {\n        assertThrows(NullPointerException.class, () -> builder.getShingle(null));\n\n        double[] buffer = new double[2]; // wrong size of array\n        assertThrows(IllegalArgumentException.class, () -> builder.getShingle(buffer));\n    }\n}\n"
  },
  {
    "path": "Java/core/src/test/java/com/amazon/randomcutforest/util/WeightedTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.util;\n\nimport static com.amazon.randomcutforest.util.Weighted.createSample;\nimport static com.amazon.randomcutforest.util.Weighted.prefixPick;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\npublic class WeightedTest {\n    private Random rng;\n    int size = 10000;\n    int heavyIndex;\n    ArrayList<Weighted<Integer>> list = new ArrayList<>();\n\n    @BeforeEach\n    public void setUp() {\n        rng = new Random();\n        list = new ArrayList<>();\n        for (int i = 0; i < size; i++) {\n            list.add(new Weighted<>(i, (float) (0.1 + rng.nextDouble())));\n        }\n        heavyIndex = size + 7;\n        list.add(new Weighted<>(heavyIndex, size));\n    }\n\n    @Test\n    public void testCreateSample() {\n        // forcedSample 0 will return a null list\n        assertTrue(createSample(list, 0, 10, 0, 1.0).size() == 0);\n        // the following should add the last item first\n        List<Weighted<Integer>> sampledList = createSample(list, 0, 10, 0.1, 1.0);\n        assertTrue(sampledList.size() > 0);\n        assertTrue(sampledList.get(0).index == 
heavyIndex);\n        assertTrue(sampledList.get(0).weight == (float) size);\n    }\n\n    @Test\n    public void testPrefixPick() {\n        double total = list.stream().mapToDouble(e -> e.weight).sum();\n        assertTrue(total < 2 * size);\n\n        Weighted<Integer> item = prefixPick(list, size / 3.0);\n        assertTrue(item.index < size);\n        assertTrue(item.weight <= 1.1);\n\n        // should be the last element\n        Weighted<Integer> heavyItem = prefixPick(list, 3.0 * size / 4);\n        assertTrue(heavyItem.index == heavyIndex);\n        assertTrue(heavyItem.weight == (float) size);\n\n        // checking extreme weights\n        heavyItem = prefixPick(list, 2 * size);\n        assertTrue(heavyItem.index == heavyIndex);\n        assertTrue(heavyItem.weight == (float) size);\n    }\n\n    @Test\n    public void emptyList() {\n        List<Weighted<Integer>> list = new ArrayList<>();\n        assertThrows(IllegalArgumentException.class, () -> prefixPick(list, 1.0f));\n    }\n\n}\n"
  },
  {
    "path": "Java/core/src/test/resources/com/amazon/randomcutforest/state/Preprocessor_1.json",
    "content": "{\n  \"version\": \"2.1\",\n  \"useImputedFraction\": 0.5,\n  \"imputationMethod\": \"PREVIOUS\",\n  \"forestMode\": \"STANDARD\",\n  \"transformMethod\": \"NONE\",\n  \"weights\": [\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0\n  ],\n  \"lastShingledPoint\": [\n    0.0,\n    79.5,\n    83.0,\n    76.0,\n    0.0,\n    94.0,\n    94.0,\n    94.0,\n    0.0,\n    85.0,\n    85.0,\n    85.0,\n    0.0,\n    53.0,\n    53.0,\n    53.0,\n    0.0,\n    53.0,\n    53.0,\n    53.0,\n    0.0,\n    97.0,\n    97.0,\n    97.0,\n    0.0,\n    66.0,\n    66.0,\n    66.0,\n    0.0,\n    53.0,\n    53.0,\n    53.0\n  ],\n  \"lastShingledInput\": [\n    0.0,\n    79.5,\n    83.0,\n    76.0,\n    0.0,\n    94.0,\n    94.0,\n    94.0,\n    0.0,\n    85.0,\n    85.0,\n    85.0,\n    0.0,\n    53.0,\n    53.0,\n    53.0,\n    0.0,\n    53.0,\n    53.0,\n    53.0,\n    0.0,\n    97.0,\n    97.0,\n    97.0,\n    0.0,\n    66.0,\n    66.0,\n    66.0,\n    0.0,\n    53.0,\n    53.0,\n    53.0\n  ],\n  \"timeDecay\": 0.0,\n  \"startNormalization\": 10,\n  \"stopNormalization\": 2147483647,\n  \"shingleSize\": 8,\n  \"dimensions\": 32,\n  \"inputLength\": 32,\n  \"clipFactor\": 10.0,\n  \"normalizeTime\": false,\n  \"previousTimeStamps\": [\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0\n  ],\n  \"valuesSeen\": 1257,\n  \"internalTimeStamp\": 1257,\n  \"dataQualityState\": {\n    \"discount\": 1.0e-4,\n    \"weight\": 629.4992050874407,\n    \"sumSquared\": 629.4992050874407,\n    \"sum\": 629.4992050874407,\n    \"count\": 1257\n  },\n  \"timeStampDeviationState\": {\n    \"discount\": 1.0e-4,\n    \"weight\": 629.4992050874407,\n    \"sumSquared\": 0.0,\n    \"sum\": 0.0,\n    
\"count\": 1257\n  }\n}\n\n"
  },
  {
    "path": "Java/core/src/test/resources/com/amazon/randomcutforest/state/Preprocessor_2.json",
    "content": "{\n  \"version\": \"2.1\",\n  \"useImputedFraction\": 0.5,\n  \"imputationMethod\": \"PREVIOUS\",\n  \"forestMode\": \"STANDARD\",\n  \"transformMethod\": \"NONE\",\n  \"weights\": [\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0,\n    1.0\n  ],\n  \"lastShingledPoint\": [\n    0.0,\n    81.1,\n    98.0,\n    54.0,\n    0.0,\n    72.875,\n    92.0,\n    53.0,\n    0.0,\n    64.3,\n    84.0,\n    50.0,\n    0.0,\n    84.9,\n    97.0,\n    55.0,\n    0.0,\n    72.11111111111111,\n    92.0,\n    51.0,\n    0.0,\n    70.36363636363636,\n    84.0,\n    51.0,\n    0.0,\n    85.66666666666667,\n    96.0,\n    65.0,\n    0.0,\n    75.875,\n    97.0,\n    53.0\n  ],\n  \"lastShingledInput\": [\n    0.0,\n    81.1,\n    98.0,\n    54.0,\n    0.0,\n    72.875,\n    92.0,\n    53.0,\n    0.0,\n    64.3,\n    84.0,\n    50.0,\n    0.0,\n    84.9,\n    97.0,\n    55.0,\n    0.0,\n    72.11111111111111,\n    92.0,\n    51.0,\n    0.0,\n    70.36363636363636,\n    84.0,\n    51.0,\n    0.0,\n    85.66666666666667,\n    96.0,\n    65.0,\n    0.0,\n    75.875,\n    97.0,\n    53.0\n  ],\n  \"timeDecay\": 0.0,\n  \"startNormalization\": 10,\n  \"stopNormalization\": 2147483647,\n  \"shingleSize\": 8,\n  \"dimensions\": 32,\n  \"inputLength\": 32,\n  \"clipFactor\": 10.0,\n  \"normalizeTime\": false,\n  \"previousTimeStamps\": [\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0\n  ],\n  \"valuesSeen\": 505,\n  \"internalTimeStamp\": 505,\n  \"dataQualityState\": {\n    \"discount\": 1.0E-4,\n    \"weight\": 253.49802371541492,\n    \"sumSquared\": 253.49802371541492,\n    \"sum\": 253.49802371541492,\n    \"count\": 505\n  },\n  \"timeStampDeviationState\": {\n    \"discount\": 1.0E-4,\n   
 \"weight\": 253.49802371541492,\n    \"sumSquared\": 0.0,\n    \"sum\": 0.0,\n    \"count\": 505\n  }\n}\n"
  },
  {
    "path": "Java/core/src/test/resources/com/amazon/randomcutforest/state/Preprocessor_3.json",
    "content": "    {\n      \"version\": \"2.1\",\n      \"useImputedFraction\": 0.5,\n      \"imputationMethod\": \"PREVIOUS\",\n      \"forestMode\": \"STANDARD\",\n      \"transformMethod\": \"NONE\",\n      \"weights\": [1.0, 1.0],\n      \"lastShingledPoint\": [81.0, 82.0, 83.0, 84.0, 86.0, 87.0, 88.0, 88.0],\n      \"lastShingledInput\": [81.0, 82.0, 83.0, 84.0, 86.0, 87.0, 88.0, 88.0],\n      \"timeDecay\": 0.0,\n      \"startNormalization\": 10,\n      \"stopNormalization\": 2147483647,\n      \"shingleSize\": 8,\n      \"dimensions\": 8,\n      \"inputLength\": 1,\n      \"clipFactor\": 10.0,\n      \"normalizeTime\": false,\n      \"previousTimeStamps\": [0, 0, 0, 0, 0, 0, 0, 0],\n      \"valuesSeen\": 1502,\n      \"internalTimeStamp\": 1502,\n      \"dataQualityState\": {\n        \"discount\": 1.0e-4,\n        \"weight\": 751.9993346640052,\n        \"sumSquared\": 751.9993346640052,\n        \"sum\": 751.9993346640052,\n        \"count\": 1502\n      },\n      \"timeStampDeviationState\": {\n        \"discount\": 1.0e-4,\n        \"weight\": 751.9993346640052,\n        \"sumSquared\": 0.0,\n        \"sum\": 0.0,\n        \"count\": 1502\n      }\n    }\n"
  },
  {
    "path": "Java/core/src/test/resources/com/amazon/randomcutforest/state/state_1.json",
    "content": "{\n  \"version\": \"2.0\",\n  \"totalUpdates\": 1257,\n  \"timeDecay\": 1.0e-4,\n  \"numberOfTrees\": 30,\n  \"sampleSize\": 256,\n  \"shingleSize\": 8,\n  \"dimensions\": 32,\n  \"outputAfter\": 32,\n  \"compressed\": true,\n  \"partialTreeState\": true,\n  \"boundingBoxCacheFraction\": 0.0,\n  \"storeSequenceIndexesEnabled\": false,\n  \"compact\": true,\n  \"internalShinglingEnabled\": false,\n  \"centerOfMassEnabled\": false,\n  \"precision\": \"FLOAT_32\",\n  \"pointStoreState\": {\n    \"version\": \"2.0\",\n    \"dimensions\": 32,\n    \"capacity\": 7681,\n    \"shingleSize\": 8,\n    \"precision\": \"FLOAT_32\",\n    \"startOfFreeSegment\": 9536,\n    \"pointData\": [\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n   
   0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n     
 0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n   
   0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      106,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      106,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      
0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      
0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -127,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n 
     66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -127,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n     
 0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      
0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n   
   66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n     
 0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -65,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n  
    66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      
-74,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n  
    0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n 
     0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -89,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -89,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -89,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -89,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -116,\n     
 0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -89,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n     
 0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n   
   0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n     
 0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n  
    76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      
0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n    
  0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      
0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      
66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n    
  -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n   
   0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n  
    0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      
0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n 
     66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -123,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n 
     0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      
66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -71,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n    
  0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      
0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n    
  0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n     
 124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      
-120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -73,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -73,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -73,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -73,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      114,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      114,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n   
   0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n     
 84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      
0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      
76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n   
   0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      122,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      122,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      
0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n     
 0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n     
 66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n     
 0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n   
   0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n  
    66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      
66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n   
   66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      
-76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      
66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n    
  0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      
0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n    
  0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      
0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      102,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n   
   66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      102,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n     
 0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      
0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n  
    66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n   
   0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      74,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      
66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      74,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      74,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n    
  66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      
116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n    
  0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      
0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n  
    66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      
0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      106,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n   
   0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      106,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      106,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      
66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      
-82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      
0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -59,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n    
  0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -59,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      
96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n   
   0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n  
    66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n 
     0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n 
     0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      
0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      
66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n     
 0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n  
    0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      
-88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      
0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n 
     0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n     
 66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -87,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -87,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n     
 0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      
-94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n   
   0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      
66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      110,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
110,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n     
 0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      
0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      106,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -127,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      
-100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n    
  0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n  
    0,\n      0,\n      0,\n      0,\n      66,\n      -81,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -73,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -73,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n   
   0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      
-64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      
0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n   
   66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      
0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n 
     0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n    
  0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n  
    -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      0,\n      0,\n      66,\n      -100,\n  
    0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -87,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      
66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -87,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      
0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n     
 0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      
72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      
0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n  
    -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n   
   -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n 
     92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n   
   -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      
0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n  
    0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      -56,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      
-118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      
124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      
0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n  
    -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      
0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n   
   0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n  
    0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      84,\n 
     0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n 
     0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      
0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n    
  0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      
66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      
66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n   
   66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      
66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n     
 -90,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n  
    0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n  
    -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      
66,\n      -114,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      
66,\n      -96,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      0,\n      0,\n   
   0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      
66,\n      -112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      
-106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n    
  -97,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      66,\n      100,\n      
0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      
66,\n      -62,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n 
     0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n  
    0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      
-94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n   
   0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      
0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      0,\n      0,\n      0,\n 
     0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0\n    ],\n    \"compressed\": true,\n    \"refCount\": [\n      1,\n      14,\n      1257,\n      890731498,\n      374221668,\n      253604528,\n      650321227,\n      793411785,\n      959695834,\n      259937023,\n      679318644,\n      560624350,\n      815504289,\n      437912711,\n      537835778,\n      656352711,\n      768154046,\n      447744646,\n      816151886,\n      237848008,\n      347168801,\n      349292853,\n      643946593,\n      890679179,\n      364428614,\n      687504757,\n      484396673,\n      674678697,\n      476681853,\n      1110239934,\n      235302022,\n      575603917,\n      566644786,\n      350555497,\n      454130325,\n      492737986,\n      446444997,\n      478864503,\n      558547701,\n      1011233667,\n      463355641,\n      568763853,\n      364277821,\n      660081620,\n      808048492,\n      659541431,\n      906453230,\n      560775240,\n      339423827,\n      786043959,\n      455161553,\n      772974277,\n      788075462,\n      341332677,\n      702531530,\n      356746157,\n      454052692,\n      793952200,\n      538535356,\n      334118729,\n      454585985,\n      454064120,\n      342254895,\n      680513489,\n   
   686521829,\n      678482664,\n      432537945,\n      778498523,\n      483737874,\n      666644502,\n      664463976,\n      364815644,\n      885388692,\n      438493209,\n      352097009,\n      476601308,\n      862366586,\n      688037103,\n      372255050,\n      258559718,\n      891383955,\n      553680254,\n      484635160,\n      448710446,\n      348409645,\n      761313087,\n      364852355,\n      598146072,\n      657273937,\n      334802373,\n      578296323,\n      145926973,\n      465002236,\n      787083911,\n      673562882,\n      703183826,\n      764298562,\n      357319020,\n      795686981,\n      884469075,\n      778023464,\n      891458028,\n      273912611,\n      693964365,\n      891845519,\n      363120831,\n      326551167,\n      380495310,\n      1018352344,\n      454048928,\n      687441502,\n      348330462,\n      915001195,\n      657968130,\n      808534764,\n      371300136,\n      798293151,\n      853725099,\n      250991796,\n      454124785,\n      694085102,\n      656854093,\n      464425577,\n      437380865,\n      787161091,\n      665457119,\n      544981452,\n      673071948,\n      989228900,\n      664795999,\n      981826014,\n      988842809,\n      686001116,\n      470800728,\n      591365799,\n      439109097,\n      679934225,\n      130357591,\n      667728580,\n      762920903,\n      988255765,\n      776717853,\n      692413349,\n      242183545,\n      342224473,\n      251019601,\n      267538542,\n      658033846,\n      574103676,\n      462696983,\n      265889949,\n      546167198,\n      236321922,\n      462249978,\n      1011933754,\n      483203552,\n      566188034,\n      597644660,\n      846903695,\n      1064483747,\n      5\n    ],\n    \"directLocationMap\": false,\n    \"locationList\": [\n      0,\n      9504,\n      1257,\n      38020,\n      114068,\n      456284,\n      798472,\n      874548,\n      950596,\n      1026644,\n      1102692,\n      1178740,\n      1254788,\n      
1330836,\n      1673024,\n      1749100,\n      1825148,\n      1901196,\n      1977244,\n      2053292,\n      2129340,\n      2205388,\n      2281436,\n      2357484,\n      2433532,\n      2509580,\n      2585628,\n      2927844,\n      3003892,\n      3079940,\n      3155988,\n      3498204,\n      3574252,\n      3650300,\n      3726348,\n      3802396,\n      3878444,\n      3954492,\n      4030540,\n      4106588,\n      4182636,\n      4258684,\n      4600900,\n      4676948,\n      4752996,\n      4829044,\n      5171232,\n      5513476,\n      5589524,\n      5665572,\n      6007788,\n      6083836,\n      6159884,\n      6235932,\n      6311980,\n      6388028,\n      6730216,\n      6806292,\n      6882340,\n      6958388,\n      7034436,\n      7376624,\n      7452700,\n      7528748,\n      7870964,\n      8213152,\n      8555396,\n      8631444,\n      8707492,\n      8783540,\n      8859588,\n      8935636,\n      9011684,\n      9353872,\n      9429948,\n      9505996,\n      9582044,\n      9658092,\n      9734140,\n      9810188,\n      9886236,\n      10228424,\n      10304500,\n      10646688,\n      10722764,\n      10798812,\n      11141028,\n      11217076,\n      11293124,\n      11369172,\n      11445220,\n      11521268,\n      11597316,\n      11673364,\n      11749412,\n      11825460,\n      11901508,\n      11977556,\n      12319744,\n      12395820,\n      12471868,\n      12547916,\n      12623964,\n      12700012,\n      12776060,\n      12852108,\n      12928156,\n      13270344,\n      13346420,\n      13688636,\n      13764684,\n      13840732,\n      13916780,\n      13992828,\n      14068876,\n      14144924,\n      14220972,\n      14563188,\n      14639236,\n      14715284,\n      14791332,\n      15133548,\n      15209596,\n      15285644,\n      15361692,\n      15437740,\n      15513788,\n      15589836,\n      15932052,\n      16274268,\n      16882624,\n      16958700,\n      17300916,\n      17376964,\n      
17453012,\n      17529060,\n      17605108,\n      17681156,\n      17757204,\n      17833252,\n      17909300,\n      17985348,\n      18061396,\n      18137444,\n      18213492,\n      18555708,\n      19164064,\n      19240140,\n      19316188,\n      19392236,\n      19734452,\n      19810500,\n      19886548,\n      19962596,\n      20038644,\n      20114692,\n      20190740,\n      20266788,\n      20875144,\n      20951220,\n      21027268,\n      21103316,\n      21179364,\n      21255412,\n      21331460,\n      21673648,\n      21749724,\n      21825772,\n      21901820,\n      21977868,\n      22053916,\n      22129964,\n      22206012,\n      22282060,\n      22358108,\n      22700296,\n      22776372,\n      22852420,\n      22928468,\n      23004516,\n      23612872,\n      23688948,\n      23764996,\n      23841044,\n      23917092,\n      24259280,\n      24335356,\n      24677544,\n      24753620,\n      25095808,\n      25171884,\n      25247932,\n      25323980,\n      25666196,\n      25742244,\n      26084460,\n      26160508,\n      26502696,\n      26578772,\n      26654820,\n      26730868,\n      27073084,\n      27149132,\n      27225180,\n      27567368,\n      27643444,\n      27985660,\n      28061708,\n      28137756,\n      28213804,\n      28289852,\n      28632068,\n      28974284,\n      29316500,\n      29658716,\n      30000932,\n      30076980,\n      30153028,\n      30495216,\n      30571292,\n      30913480,\n      30989556,\n      31065604,\n      31141652,\n      31217700,\n      31293748,\n      31369796,\n      31445844,\n      31521892,\n      31597940,\n      31673988,\n      31750036,\n      31826084,\n      32168300,\n      32510488,\n      32586564,\n      33194920,\n      33270996,\n      33347044,\n      33423092,\n      33499140,\n      33575188,\n      33651236,\n      33727284,\n      34069472,\n      34145548,\n      34221596,\n      34297644,\n      34373692,\n      34715908,\n      34791956,\n      
35134144,\n      35210220,\n      35286268,\n      35362316,\n      35704532,\n      35780580,\n      35856628,\n      35932676,\n      36008724,\n      36084772,\n      36160820,\n      36236868,\n      36579056,\n      36655132,\n      36731180,\n      36807228,\n      36883276,\n      37225492,\n      37301540,\n      37377588,\n      37453636,\n      37529684,\n      37605732,\n      37947948,\n      38023996,\n      38366212,\n      38708428,\n      39050616,\n      39126692,\n      39202740,\n      39278788,\n      39354836,\n      39430884,\n      39506932,\n      39582980,\n      39925168,\n      40001244,\n      40077292,\n      40153340,\n      40229388,\n      40305436,\n      40381484,\n      40723672,\n      40799748,\n      40875796,\n      40951844,\n      41027892,\n      41370108,\n      41446156,\n      41788372,\n      42130560,\n      42206636,\n      42548852,\n      42624900,\n      42700948,\n      42776996,\n      42853044,\n      42929092,\n      43005140,\n      43081188,\n      43423404,\n      43765620,\n      43841668,\n      44183884,\n      44259932,\n      44335980,\n      44412028,\n      44754216,\n      44830292,\n      44906340,\n      45248528,\n      45324604,\n      45400652,\n      45476700,\n      45552748,\n      45628796,\n      45704844,\n      45780892,\n      46123108,\n      46465324,\n      46541372,\n      46617420,\n      46693468,\n      47035656,\n      47111732,\n      47187780,\n      47263828,\n      47606016,\n      47682092,\n      47758140,\n      47834188,\n      47910236,\n      47986284,\n      48062332,\n      48138380,\n      48214428,\n      48556616,\n      48632692,\n      48708740,\n      48784788,\n      48860836,\n      49203024,\n      49279100,\n      49621288,\n      49697364,\n      49773412,\n      49849460,\n      50191648,\n      50267724,\n      50343772,\n      50419820,\n      50495868,\n      50571916,\n      50647964,\n      50724012,\n      50800060,\n      50876108,\n      
50952156,\n      51028204,\n      51104252,\n      51180300,\n      51522516,\n      51864704,\n      51940780,\n      52016828,\n      52092876,\n      52435064,\n      52511140,\n      52587188,\n      52663236,\n      52739284,\n      52815332,\n      52891380,\n      52967428,\n      53043476,\n      53385664,\n      53461740,\n      53537788,\n      53613836,\n      53689884,\n      53765932,\n      54108120,\n      54184196,\n      54526384,\n      54602460,\n      54678508,\n      55286864,\n      55362940,\n      55438988,\n      55781176,\n      55857252,\n      55933300,\n      56009348,\n      56085396,\n      56161444,\n      56503660,\n      56579708,\n      56655756,\n      56731804,\n      56807852,\n      56883900,\n      57226116,\n      57302164,\n      57378212,\n      57454260,\n      57530308,\n      57872524,\n      57948572,\n      58290788,\n      58366836,\n      58442884,\n      58518932,\n      58594980,\n      58937168,\n      59013244,\n      59089292,\n      59165340,\n      59241388,\n      59317436,\n      59393484,\n      59469532,\n      59545580,\n      59621628,\n      59963816,\n      60039892,\n      60115940,\n      60458128,\n      60534204,\n      60610252,\n      60952440,\n      61294656,\n      61370732,\n      61446780,\n      61522828,\n      61865044,\n      61941092,\n      62017140,\n      62359356,\n      62435404,\n      62511452,\n      62587500,\n      62663548,\n      63005764,\n      63081812,\n      63157860,\n      63233908,\n      63576096,\n      63652172,\n      63728220,\n      64070408,\n      64412624,\n      64488700,\n      64564748,\n      64640796,\n      64716844,\n      65059032,\n      65135108,\n      65211156,\n      65553344,\n      65629420,\n      65705468,\n      65781516,\n      65857564,\n      66199780,\n      66275828,\n      66618044,\n      67226400,\n      67302476,\n      67378524,\n      67720740,\n      67796788,\n      67872836,\n      67948884,\n      68024932,\n      
68100980,\n      68177028,\n      68253076,\n      68329124,\n      68405172,\n      68747360,\n      69089576,\n      69165652,\n      69241700,\n      69317748,\n      69393796,\n      69735984,\n      69812060,\n      69888108,\n      69964156,\n      70040204,\n      70116252,\n      70458468,\n      70534516,\n      70610564,\n      70686612,\n      70762660,\n      70838708,\n      70914756,\n      70990804,\n      71066852,\n      71409068,\n      71485116,\n      71561164,\n      71637212,\n      71979428,\n      72321644,\n      72397692,\n      72473740,\n      72815956,\n      72892004,\n      72968052,\n      73576408,\n      73652484,\n      73994672,\n      74070748,\n      74146796,\n      74222844,\n      74298892,\n      74374940,\n      74450988,\n      74527036,\n      74603084,\n      74679132,\n      74755180,\n      74831228,\n      74907276,\n      74983324,\n      75059372,\n      75401588,\n      75477636,\n      75819852,\n      75895900,\n      75971948,\n      76047996,\n      76124044,\n      76200092,\n      76276140,\n      76352188,\n      76428236,\n      76504284,\n      76580332,\n      76656380,\n      76732428,\n      77074616,\n      77150692,\n      77226740,\n      77568928,\n      77911144,\n      78253360,\n      78329436,\n      78405484,\n      78481532,\n      78823748,\n      78899796,\n      78975844,\n      79051892,\n      79127940,\n      79203988,\n      79280036,\n      79356084,\n      79432132,\n      79508180,\n      79850396,\n      80192584,\n      80800968,\n      81143184,\n      81219260,\n      81295308,\n      81371356,\n      81447404,\n      81523452,\n      81865640,\n      81941716,\n      82283904,\n      82359980,\n      82702196,\n      82778244,\n      83120460,\n      83196508,\n      83272556,\n      83348604,\n      83424652,\n      83500700,\n      83576748,\n      83652796,\n      83728844,\n      84071060,\n      84147108,\n      84489296,\n      84565372,\n      84907588,\n      
84983636,\n      85325852,\n      85668068,\n      85744116,\n      86086332,\n      86162380,\n      86504596,\n      86846812,\n      86922860,\n      86998908,\n      87341096,\n      87417172,\n      87493220,\n      87835408,\n      87911484,\n      88253700,\n      88595888,\n      88671964,\n      88748012,\n      88824060,\n      88900108,\n      89242296,\n      89318372,\n      89394420,\n      89470468,\n      89546516,\n      89888732,\n      90230920,\n      90306996,\n      9504\n    ],\n    \"reverseAvailable\": false,\n    \"internalShinglingEnabled\": false,\n    \"lastTimeStamp\": 1257,\n    \"rotationEnabled\": false,\n    \"dynamicResizingEnabled\": true,\n    \"currentStoreCapacity\": 512,\n    \"indexCapacity\": 2048\n  },\n  \"compactSamplerStates\": [\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5861195,\n        -1.605452,\n        -1.5982624,\n        -1.6388674,\n        -1.6310605,\n        -1.6108241,\n        -1.6641783,\n        -1.6470399,\n        -1.662391,\n        -1.66458,\n        -1.6381367,\n        -1.6604348,\n        -1.722461,\n        -1.6904022,\n        -1.6829594,\n        -1.6586255,\n        -1.8328778,\n        -1.6663431,\n        -1.8191967,\n        -1.7615604,\n        -1.7148815,\n        -1.7484502,\n        -1.6753336,\n        -1.7183716,\n        -1.688128,\n        -1.7450564,\n        -2.1241255,\n        -1.755236,\n        -1.716314,\n        -1.6855574,\n        -1.8352082,\n        -1.6686058,\n        -1.7088614,\n        -1.975995,\n        -2.1331093,\n        -1.7441834,\n        -1.7506566,\n        -2.0483587,\n        -2.2547653,\n        -1.8327026,\n        -1.8121274,\n        -1.7302328,\n        -1.7633137,\n        -1.8724989,\n        -1.7519345,\n        -1.7819105,\n        -1.9474361,\n        -1.8973167,\n        -1.8720074,\n        -1.8968571,\n        -1.8915143,\n        -1.8632329,\n        -2.01883,\n        -2.1812334,\n        -2.350511,\n        
-1.8697002,\n        -1.8398154,\n        -1.7831405,\n        -1.7950289,\n        -1.932773,\n        -1.715132,\n        -2.1989625,\n        -1.9009744,\n        -1.7144396,\n        -2.7446961,\n        -1.7951992,\n        -2.4029047,\n        -1.9952371,\n        -2.3514855,\n        -2.1877155,\n        -2.3432517,\n        -1.9697201,\n        -2.670341,\n        -2.2128923,\n        -1.972904,\n        -2.251253,\n        -2.5463715,\n        -2.7085016,\n        -2.3085272,\n        -1.9825817,\n        -2.072967,\n        -1.8706789,\n        -1.8189924,\n        -2.3386497,\n        -2.0664465,\n        -3.33256,\n        -2.0368118,\n        -2.2420359,\n        -2.6768208,\n        -1.9206775,\n        -2.5956569,\n        -3.1106741,\n        -1.9571904,\n        -1.9927236,\n        -2.3674033,\n        -1.93867,\n        -2.7404048,\n        -2.3078642,\n        -2.6232505,\n        -2.0971937,\n        -2.4987488,\n        -2.0778205,\n        -2.7914362,\n        -2.1456559,\n        -2.5070734,\n        -2.059889,\n        -2.2235098,\n        -2.9149077,\n        -2.3681848,\n        -2.396997,\n        -2.5068617,\n        -2.5728097,\n        -1.8709942,\n        -2.1455302,\n        -1.9927701,\n        -2.7714732,\n        -2.5182734,\n        -2.1565442,\n        -1.8815223,\n        -2.0475667,\n        -2.3105578,\n        -2.0638764,\n        -2.2973845,\n        -2.3146381,\n        -2.6567106,\n        -2.4039922,\n        -2.6203403,\n        -3.4856248,\n        -5.081313,\n        -3.220412,\n        -4.812422,\n        -1.9047714,\n        -3.6176436,\n        -2.6161025,\n        -3.0316484,\n        -3.013686,\n        -3.5585165,\n        -2.4007401,\n        -2.6348982,\n        -2.766112,\n        -2.9318974,\n        -5.7781105,\n        -3.107272,\n        -2.4127505,\n        -3.3237684,\n        -3.803411,\n        -4.290522,\n        -2.4230351,\n        -3.0978012,\n        -2.1676643,\n        -1.9878286,\n        
-2.5379953,\n        -2.336568,\n        -2.6192954,\n        -6.017296,\n        -2.8725154,\n        -6.3045855,\n        -3.6723707,\n        -2.8172421,\n        -2.1898556,\n        -2.3871565,\n        -2.51744,\n        -2.7863479,\n        -2.5990326,\n        -2.4863505,\n        -2.1796257,\n        -1.8518976,\n        -3.370079,\n        -6.063179,\n        -3.258116,\n        -5.121849,\n        -6.345082,\n        -3.6713245,\n        -2.6670954,\n        -3.3832657,\n        -4.917982,\n        -2.97081,\n        -3.0248885,\n        -2.7550912,\n        -2.8822653,\n        -3.8920598,\n        -2.8241985,\n        -4.593358,\n        -4.472623,\n        -3.2764409,\n        -2.8249595,\n        -2.1388478,\n        -2.8180516,\n        -3.9035788,\n        -4.516092,\n        -2.3858354,\n        -3.4240582,\n        -1.9947791,\n        -5.736862,\n        -3.374941,\n        -3.9094043,\n        -5.1914043,\n        -4.203039,\n        -2.6490705,\n        -2.395437,\n        -2.304084,\n        -2.9620337,\n        -3.2512295,\n        -3.9594069,\n        -2.2474992,\n        -2.946548,\n        -4.6885777,\n        -2.8522136,\n        -2.4729125,\n        -2.7774913,\n        -3.1150968,\n        -3.0363977,\n        -4.197292,\n        -2.7846549,\n        -2.6680098,\n        -4.2414436,\n        -3.8353665,\n        -4.186747,\n        -5.6943493,\n        -3.8587017,\n        -3.2020812,\n        -2.5837743,\n        -3.115129,\n        -5.767825,\n        -3.1498506,\n        -4.139573,\n        -2.4234726,\n        -2.7053547,\n        -4.19672,\n        -3.4625618,\n        -2.1197746,\n        -3.3355079,\n        -3.1506562,\n        -2.8635252,\n        -3.1254945,\n        -2.259082,\n        -3.146692,\n        -1.9144876,\n        -4.487445,\n        -4.18177,\n        -2.8036504,\n        -3.9046266,\n        -3.1428185,\n        -3.6163304,\n        -3.2034893,\n        -2.6041164,\n        -2.3078747,\n        -3.577054,\n     
   -2.5983744,\n        -3.4380496,\n        -3.7947206,\n        -2.8015137,\n        -2.9079344,\n        -5.612013,\n        -3.2981741,\n        -3.4895682\n      ],\n      \"pointIndex\": [\n        6,\n        1256,\n        256,\n        1580125493,\n        1009964779,\n        456934816,\n        1616333668,\n        1152569338,\n        108808059,\n        1358432074,\n        502384566,\n        1024448265,\n        1708452424,\n        1129878003,\n        1087712861,\n        1944483281,\n        1546063584,\n        68288588,\n        1273962710,\n        1220350266,\n        306704637,\n        1103944160,\n        1939752041,\n        1617814426,\n        1196814165,\n        313202232,\n        821422964,\n        1232975405,\n        511497819,\n        1879840308,\n        1710721726,\n        57714776,\n        426392352,\n        144334141,\n        919039593,\n        1301558173,\n        233030408,\n        984855939,\n        1016643541,\n        710104877,\n        1534287009,\n        1312146233,\n        1033950820,\n        1773005522,\n        1528561472,\n        976559285,\n        839263811,\n        515207941,\n        88259328,\n        999862042,\n        278485947,\n        337423713,\n        1396659120,\n        403778703,\n        1123720273,\n        1280314890,\n        1229704194,\n        1575998767,\n        1600038320,\n        453521515,\n        866331345,\n        412877840,\n        60755345,\n        14196166,\n        1493975856,\n        713593,\n        487101666,\n        500744337,\n        548561977,\n        552017438,\n        1948597105,\n        1060517442,\n        782972501,\n        653096203,\n        31812331,\n        1929607279,\n        1627551512,\n        1074301150,\n        19053580,\n        1291784360,\n        333312049,\n        1903734870,\n        966448202,\n        1883562658,\n        1350440220,\n        1448644102,\n        1489459036,\n        1700831799,\n        1250\n      ],\n   
   \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 3760827122461395656\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6600187,\n        -1.6628351,\n        -1.6632024,\n        -1.6885065,\n        -1.6844633,\n        -1.673482,\n        -1.6683668,\n        -1.703622,\n        -1.6935254,\n        -1.7039138,\n        -1.7054497,\n        -1.6792103,\n        -1.72292,\n        -1.7178843,\n        -1.7129887,\n        -1.7370491,\n        -1.9077396,\n        -1.7476121,\n        -1.8414545,\n        -1.9110444,\n        -1.801772,\n        -1.7981725,\n        -1.7427167,\n        -1.7117282,\n        -1.8734525,\n        -1.758567,\n        -1.7399035,\n        -1.7192584,\n        -1.7428987,\n        -1.766276,\n        -1.8003932,\n        -1.8911016,\n        -1.8654419,\n        -1.9670025,\n        -1.9240105,\n        -1.7802987,\n        -1.8447324,\n        -1.967483,\n        -1.8591907,\n        -2.2108433,\n        -1.9602256,\n        -1.8811072,\n        -1.8307586,\n        -1.8283223,\n        -1.8461,\n        -1.8134967,\n        -1.9581954,\n        -1.8520461,\n        -2.2066836,\n        -1.8763704,\n        -1.8831326,\n        -1.8191657,\n        -2.065619,\n        -1.7961874,\n        -1.8889962,\n        -1.7972072,\n        -1.7482319,\n        -1.8282884,\n        -1.9215875,\n        -1.7745942,\n        -1.934545,\n        -1.9956965,\n        -1.9398265,\n        -2.018418,\n        -1.9581808,\n        -1.8738496,\n        -2.2210963,\n        -2.4836748,\n        -2.6438246,\n        -2.8173716,\n        -2.0052333,\n        -1.8060782,\n        -1.8316134,\n        -2.104083,\n        -2.064001,\n        -2.0468729,\n        -2.700277,\n        
-2.055099,\n        -2.107124,\n        -2.8776038,\n        -4.302666,\n        -2.7787154,\n        -2.3238614,\n        -2.3610747,\n        -2.1134987,\n        -1.8448327,\n        -2.189839,\n        -1.8508947,\n        -2.1103697,\n        -2.0987751,\n        -1.9045995,\n        -2.364662,\n        -1.9768771,\n        -2.3049514,\n        -2.373978,\n        -2.013711,\n        -2.3665648,\n        -3.1223068,\n        -2.4642084,\n        -2.2965417,\n        -2.0021834,\n        -2.0005822,\n        -2.0901864,\n        -2.5504415,\n        -1.8880817,\n        -2.134783,\n        -2.2326784,\n        -1.942063,\n        -2.0569196,\n        -1.966368,\n        -1.9174302,\n        -2.1160188,\n        -2.2602866,\n        -2.4733515,\n        -2.23828,\n        -1.9670134,\n        -1.8519195,\n        -2.0879674,\n        -1.9622679,\n        -2.7823813,\n        -2.1206293,\n        -2.3471045,\n        -1.9580667,\n        -3.0643625,\n        -2.1881764,\n        -2.4692035,\n        -2.1028755,\n        -2.6679616,\n        -2.442942,\n        -2.224716,\n        -2.0679758,\n        -2.283862,\n        -1.9222883,\n        -2.846171,\n        -2.9377835,\n        -2.5189304,\n        -4.815355,\n        -3.8114474,\n        -3.1087315,\n        -3.1940696,\n        -2.9174712,\n        -2.1778202,\n        -2.2337835,\n        -1.843772,\n        -4.1040983,\n        -2.920102,\n        -4.2883368,\n        -2.5598578,\n        -2.9980335,\n        -2.3457928,\n        -3.4544246,\n        -2.3266225,\n        -2.9991736,\n        -4.0963154,\n        -2.8809404,\n        -2.822911,\n        -2.7196505,\n        -3.5385666,\n        -3.0671763,\n        -4.2318783,\n        -3.1986744,\n        -5.5718246,\n        -6.046677,\n        -3.9159167,\n        -3.5294237,\n        -2.4407678,\n        -3.2104867,\n        -3.3342721,\n        -3.589185,\n        -4.893906,\n        -2.4044142,\n        -3.7951546,\n        -2.498565,\n        
-3.4903483,\n        -3.1365232,\n        -1.9214913,\n        -2.196149,\n        -2.4012504,\n        -3.4272356,\n        -2.222576,\n        -4.959666,\n        -2.093998,\n        -2.2162402,\n        -2.6007705,\n        -3.4578567,\n        -2.2444172,\n        -3.3098018,\n        -3.1957138,\n        -3.1875644,\n        -2.9233613,\n        -3.6141636,\n        -4.2357774,\n        -2.3627012,\n        -3.0946198,\n        -2.704567,\n        -4.9396143,\n        -3.7426734,\n        -3.4212067,\n        -3.576373,\n        -4.9750557,\n        -2.3576858,\n        -2.2333653,\n        -2.5606627,\n        -3.0442796,\n        -2.2103524,\n        -4.6263657,\n        -2.1121235,\n        -3.3557963,\n        -2.6780543,\n        -2.0564954,\n        -4.5643115,\n        -4.3294086,\n        -3.104085,\n        -3.434541,\n        -2.2409294,\n        -2.0642347,\n        -4.495369,\n        -2.8710287,\n        -3.2579474,\n        -2.7349076,\n        -5.1881814,\n        -2.744094,\n        -2.8721514,\n        -5.458582,\n        -2.5830178,\n        -3.0423794,\n        -2.320381,\n        -3.1947503,\n        -6.776149,\n        -2.886313,\n        -2.7594192,\n        -4.1165075,\n        -2.2551475,\n        -2.4149053,\n        -2.3541772,\n        -2.18199,\n        -2.1128635,\n        -4.0967073,\n        -2.4572492,\n        -3.3653438,\n        -5.049421,\n        -2.8168807,\n        -4.1294785,\n        -3.91239,\n        -2.467887,\n        -3.4557223,\n        -3.03364,\n        -4.037852,\n        -3.395431,\n        -3.4063814,\n        -3.2614224,\n        -3.9141514,\n        -3.2676873,\n        -3.3500817\n      ],\n      \"pointIndex\": [\n        7,\n        1251,\n        254,\n        1525571388,\n        1252725678,\n        734617135,\n        667315246,\n        1657761947,\n        45159910,\n        1062461257,\n        366110,\n        641470608,\n        1064143194,\n        646559403,\n        1483898601,\n        
859588358,\n        53598049,\n        855516880,\n        818783012,\n        519999467,\n        1645049211,\n        992924574,\n        220907159,\n        1841205239,\n        1342706414,\n        1224164520,\n        1198234193,\n        1736488113,\n        207560065,\n        345929946,\n        1488342345,\n        1395321750,\n        929518491,\n        853333241,\n        578273904,\n        1912236575,\n        828120018,\n        1770853159,\n        204242299,\n        1734994724,\n        751271264,\n        1516603618,\n        1916659496,\n        1225596508,\n        1873712255,\n        126275624,\n        1592779605,\n        1818674086,\n        1048352023,\n        273865496,\n        918513937,\n        1335984142,\n        1503695003,\n        1575075756,\n        131181001,\n        74515481,\n        1741445112,\n        638558354,\n        1206870861,\n        1359667607,\n        111902919,\n        400935972,\n        1669091394,\n        682883091,\n        1510216409,\n        937889914,\n        226937294,\n        628214313,\n        1082311274,\n        202884689,\n        509614016,\n        1324335895,\n        315225316,\n        574261824,\n        735561627,\n        1070805347,\n        1383128251,\n        1721308942,\n        729293802,\n        1235113953,\n        276931205,\n        1140715726,\n        12209130,\n        1532289813,\n        1218043062,\n        1428698071,\n        1424460215,\n        1549987\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -1114890292068028840\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7043537,\n        -1.7149372,\n        -1.7166703,\n        -1.7321012,\n        -1.7449932,\n       
 -1.7227896,\n        -1.7189212,\n        -1.7613436,\n        -1.7871635,\n        -1.7572314,\n        -1.9100767,\n        -1.8015554,\n        -1.7829351,\n        -1.745284,\n        -1.7499497,\n        -1.7995156,\n        -1.8531065,\n        -1.8204864,\n        -1.8014812,\n        -1.7835534,\n        -1.7953764,\n        -1.9277264,\n        -2.0113738,\n        -1.8776013,\n        -1.8830832,\n        -1.8430904,\n        -1.9128852,\n        -1.7552594,\n        -1.7954891,\n        -1.8587546,\n        -1.8159387,\n        -1.8180984,\n        -2.0760508,\n        -1.9116563,\n        -1.8913791,\n        -1.8495423,\n        -2.3682237,\n        -1.85536,\n        -1.8663213,\n        -1.8894315,\n        -1.8001022,\n        -1.9510405,\n        -2.0332441,\n        -2.300753,\n        -1.9995476,\n        -2.3203413,\n        -2.022984,\n        -2.4649758,\n        -1.9842328,\n        -1.9025321,\n        -2.0115871,\n        -1.9769313,\n        -1.9613022,\n        -2.3817139,\n        -1.9380883,\n        -1.9054426,\n        -1.7931068,\n        -1.8823017,\n        -2.0860248,\n        -1.9546908,\n        -1.8849564,\n        -1.8492993,\n        -2.1535194,\n        -1.8650378,\n        -1.9573538,\n        -2.232175,\n        -2.7863712,\n        -2.6460342,\n        -2.3452928,\n        -2.1120064,\n        -2.1006784,\n        -2.188658,\n        -1.8926909,\n        -2.4513972,\n        -2.63265,\n        -1.89713,\n        -1.9279783,\n        -2.0149734,\n        -2.3423433,\n        -1.9609698,\n        -2.8362937,\n        -2.5565622,\n        -2.5604684,\n        -2.0057104,\n        -2.191599,\n        -2.223279,\n        -3.202308,\n        -2.3193972,\n        -2.7031338,\n        -2.5877492,\n        -2.016321,\n        -2.7814374,\n        -2.8090856,\n        -2.025436,\n        -2.1793358,\n        -2.4806178,\n        -4.986853,\n        -2.0887856,\n        -1.992191,\n        -2.2180266,\n        -2.1675045,\n        
-2.785531,\n        -2.471143,\n        -2.0163069,\n        -2.1689491,\n        -2.176144,\n        -2.142885,\n        -2.805913,\n        -2.7247162,\n        -1.9667814,\n        -3.225324,\n        -1.9381685,\n        -2.148721,\n        -1.8139349,\n        -2.243579,\n        -2.0078924,\n        -1.9078774,\n        -2.228902,\n        -2.2091773,\n        -2.0057032,\n        -2.161559,\n        -2.1420436,\n        -2.3776362,\n        -1.8866057,\n        -1.9161162,\n        -2.997531,\n        -3.6017506,\n        -1.8844341,\n        -3.650594,\n        -2.0622656,\n        -2.4955025,\n        -6.297177,\n        -2.4560344,\n        -2.827355,\n        -3.3375702,\n        -4.2595506,\n        -3.6476152,\n        -5.298893,\n        -4.6368704,\n        -2.5827596,\n        -4.727267,\n        -2.9171727,\n        -3.3863933,\n        -2.998544,\n        -3.1508188,\n        -2.3191326,\n        -3.1005151,\n        -2.5385091,\n        -2.9375808,\n        -5.3113475,\n        -4.4594173,\n        -2.7024257,\n        -1.9682808,\n        -3.3948848,\n        -5.8313107,\n        -2.652302,\n        -3.31744,\n        -2.7928,\n        -2.4913106,\n        -2.1816418,\n        -3.0713098,\n        -2.9353008,\n        -3.9156651,\n        -3.589956,\n        -3.0757117,\n        -3.907892,\n        -4.369662,\n        -3.1807384,\n        -5.566969,\n        -2.5556684,\n        -3.356561,\n        -4.061002,\n        -2.6578262,\n        -3.358592,\n        -4.061826,\n        -3.035102,\n        -2.3694582,\n        -6.079366,\n        -4.249974,\n        -2.6013722,\n        -2.5957072,\n        -2.3388069,\n        -2.0346706,\n        -3.5047348,\n        -4.197596,\n        -4.9675274,\n        -3.9339316,\n        -4.9094734,\n        -2.1441245,\n        -3.6834774,\n        -2.2312505,\n        -3.477304,\n        -5.026757,\n        -7.4546056,\n        -5.137707,\n        -2.6693206,\n        -2.614613,\n        -3.6004379,\n        
-4.7180634,\n        -2.28078,\n        -2.42328,\n        -2.7575917,\n        -2.2931254,\n        -3.3485951,\n        -3.7159684,\n        -3.7249277,\n        -4.1952395,\n        -2.490811,\n        -2.0773237,\n        -2.708938,\n        -2.8349159,\n        -2.1985443,\n        -2.5952754,\n        -3.3955815,\n        -2.7381892,\n        -2.8703403,\n        -3.5668871,\n        -2.8732376,\n        -3.704257,\n        -1.9876751,\n        -5.8961663,\n        -3.6108832,\n        -5.704544,\n        -2.116727,\n        -2.1399906,\n        -3.2782805,\n        -3.2339072,\n        -4.4758325,\n        -2.4205503,\n        -3.610604,\n        -3.8915148,\n        -4.0522075,\n        -2.0418868,\n        -7.400977,\n        -4.301539,\n        -2.3623009,\n        -3.504025,\n        -2.3166306,\n        -3.0060487,\n        -3.201693,\n        -2.291483,\n        -3.7904406,\n        -3.120234,\n        -2.4891293,\n        -2.1493428,\n        -2.5106375,\n        -3.7456133,\n        -2.1408231,\n        -3.118479,\n        -2.2031925,\n        -2.4266295,\n        -3.3329139,\n        -4.2434278,\n        -4.0106225,\n        -6.332322,\n        -2.2949204\n      ],\n      \"pointIndex\": [\n        1,\n        1256,\n        256,\n        1800496310,\n        189774369,\n        1190351877,\n        196117206,\n        1209024955,\n        119934254,\n        1060414962,\n        477995845,\n        1078535456,\n        1080003549,\n        269955583,\n        484283630,\n        137292677,\n        1183501254,\n        440274210,\n        1267133467,\n        1592147376,\n        817636891,\n        1473156248,\n        1463437761,\n        1788508518,\n        1194396926,\n        1773629862,\n        1137494071,\n        989375137,\n        1445520569,\n        726923453,\n        985493177,\n        857505566,\n        1568025519,\n        459218697,\n        474222116,\n        510601635,\n        943791427,\n        1120837853,\n        
613003481,\n        49457990,\n        1414445346,\n        927181008,\n        1087892123,\n        1239384044,\n        1588103553,\n        1031660723,\n        105739915,\n        1975683509,\n        280236125,\n        1693635831,\n        25808913,\n        313240311,\n        250470390,\n        337217296,\n        1350402607,\n        829898214,\n        84780018,\n        1321833974,\n        140438629,\n        1625445113,\n        1235862771,\n        31884402,\n        731165007,\n        1124183235,\n        306538204,\n        708956841,\n        1789299587,\n        1665028448,\n        1790693330,\n        1617910472,\n        543099424,\n        1793169650,\n        584141920,\n        1896596068,\n        1551822291,\n        616405006,\n        700958677,\n        1210142631,\n        122109340,\n        1978669206,\n        507042740,\n        1070449643,\n        1067804100,\n        1832375323,\n        1185628390,\n        1315264144,\n        1390927903,\n        1837634414,\n        1255\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -8998098257901219462\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4847924,\n        -1.50701,\n        -1.4874974,\n        -1.509434,\n        -1.5804005,\n        -1.517309,\n        -1.4889102,\n        -1.5924674,\n        -1.5380169,\n        -1.5827285,\n        -1.5945977,\n        -1.6033465,\n        -1.522429,\n        -1.5085082,\n        -1.5259198,\n        -1.6608759,\n        -1.7157124,\n        -1.6005316,\n        -1.7098669,\n        -1.6417782,\n        -1.7827258,\n        -1.6653897,\n        -1.761043,\n        -1.6149433,\n        -1.8048114,\n        -1.6053822,\n        -1.5297601,\n    
    -1.5150692,\n        -1.5335847,\n        -1.5741924,\n        -1.5903218,\n        -1.6950215,\n        -1.6855102,\n        -1.7289294,\n        -1.7847432,\n        -1.7010726,\n        -1.7668756,\n        -1.9151553,\n        -1.9073898,\n        -1.7360523,\n        -1.8851597,\n        -1.8604245,\n        -1.820512,\n        -1.7844313,\n        -1.6933676,\n        -1.9365067,\n        -1.8104782,\n        -1.8791007,\n        -1.6876923,\n        -1.8407286,\n        -1.87232,\n        -1.613756,\n        -1.66301,\n        -1.5603495,\n        -1.7004238,\n        -1.6415001,\n        -1.721441,\n        -2.1622522,\n        -1.5830246,\n        -1.7812307,\n        -1.5904065,\n        -1.61179,\n        -1.7648445,\n        -2.074227,\n        -2.0032582,\n        -1.6958426,\n        -1.7092977,\n        -2.421047,\n        -1.9955932,\n        -2.127096,\n        -2.2460995,\n        -2.0733979,\n        -2.1900704,\n        -2.218347,\n        -1.9587811,\n        -2.30464,\n        -2.3489394,\n        -2.2056513,\n        -2.4812882,\n        -2.078726,\n        -2.0368192,\n        -2.1401744,\n        -1.9548048,\n        -2.2267509,\n        -2.3673909,\n        -1.8815244,\n        -2.0716653,\n        -1.8121344,\n        -1.9581162,\n        -1.7133777,\n        -1.8833144,\n        -2.7113278,\n        -2.1894011,\n        -2.0413165,\n        -2.2483463,\n        -2.0375435,\n        -2.023743,\n        -1.7335436,\n        -1.8081505,\n        -1.9442836,\n        -1.9203551,\n        -2.0609462,\n        -1.9958264,\n        -1.8094562,\n        -1.7540293,\n        -1.7631687,\n        -1.9121307,\n        -1.7658973,\n        -1.6152104,\n        -2.3850179,\n        -1.7996753,\n        -1.9506682,\n        -2.1833467,\n        -1.836597,\n        -2.4442554,\n        -2.2432363,\n        -2.3038113,\n        -1.69391,\n        -2.0176008,\n        -1.9852964,\n        -2.22754,\n        -1.5936773,\n        -2.037487,\n        
-2.4741428,\n        -1.724286,\n        -2.2496772,\n        -2.1840785,\n        -2.8905258,\n        -2.5314753,\n        -3.2166378,\n        -3.863982,\n        -1.7463908,\n        -2.2845786,\n        -2.1900523,\n        -1.9322034,\n        -2.7665477,\n        -5.5880685,\n        -5.0508027,\n        -2.4426787,\n        -2.914124,\n        -3.4584386,\n        -2.4449632,\n        -2.3848393,\n        -2.6335444,\n        -4.1916275,\n        -2.6579905,\n        -2.668138,\n        -4.2272997,\n        -3.0954368,\n        -4.5089917,\n        -2.0358038,\n        -7.6616907,\n        -3.7811038,\n        -2.4330842,\n        -2.4255292,\n        -3.343035,\n        -4.25475,\n        -2.5636013,\n        -3.1577828,\n        -5.828969,\n        -2.755335,\n        -2.1080794,\n        -2.4202938,\n        -3.3008184,\n        -3.6514235,\n        -2.5095358,\n        -3.8863716,\n        -2.957376,\n        -3.5885198,\n        -2.5151875,\n        -2.5355968,\n        -2.064986,\n        -2.024324,\n        -3.3344228,\n        -2.124326,\n        -2.6870174,\n        -1.8441193,\n        -3.208695,\n        -2.3718288,\n        -1.7876742,\n        -5.6086903,\n        -2.0368898,\n        -2.1928809,\n        -4.623443,\n        -3.5671632,\n        -2.7656476,\n        -2.826614,\n        -2.6780763,\n        -2.2869632,\n        -2.5077014,\n        -2.7039807,\n        -2.983468,\n        -2.171145,\n        -2.48659,\n        -3.0943327,\n        -1.8994596,\n        -1.8423891,\n        -2.0048766,\n        -2.3960173,\n        -2.6402557,\n        -2.8885286,\n        -2.8480966,\n        -2.2679713,\n        -4.381718,\n        -2.0998023,\n        -2.8364131,\n        -3.0806136,\n        -2.1412249,\n        -1.8483372,\n        -2.3394818,\n        -3.4883568,\n        -2.0058262,\n        -2.246459,\n        -3.0723298,\n        -2.3015406,\n        -2.499738,\n        -2.8829854,\n        -2.6171253,\n        -2.0047848,\n        
-2.90026,\n        -6.3821893,\n        -1.9659716,\n        -2.7831266,\n        -2.6806984,\n        -5.0689373,\n        -2.3406537,\n        -2.19754,\n        -2.8538144,\n        -2.9117985,\n        -3.5243957,\n        -2.9115815,\n        -3.5294943,\n        -3.2356849,\n        -3.04845,\n        -2.9486272,\n        -3.6131105,\n        -1.977847,\n        -6.3758364,\n        -2.1673677,\n        -2.0754735,\n        -2.849869,\n        -3.628077,\n        -2.2775931,\n        -2.9878826,\n        -1.9839334,\n        -2.5885856,\n        -2.2625816,\n        -4.0681295,\n        -3.3519099,\n        -2.0539382,\n        -3.1593044,\n        -4.923024,\n        -2.3891122,\n        -4.795825,\n        -4.269026\n      ],\n      \"pointIndex\": [\n        0,\n        1255,\n        255,\n        710517003,\n        550006573,\n        1460889948,\n        278313358,\n        1928382954,\n        1457868774,\n        375750180,\n        659759259,\n        1245010209,\n        1393816621,\n        1636103232,\n        4955704,\n        320083063,\n        7681779,\n        952566350,\n        47071417,\n        1922726604,\n        703184345,\n        1166656767,\n        1673490262,\n        1537542088,\n        267954450,\n        123703178,\n        1580307934,\n        1819137992,\n        192979481,\n        1164935904,\n        1382586570,\n        1219881396,\n        516977693,\n        1870190553,\n        1809325884,\n        584116891,\n        147195552,\n        669041452,\n        837313472,\n        731650393,\n        471068713,\n        1265016102,\n        1211335961,\n        1217334750,\n        1427413919,\n        1822094254,\n        769160,\n        265112274,\n        969608639,\n        627954958,\n        1097449659,\n        974211871,\n        1811479358,\n        1339015972,\n        1232402352,\n        30077149,\n        1551276114,\n        1814770485,\n        1115304358,\n        762235225,\n        591355174,\n        
627715801,\n        406647944,\n        1666268896,\n        1320446352,\n        474012195,\n        4550334,\n        1336588673,\n        916815690,\n        1115793964,\n        980593224,\n        637951823,\n        1489716944,\n        414502382,\n        813421999,\n        1643886547,\n        1470838323,\n        1925194630,\n        1453541655,\n        975575488,\n        1013479497,\n        923605226,\n        390447053,\n        942631434,\n        1204611503,\n        1686576751,\n        1845234451,\n        1981264463\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 38561163469837675\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4610527,\n        -1.48245,\n        -1.4639573,\n        -1.5015463,\n        -1.5454192,\n        -1.4826655,\n        -1.4737648,\n        -1.6106573,\n        -1.525192,\n        -1.5471185,\n        -1.6125231,\n        -1.505095,\n        -1.5421757,\n        -1.5058594,\n        -1.5234768,\n        -1.6741594,\n        -1.9094331,\n        -1.7995727,\n        -1.6336598,\n        -1.8679664,\n        -1.595384,\n        -1.6628966,\n        -1.6441401,\n        -1.7069467,\n        -1.6833416,\n        -1.9401432,\n        -1.588832,\n        -1.5424707,\n        -1.7687211,\n        -1.5431672,\n        -1.5274942,\n        -1.8162181,\n        -1.7659051,\n        -1.9308692,\n        -1.9288825,\n        -1.9051023,\n        -1.9985346,\n        -1.6765146,\n        -1.6832094,\n        -1.8760269,\n        -1.9810698,\n        -1.6409447,\n        -1.6005028,\n        -1.9358605,\n        -2.1724792,\n        -1.6857752,\n        -1.6740047,\n        -1.8685497,\n        -1.9233545,\n        -1.8496706,\n        
-1.6932981,\n        -2.1886141,\n        -1.9640974,\n        -1.6953177,\n        -2.0331624,\n        -1.9634979,\n        -1.6556059,\n        -1.8025886,\n        -1.9032472,\n        -1.5521348,\n        -1.7520154,\n        -1.5808368,\n        -1.5562763,\n        -2.6935897,\n        -2.6085107,\n        -2.0786266,\n        -1.9147294,\n        -1.9673487,\n        -2.1261866,\n        -2.8225563,\n        -3.1242068,\n        -2.2550943,\n        -2.5607169,\n        -2.4968712,\n        -2.1055422,\n        -2.3150427,\n        -1.7537509,\n        -1.737771,\n        -2.0998735,\n        -2.077217,\n        -2.2581348,\n        -2.054537,\n        -2.0298986,\n        -1.9485894,\n        -1.792361,\n        -1.7761369,\n        -2.2579038,\n        -2.0488272,\n        -2.5935009,\n        -2.393297,\n        -2.56187,\n        -3.085866,\n        -1.7331612,\n        -1.8431895,\n        -1.9609962,\n        -2.9646697,\n        -2.5008316,\n        -2.0777664,\n        -1.9832842,\n        -1.9663002,\n        -2.3825483,\n        -1.9030224,\n        -1.8568329,\n        -2.2311988,\n        -2.5922384,\n        -2.0080917,\n        -2.460374,\n        -2.4697714,\n        -1.7773494,\n        -2.2537482,\n        -2.8810356,\n        -2.011637,\n        -2.4875133,\n        -2.241009,\n        -2.056611,\n        -1.8162489,\n        -1.9599434,\n        -2.1026204,\n        -1.9309919,\n        -1.6801635,\n        -2.6104155,\n        -1.9660062,\n        -1.8532897,\n        -1.9000486,\n        -2.7101104,\n        -1.6306072,\n        -1.6173607,\n        -3.9097295,\n        -3.8940182,\n        -2.6971962,\n        -3.3223963,\n        -3.4434423,\n        -3.4364245,\n        -2.1258404,\n        -2.3368773,\n        -2.441312,\n        -2.1282659,\n        -3.1267962,\n        -3.5049665,\n        -3.6809883,\n        -3.9945335,\n        -3.6619272,\n        -5.4106927,\n        -2.2863889,\n        -2.4651685,\n        -2.6824324,\n     
   -2.964626,\n        -3.6960573,\n        -3.0068357,\n        -2.2048318,\n        -4.7914104,\n        -2.3593392,\n        -2.8784318,\n        -6.197217,\n        -3.3857186,\n        -3.8565915,\n        -4.5197845,\n        -2.6455338,\n        -2.2545123,\n        -2.4242928,\n        -3.8621643,\n        -2.764206,\n        -2.6078105,\n        -2.3123567,\n        -2.1793225,\n        -2.3973627,\n        -3.571396,\n        -2.1274083,\n        -3.0041566,\n        -1.8787048,\n        -1.8227599,\n        -4.072172,\n        -2.113721,\n        -3.4936752,\n        -3.411475,\n        -4.599114,\n        -2.5682142,\n        -2.9804878,\n        -4.6184344,\n        -5.995202,\n        -2.5762093,\n        -3.5060537,\n        -4.381844,\n        -3.2079947,\n        -5.117505,\n        -3.109518,\n        -1.779692,\n        -2.0144317,\n        -3.0996954,\n        -4.316557,\n        -2.463515,\n        -3.9305122,\n        -3.3319016,\n        -2.8997607,\n        -3.4705358,\n        -2.4835873,\n        -5.850858,\n        -1.9866576,\n        -3.528817,\n        -3.0366244,\n        -3.3330505,\n        -2.718776,\n        -2.718735,\n        -1.9553635,\n        -3.5689576,\n        -2.9435978,\n        -4.1096764,\n        -2.6101785,\n        -4.1002913,\n        -2.6639524,\n        -2.9546604,\n        -2.4940739,\n        -2.1748304,\n        -4.535578,\n        -4.664909,\n        -2.7323165,\n        -4.75438,\n        -3.2862427,\n        -3.2614667,\n        -2.484071,\n        -3.7774487,\n        -3.1729262,\n        -7.547099,\n        -2.623589,\n        -2.038379,\n        -3.7038631,\n        -2.7044697,\n        -3.5491626,\n        -4.805202,\n        -2.4135547,\n        -2.1463873,\n        -2.3763375,\n        -2.5803976,\n        -2.7008436,\n        -5.6769996,\n        -4.0309668,\n        -3.6546214,\n        -4.3860655,\n        -3.794969,\n        -3.0905387,\n        -3.366902,\n        -3.4968405,\n        
-5.5036583,\n        -2.1061406,\n        -2.2766025,\n        -2.2931263,\n        -2.0818744,\n        -3.2892034,\n        -1.9717209,\n        -4.085529,\n        -3.8437142,\n        -3.004554,\n        -1.7959849,\n        -5.085871,\n        -1.8072813\n      ],\n      \"pointIndex\": [\n        3,\n        1247,\n        255,\n        1199942421,\n        562999165,\n        1070673656,\n        1648005124,\n        1929122355,\n        1827691693,\n        1321528356,\n        1421296621,\n        1292041204,\n        1667219473,\n        600183277,\n        274451700,\n        305722385,\n        1287568165,\n        524209408,\n        1579828185,\n        1488493102,\n        1440127259,\n        1187010258,\n        1115156372,\n        1611712180,\n        1609013166,\n        435872960,\n        963643107,\n        1092400314,\n        260870723,\n        941609578,\n        330439147,\n        1452373916,\n        369867335,\n        1413235891,\n        453582315,\n        1807834238,\n        1883437356,\n        560520063,\n        1380675025,\n        731172719,\n        822295120,\n        1176500626,\n        1030630770,\n        884990575,\n        1562333977,\n        704088563,\n        35590968,\n        1814568983,\n        825249153,\n        1832027883,\n        323386335,\n        842455318,\n        1335947106,\n        1529407566,\n        300958270,\n        979530004,\n        319923248,\n        1589275081,\n        758569902,\n        351543768,\n        144906887,\n        375687507,\n        1671274399,\n        1111674965,\n        471064683,\n        451480909,\n        1673206955,\n        1014142190,\n        1607091130,\n        1352018168,\n        516567802,\n        746070329,\n        1688411892,\n        1085929213,\n        630386208,\n        719834486,\n        202105144,\n        751110218,\n        793271136,\n        1220769635,\n        814291763,\n        213857926,\n        1774853900,\n        1546837229,\n  
      1143631568,\n        1537069672,\n        1597334650,\n        1918918486\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -6370827395103410126\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5445218,\n        -1.5465407,\n        -1.5504966,\n        -1.547764,\n        -1.560619,\n        -1.5533254,\n        -1.5759339,\n        -1.5785238,\n        -1.6054047,\n        -1.5841495,\n        -1.576058,\n        -1.5569911,\n        -1.5982715,\n        -1.612058,\n        -1.5798635,\n        -1.5861368,\n        -1.6388417,\n        -1.6204617,\n        -1.6824679,\n        -1.6229888,\n        -1.6407508,\n        -1.6453023,\n        -1.5819863,\n        -1.566079,\n        -1.5928731,\n        -1.6855135,\n        -1.783996,\n        -1.7316606,\n        -1.6485487,\n        -1.7861702,\n        -1.6092962,\n        -1.6261423,\n        -2.1053495,\n        -1.6423863,\n        -1.7105014,\n        -1.7465937,\n        -1.6570605,\n        -1.8360097,\n        -1.9010681,\n        -1.6361474,\n        -1.77335,\n        -1.6803484,\n        -2.0501294,\n        -1.7019562,\n        -1.7934632,\n        -2.0073125,\n        -1.768919,\n        -1.6914185,\n        -1.7652011,\n        -1.66249,\n        -1.6217879,\n        -1.8140017,\n        -1.7681481,\n        -1.9364661,\n        -1.8415885,\n        -1.7473153,\n        -1.9307748,\n        -1.6952391,\n        -1.7963362,\n        -2.0055091,\n        -1.8705586,\n        -1.8039247,\n        -1.6238333,\n        -1.8887676,\n        -2.1948328,\n        -2.1852083,\n        -2.309651,\n        -1.8040459,\n        -1.8527874,\n        -1.8589574,\n        -1.801687,\n        -2.8378289,\n        
-3.095742,\n        -2.419655,\n        -1.6732262,\n        -1.8548335,\n        -2.3534513,\n        -2.5250976,\n        -2.485404,\n        -1.7551335,\n        -2.627682,\n        -1.804988,\n        -1.9720448,\n        -1.7464341,\n        -2.1275232,\n        -2.0847194,\n        -2.2849941,\n        -1.864146,\n        -1.8242195,\n        -1.986032,\n        -2.1918647,\n        -2.9306834,\n        -3.3716435,\n        -1.7709521,\n        -2.3122852,\n        -2.3475082,\n        -2.1665761,\n        -2.0726194,\n        -2.2897077,\n        -1.8331085,\n        -1.9517444,\n        -1.7562053,\n        -1.8337712,\n        -1.8928871,\n        -2.0578384,\n        -1.8316461,\n        -1.8601738,\n        -2.2226956,\n        -2.4449635,\n        -2.0460827,\n        -1.9298851,\n        -1.9729121,\n        -1.8871083,\n        -1.9636409,\n        -2.0409806,\n        -1.7165915,\n        -2.568095,\n        -2.0460265,\n        -2.384036,\n        -3.0289366,\n        -2.0829391,\n        -3.1761105,\n        -2.3723962,\n        -2.9863064,\n        -1.8120085,\n        -1.9704975,\n        -1.7034762,\n        -1.9443214,\n        -3.274785,\n        -3.090201,\n        -2.448313,\n        -3.8890853,\n        -2.2987845,\n        -2.445577,\n        -3.3676233,\n        -2.4992247,\n        -1.8911582,\n        -7.054657,\n        -3.7401574,\n        -2.9138978,\n        -2.9643297,\n        -3.8779972,\n        -1.8223864,\n        -3.0082738,\n        -2.850572,\n        -3.2850523,\n        -4.746652,\n        -4.583561,\n        -2.705282,\n        -3.0956216,\n        -3.2120516,\n        -4.7790074,\n        -2.0452852,\n        -2.407949,\n        -3.5124986,\n        -3.506919,\n        -2.8316567,\n        -2.4972572,\n        -4.2897453,\n        -3.2318325,\n        -2.0848386,\n        -2.7511337,\n        -2.825714,\n        -2.2840638,\n        -2.1273985,\n        -4.573858,\n        -3.5196013,\n        -2.3099709,\n        
-1.9768105,\n        -2.6367333,\n        -3.5040345,\n        -2.2665486,\n        -2.7077358,\n        -2.5872097,\n        -3.828445,\n        -3.6953075,\n        -4.895928,\n        -2.4355423,\n        -2.0899951,\n        -2.0495608,\n        -2.7160594,\n        -3.9311116,\n        -3.3196929,\n        -4.747052,\n        -3.7480927,\n        -4.6403666,\n        -3.5007823,\n        -1.8442141,\n        -2.6454961,\n        -4.8937225,\n        -5.3806396,\n        -4.3192015,\n        -2.6148129,\n        -2.677922,\n        -2.215932,\n        -3.3537917,\n        -2.6788485,\n        -2.9680216,\n        -4.078896,\n        -1.8529885,\n        -2.0642333,\n        -2.340028,\n        -1.9987302,\n        -3.3039026,\n        -2.392308,\n        -2.1998007,\n        -3.9700627,\n        -2.3132365,\n        -1.9336995,\n        -3.8793678,\n        -3.0573602,\n        -3.6135347,\n        -4.379453,\n        -1.9174479,\n        -3.2993047,\n        -3.3862681,\n        -2.5543268,\n        -2.916683,\n        -6.335731,\n        -2.5580328,\n        -2.3565786,\n        -2.3169065,\n        -2.1767182,\n        -2.2160468,\n        -2.853828,\n        -4.616713,\n        -3.1134565,\n        -3.1778045,\n        -2.9624515,\n        -2.313375,\n        -2.166796,\n        -1.996399,\n        -3.6453948,\n        -4.732854,\n        -3.3619401,\n        -2.051775,\n        -2.1180842,\n        -2.8457713,\n        -2.5686061,\n        -3.9750416,\n        -3.261386,\n        -3.2534878,\n        -2.3567991,\n        -3.843067,\n        -3.2058804,\n        -3.351548,\n        -4.033013,\n        -3.2406118,\n        -4.746862,\n        -1.873855,\n        -2.789624,\n        -2.095337,\n        -2.0180707,\n        -5.4625974,\n        -3.1724863,\n        -2.760957\n      ],\n      \"pointIndex\": [\n        2,\n        1238,\n        256,\n        858419605,\n        469297476,\n        1490599725,\n        1005017321,\n        1605967623,\n        
1016732321,\n        374594265,\n        466246996,\n        838168704,\n        252609730,\n        1175134998,\n        25339492,\n        23200487,\n        1619181267,\n        805357786,\n        461366673,\n        633311396,\n        1065483570,\n        787080239,\n        1444081752,\n        1298668912,\n        244844311,\n        1410626199,\n        291709064,\n        1305596026,\n        763341988,\n        342862589,\n        629194162,\n        1611349848,\n        414202777,\n        934582831,\n        1219546,\n        168058702,\n        1328702083,\n        1554074388,\n        1766367113,\n        918632197,\n        1621630829,\n        886727885,\n        1063006415,\n        1128582551,\n        231273008,\n        1488830882,\n        681790444,\n        1700116165,\n        285796646,\n        1137975848,\n        405847511,\n        8924900,\n        1405645472,\n        1738960049,\n        679238652,\n        64002872,\n        666276996,\n        734704128,\n        371354107,\n        1484993525,\n        508819804,\n        1850027246,\n        792953121,\n        1580734430,\n        438885431,\n        95284342,\n        408412746,\n        1577909765,\n        861150228,\n        749262352,\n        961671547,\n        1291606525,\n        1279110026,\n        684142540,\n        187173978,\n        1579637657,\n        227902840,\n        1686808732,\n        1616456420,\n        872611235,\n        866293550,\n        1390806705,\n        1131570308,\n        1076710441,\n        1656938670,\n        1218981380,\n        1514339915,\n        1813021837,\n        1236\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 8772489341801893028\n    },\n    {\n      
\"version\": \"2.0\",\n      \"weight\": [\n        -1.0728947,\n        -1.5259328,\n        -1.5271281,\n        -1.5343946,\n        -1.560383,\n        -1.5384734,\n        -1.5312777,\n        -1.5520419,\n        -1.6264254,\n        -1.6279063,\n        -1.5762386,\n        -1.54961,\n        -1.5745214,\n        -1.670223,\n        -1.5665997,\n        -1.6381234,\n        -1.7441238,\n        -1.6393917,\n        -1.6364237,\n        -1.6332041,\n        -1.6408373,\n        -1.6322473,\n        -1.7203869,\n        -1.7163656,\n        -1.7157981,\n        -1.6053427,\n        -1.6213709,\n        -1.6732472,\n        -1.8829713,\n        -1.5831262,\n        -1.5804985,\n        -1.8121722,\n        -1.9040909,\n        -1.855875,\n        -1.7723962,\n        -1.756655,\n        -1.7166331,\n        -1.7561811,\n        -1.7495925,\n        -1.7460877,\n        -1.8043892,\n        -2.0242321,\n        -1.6566799,\n        -1.8006837,\n        -1.7637908,\n        -1.7983268,\n        -1.8492807,\n        -1.938855,\n        -1.8046734,\n        -1.9368708,\n        -1.7219111,\n        -1.647257,\n        -1.8743986,\n        -1.9102929,\n        -1.6376833,\n        -1.8193051,\n        -1.7348002,\n        -1.9335852,\n        -1.8950299,\n        -1.6522033,\n        -1.8217105,\n        -1.8316988,\n        -1.583854,\n        -1.8243084,\n        -1.9072149,\n        -2.0794513,\n        -1.9411082,\n        -2.0080197,\n        -2.3706264,\n        -3.8453596,\n        -1.9341033,\n        -1.7909273,\n        -2.22557,\n        -1.8950368,\n        -1.8222072,\n        -2.0923498,\n        -1.9481359,\n        -1.9054334,\n        -1.8847692,\n        -1.9231081,\n        -1.8395206,\n        -1.816015,\n        -2.1411002,\n        -2.6224537,\n        -2.1703074,\n        -1.8760118,\n        -1.7247542,\n        -2.1039886,\n        -1.941013,\n        -2.1875296,\n        -2.6584635,\n        -2.4773474,\n        -2.0004907,\n        
-2.427941,\n        -1.8578634,\n        -1.993672,\n        -1.9411371,\n        -2.4460554,\n        -2.4973977,\n        -2.587589,\n        -2.143168,\n        -1.9661105,\n        -1.8435761,\n        -1.7624182,\n        -1.8321925,\n        -2.2877474,\n        -1.8888198,\n        -1.9872756,\n        -2.3712525,\n        -2.053333,\n        -2.304413,\n        -1.8495423,\n        -1.8473682,\n        -1.7911122,\n        -2.255487,\n        -1.9753639,\n        -2.8610258,\n        -2.6455243,\n        -1.9160482,\n        -2.0074358,\n        -1.7680875,\n        -2.0996945,\n        -2.1236439,\n        -3.1808512,\n        -2.3633845,\n        -2.0920115,\n        -1.5991555,\n        -2.0979178,\n        -3.580935,\n        -3.3564754,\n        -2.1720605,\n        -3.337978,\n        -2.8830314,\n        -2.6280017,\n        -2.653904,\n        -2.2192788,\n        -2.0428448,\n        -4.37789,\n        -3.6127176,\n        -3.9183333,\n        -4.027198,\n        -2.2029734,\n        -2.4479206,\n        -4.0643425,\n        -2.4019227,\n        -2.8332531,\n        -3.725749,\n        -2.8588874,\n        -2.51254,\n        -2.4987812,\n        -3.2159305,\n        -5.0033402,\n        -3.5965152,\n        -2.3439965,\n        -5.258561,\n        -2.116983,\n        -2.1509147,\n        -2.1029966,\n        -2.2929664,\n        -2.0077374,\n        -2.858487,\n        -2.585806,\n        -5.1803718,\n        -2.1305256,\n        -2.8780572,\n        -3.130301,\n        -3.3613229,\n        -3.407688,\n        -2.8195002,\n        -3.5945868,\n        -2.1957138,\n        -2.7049334,\n        -3.6004672,\n        -3.220083,\n        -5.7187304,\n        -2.3223257,\n        -4.639869,\n        -2.3436725,\n        -2.6810954,\n        -3.5382938,\n        -2.8885171,\n        -5.0835733,\n        -3.452059,\n        -2.9016135,\n        -4.5785546,\n        -2.4024642,\n        -2.6227605,\n        -5.620513,\n        -2.71551,\n        
-1.8989049,\n        -2.6357558,\n        -2.3210332,\n        -2.2360513,\n        -2.0786529,\n        -2.1857975,\n        -4.588789,\n        -2.746207,\n        -2.5993383,\n        -3.357406,\n        -3.7500455,\n        -4.308708,\n        -6.10445,\n        -2.2139173,\n        -3.3152285,\n        -3.428218,\n        -2.914128,\n        -2.5067766,\n        -3.2157886,\n        -1.8737192,\n        -2.5115092,\n        -1.9986128,\n        -2.4794385,\n        -4.047551,\n        -2.1282003,\n        -2.304178,\n        -2.4129395,\n        -3.613385,\n        -3.106013,\n        -2.9582293,\n        -4.256424,\n        -2.186439,\n        -2.4176476,\n        -2.6717122,\n        -2.3630428,\n        -2.2466805,\n        -2.6661818,\n        -1.8820276,\n        -2.5826948,\n        -2.4682412,\n        -2.6981506,\n        -3.0258257,\n        -2.1245618,\n        -3.3690262,\n        -3.0876715,\n        -4.162996,\n        -4.5142965,\n        -3.7879217,\n        -2.9665167,\n        -2.791171,\n        -3.5369656,\n        -2.3917305,\n        -2.2554045,\n        -2.6315799,\n        -2.3195806,\n        -2.5932949,\n        -2.5324724,\n        -2.643968,\n        -3.478174,\n        -3.8480647,\n        -4.838305,\n        -4.7640142,\n        -5.7706485,\n        -3.7859595,\n        -1.890822,\n        -3.0025935,\n        -6.5981126\n      ],\n      \"pointIndex\": [\n        0,\n        1255,\n        256,\n        451332295,\n        476844765,\n        353449948,\n        560430769,\n        524584,\n        316601115,\n        394867709,\n        1089903807,\n        681991040,\n        1250248753,\n        1465322267,\n        1221666542,\n        1459131120,\n        1521923180,\n        158727042,\n        493217101,\n        1954551683,\n        1608806476,\n        816219709,\n        739309430,\n        1924093989,\n        586175080,\n        1083141184,\n        1514287099,\n        383171479,\n        335468273,\n        
353301328,\n        1087214337,\n        1155612821,\n        422631183,\n        1753804395,\n        473325151,\n        699257600,\n        555443726,\n        1751772757,\n        869201381,\n        787347922,\n        1146538503,\n        1656581780,\n        596168698,\n        1687312381,\n        1815453422,\n        101975884,\n        95255200,\n        954426855,\n        809120055,\n        1809655805,\n        119992139,\n        1700763601,\n        559408775,\n        912305271,\n        932752032,\n        1105958944,\n        649154500,\n        717035920,\n        98100460,\n        310978596,\n        147069943,\n        1203687429,\n        1553807374,\n        1786111910,\n        1834199016,\n        1235945729,\n        482756131,\n        500599205,\n        516810663,\n        936616447,\n        1778816992,\n        587427863,\n        185535326,\n        1799358676,\n        1004637632,\n        200082407,\n        746739784,\n        1457045496,\n        804396910,\n        840863913,\n        858856371,\n        916721559,\n        1447419325,\n        1154031319,\n        1679446170,\n        1585138145,\n        1452617747,\n        1976564591,\n        153\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 5684369017740460239\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4911071,\n        -1.4948497,\n        -1.4967381,\n        -1.5310053,\n        -1.5021715,\n        -1.4987854,\n        -1.5207069,\n        -1.5417862,\n        -1.6222419,\n        -1.5158104,\n        -1.575677,\n        -1.5464264,\n        -1.5918624,\n        -1.6376232,\n        -1.5584885,\n        -1.5438135,\n        -1.7702165,\n        -1.7226433,\n        
-1.669732,\n        -1.6314669,\n        -1.5866266,\n        -1.6306599,\n        -1.5853622,\n        -1.6076901,\n        -1.7002006,\n        -1.7073035,\n        -1.6342174,\n        -1.733735,\n        -1.7246922,\n        -1.5689619,\n        -1.6232954,\n        -2.2763486,\n        -1.5731705,\n        -1.7965978,\n        -1.9153309,\n        -2.4673932,\n        -1.9342862,\n        -1.6788498,\n        -1.8781089,\n        -1.7806269,\n        -1.9041122,\n        -1.6793627,\n        -1.6082995,\n        -1.6951773,\n        -1.6438974,\n        -1.6304628,\n        -1.8834292,\n        -1.7857707,\n        -1.8056086,\n        -1.7213256,\n        -1.8343105,\n        -1.7301867,\n        -1.9566417,\n        -2.0860317,\n        -1.8436301,\n        -1.7512348,\n        -2.2028763,\n        -1.982251,\n        -1.7824874,\n        -1.5767955,\n        -2.0471509,\n        -1.7501755,\n        -1.810705,\n        -2.4892054,\n        -2.5443416,\n        -1.584471,\n        -1.8527033,\n        -2.0867581,\n        -1.9452515,\n        -2.5781212,\n        -2.4684212,\n        -3.2122548,\n        -2.4690588,\n        -1.9902788,\n        -2.1398559,\n        -2.1078768,\n        -3.1068273,\n        -2.477722,\n        -2.173951,\n        -1.9940237,\n        -2.3296304,\n        -2.3379073,\n        -1.9337028,\n        -1.7934093,\n        -1.7880102,\n        -2.239576,\n        -1.7530417,\n        -1.8559335,\n        -1.7293302,\n        -1.6868128,\n        -1.8564905,\n        -1.7350532,\n        -1.7223151,\n        -2.2533605,\n        -2.07792,\n        -1.903341,\n        -1.8106148,\n        -1.9080479,\n        -1.8621694,\n        -1.7881052,\n        -1.7325954,\n        -2.2205815,\n        -2.3311322,\n        -2.471289,\n        -2.7758646,\n        -2.4677973,\n        -2.3256052,\n        -2.9003494,\n        -2.2866623,\n        -2.0297635,\n        -1.9258659,\n        -1.8620938,\n        -2.0275686,\n        -2.7264936,\n    
    -2.297328,\n        -2.0284185,\n        -2.2965612,\n        -2.2951462,\n        -2.0015132,\n        -2.1415493,\n        -2.186847,\n        -2.1374779,\n        -2.862179,\n        -2.3183577,\n        -1.7961106,\n        -2.3331249,\n        -1.8480538,\n        -2.5384324,\n        -5.1544867,\n        -3.026151,\n        -5.105571,\n        -3.2670376,\n        -3.3749888,\n        -3.138002,\n        -3.2801085,\n        -2.3727238,\n        -2.2907364,\n        -3.2307806,\n        -3.6300344,\n        -2.747922,\n        -2.8748686,\n        -3.0952058,\n        -2.580954,\n        -4.8073583,\n        -3.73439,\n        -2.5728266,\n        -3.479313,\n        -2.0984015,\n        -4.709388,\n        -2.9657722,\n        -2.2065113,\n        -2.3346984,\n        -2.6162956,\n        -3.309454,\n        -3.9598234,\n        -2.563438,\n        -5.2532086,\n        -3.0477846,\n        -2.2498612,\n        -3.195835,\n        -2.1837943,\n        -2.565356,\n        -2.3919415,\n        -4.014871,\n        -2.4170105,\n        -3.0006533,\n        -2.1708453,\n        -2.0211153,\n        -4.1956906,\n        -3.5515172,\n        -4.3578415,\n        -2.9570305,\n        -4.4781837,\n        -2.6243632,\n        -2.7781944,\n        -2.0778558,\n        -2.5789433,\n        -2.71087,\n        -3.7467637,\n        -2.1903725,\n        -2.521412,\n        -2.07979,\n        -1.9037058,\n        -2.5047266,\n        -2.299304,\n        -2.8141892,\n        -2.1038964,\n        -2.707932,\n        -3.5448616,\n        -3.4673579,\n        -2.5393233,\n        -2.2156534,\n        -3.419818,\n        -1.8480599,\n        -2.6456294,\n        -1.9716583,\n        -2.0067434,\n        -2.6639864,\n        -3.4829478,\n        -2.005168,\n        -2.0726898,\n        -3.549752,\n        -2.0862799,\n        -2.6057477,\n        -3.413592,\n        -4.0227575,\n        -2.3710632,\n        -2.8646724,\n        -5.5461245,\n        -4.6805344,\n        
-5.1303153,\n        -2.591475,\n        -2.4750278,\n        -3.6639812,\n        -2.8366256,\n        -4.1023946,\n        -2.9231687,\n        -2.5331335,\n        -2.953019,\n        -2.6595,\n        -2.183007,\n        -2.2965136,\n        -2.019997,\n        -4.1200275,\n        -3.0445871,\n        -2.3739119,\n        -3.5416205,\n        -6.0304656,\n        -2.7609074,\n        -3.1862056,\n        -3.6426349,\n        -2.0630581,\n        -2.0421371,\n        -2.590611,\n        -3.177964,\n        -4.961881,\n        -4.281495,\n        -2.7978115,\n        -7.188777,\n        -2.5101128,\n        -2.6189158,\n        -2.3621762,\n        -4.326628,\n        -3.0647418,\n        -2.8780253,\n        -4.0096097,\n        -5.4045615,\n        -4.073947,\n        -2.9886022,\n        -2.6005065,\n        -2.0170605,\n        -4.1553526,\n        -2.8099904,\n        -4.852867,\n        -3.1955338\n      ],\n      \"pointIndex\": [\n        0,\n        1256,\n        255,\n        1125197114,\n        82857922,\n        1461216781,\n        1048294166,\n        1233241938,\n        1072651067,\n        1396385587,\n        379592286,\n        817359583,\n        1161848244,\n        1080687724,\n        910355443,\n        1066392929,\n        947891366,\n        472461480,\n        509175965,\n        1695562823,\n        628293117,\n        205968688,\n        1283772976,\n        1904959238,\n        1848038017,\n        1962492536,\n        926798269,\n        417231182,\n        1726165423,\n        1306473602,\n        145177136,\n        1697987296,\n        465815333,\n        594818656,\n        1629364985,\n        1574606,\n        950373332,\n        1814719573,\n        629547692,\n        841675527,\n        1435863509,\n        798550530,\n        1121046315,\n        1580042310,\n        1723551849,\n        267455636,\n        1428579383,\n        464360136,\n        1756191336,\n        303660858,\n        1753359406,\n        
1561260355,\n        1049502988,\n        325953841,\n        337209313,\n        356530729,\n        138208649,\n        381091212,\n        413443146,\n        1974615274,\n        869727989,\n        1209973784,\n        71686395,\n        1348683888,\n        739318597,\n        501851431,\n        1803830512,\n        102456874,\n        1575876331,\n        1069757925,\n        1894928579,\n        1064803339,\n        1116966482,\n        1918248381,\n        619871104,\n        1534741660,\n        687015658,\n        1495284408,\n        1537003602,\n        770132244,\n        745400391,\n        858751993,\n        546446138,\n        1538264577,\n        1184394218,\n        1581041753,\n        1498943238,\n        1986067271\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -5961011759105000572\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6237711,\n        -1.628153,\n        -1.6416693,\n        -1.6389134,\n        -1.6589046,\n        -1.6476755,\n        -1.696056,\n        -1.696213,\n        -1.6725714,\n        -1.6640321,\n        -1.6763895,\n        -1.6636418,\n        -1.6921222,\n        -1.6966518,\n        -1.7628441,\n        -1.7269291,\n        -1.6983651,\n        -1.6831176,\n        -1.6787539,\n        -1.6936954,\n        -1.7841918,\n        -1.74626,\n        -1.7725044,\n        -1.6873757,\n        -1.7203944,\n        -1.8625002,\n        -1.8564576,\n        -1.73292,\n        -1.8435144,\n        -1.855701,\n        -1.7792147,\n        -1.8557831,\n        -1.8105886,\n        -1.9192698,\n        -1.7947007,\n        -2.216442,\n        -1.8588217,\n        -2.415632,\n        -1.8353658,\n        -1.7428002,\n        
-1.8348613,\n        -2.0291662,\n        -1.9624325,\n        -2.0259523,\n        -1.9174504,\n        -1.986738,\n        -2.2747478,\n        -1.8286602,\n        -1.8884032,\n        -1.9634504,\n        -1.7983656,\n        -1.8889049,\n        -1.9644171,\n        -1.9637636,\n        -2.1408167,\n        -1.7981789,\n        -2.1016233,\n        -2.00415,\n        -2.0105994,\n        -2.1065261,\n        -2.141287,\n        -1.9218062,\n        -1.9633807,\n        -1.8852696,\n        -2.7141705,\n        -1.8775206,\n        -2.0123956,\n        -2.3604717,\n        -2.3413675,\n        -2.055255,\n        -2.4611597,\n        -2.3919573,\n        -2.874939,\n        -1.9921714,\n        -1.8891987,\n        -2.7240236,\n        -2.5091233,\n        -1.8974257,\n        -2.3336713,\n        -2.04924,\n        -1.8874867,\n        -2.400239,\n        -2.0225604,\n        -3.2573667,\n        -2.5920815,\n        -2.237946,\n        -2.6125925,\n        -2.4049811,\n        -2.571699,\n        -2.3181803,\n        -3.2518358,\n        -2.529661,\n        -2.347575,\n        -2.3412488,\n        -2.302599,\n        -1.8972478,\n        -2.1023102,\n        -2.2392087,\n        -3.5990577,\n        -2.2514434,\n        -1.9948503,\n        -1.9095303,\n        -1.8573929,\n        -2.6482105,\n        -1.9383067,\n        -2.068555,\n        -2.6944559,\n        -2.0890465,\n        -2.3229322,\n        -2.2482884,\n        -2.4190507,\n        -1.9795253,\n        -2.1934295,\n        -3.05396,\n        -2.646185,\n        -2.3653991,\n        -3.164765,\n        -3.1067479,\n        -2.0248604,\n        -2.127616,\n        -2.3515325,\n        -2.2254272,\n        -2.1740985,\n        -2.2719512,\n        -2.085722,\n        -2.4028237,\n        -2.0253525,\n        -1.9308548,\n        -2.6048572,\n        -4.0182304,\n        -4.4086246,\n        -1.9217116,\n        -3.025395,\n        -3.6621032,\n        -2.9816504,\n        -4.321547,\n        
-2.4466884,\n        -3.1595805,\n        -2.6285114,\n        -2.931134,\n        -2.7492023,\n        -3.6441271,\n        -3.034636,\n        -2.4775612,\n        -2.9013302,\n        -4.2080584,\n        -4.958086,\n        -2.3812592,\n        -2.7652755,\n        -3.1284442,\n        -4.9177685,\n        -4.059228,\n        -3.0657902,\n        -3.2866814,\n        -2.5140004,\n        -3.395545,\n        -3.394022,\n        -4.1792674,\n        -5.2798567,\n        -2.3089683,\n        -2.8747897,\n        -3.5250952,\n        -4.1287413,\n        -2.963254,\n        -3.250532,\n        -2.1863542,\n        -2.416417,\n        -3.6281595,\n        -3.6139681,\n        -4.0049005,\n        -3.2939267,\n        -2.3069274,\n        -2.6169567,\n        -3.554352,\n        -3.1421096,\n        -4.0918937,\n        -2.8538375,\n        -2.9486341,\n        -4.11142,\n        -2.662579,\n        -3.101394,\n        -4.9599957,\n        -3.5882397,\n        -2.598755,\n        -5.9695992,\n        -2.760794,\n        -2.4963899,\n        -2.5378249,\n        -5.551288,\n        -2.7500703,\n        -2.7607353,\n        -1.9182537,\n        -3.4631934,\n        -2.3276725,\n        -5.1479826,\n        -2.8500469,\n        -4.8279233,\n        -4.042882,\n        -3.806317,\n        -3.0019336,\n        -3.7066808,\n        -2.7486873,\n        -5.8026333,\n        -4.1605725,\n        -2.3630943,\n        -2.1956322,\n        -4.7438784,\n        -3.8152585,\n        -3.0708437,\n        -2.610038,\n        -3.1883404,\n        -4.153042,\n        -3.6384819,\n        -4.625368,\n        -4.6413755,\n        -5.181517,\n        -2.506795,\n        -2.6399064,\n        -2.368988,\n        -2.6294613,\n        -2.5832407,\n        -5.06332,\n        -3.4438636,\n        -2.2855315,\n        -2.087224,\n        -2.5356748,\n        -3.0395882,\n        -4.248704,\n        -4.914877,\n        -3.9012988,\n        -3.2828252,\n        -3.193225,\n        -5.193056,\n   
     -5.249597,\n        -3.2919266,\n        -3.3869119,\n        -5.7165804,\n        -2.650207,\n        -2.1123464,\n        -2.431872,\n        -2.584526,\n        -4.189752,\n        -2.4254708,\n        -2.8907857,\n        -2.560145,\n        -3.9329863,\n        -2.6766164,\n        -4.6441774,\n        -2.7055318,\n        -6.3359137,\n        -3.4322157,\n        -2.986393,\n        -4.379887,\n        -5.3462477,\n        -2.6393416,\n        -3.140426\n      ],\n      \"pointIndex\": [\n        9,\n        1253,\n        256,\n        961539270,\n        1673099742,\n        292864876,\n        93406520,\n        1605143282,\n        1130920593,\n        1312898789,\n        882579986,\n        749347743,\n        1075064541,\n        838538357,\n        272853188,\n        548798662,\n        355454806,\n        232841713,\n        1432004312,\n        469242221,\n        442134772,\n        1150795113,\n        995911589,\n        1768180843,\n        756759527,\n        261221975,\n        95213649,\n        996738658,\n        1559270182,\n        1209279388,\n        358920143,\n        388999972,\n        605928230,\n        1345802258,\n        517882258,\n        1419006519,\n        1559783413,\n        1301851733,\n        1008879669,\n        1786576935,\n        1060918451,\n        1374777785,\n        1772599360,\n        1137569967,\n        1585666898,\n        725303212,\n        239785903,\n        310201483,\n        6508745,\n        537940759,\n        1873686440,\n        379902131,\n        341355727,\n        1617006820,\n        312920987,\n        1650707723,\n        954643590,\n        911482995,\n        50904291,\n        1655019542,\n        1366056569,\n        1708581567,\n        438009943,\n        598454845,\n        211496435,\n        1199696213,\n        1754048226,\n        1684998472,\n        928944341,\n        566199032,\n        197470124,\n        878762558,\n        472550127,\n        681922101,\n        
30036178,\n        1808191772,\n        1002679164,\n        960291440,\n        1123215918,\n        908044003,\n        943742961,\n        985172922,\n        1522888374,\n        1286150540,\n        1527864856,\n        1244078233,\n        1450403722,\n        1796263298,\n        1244\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 9095889982550489397\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5965683,\n        -1.605474,\n        -1.5998874,\n        -1.6115742,\n        -1.6243737,\n        -1.6085265,\n        -1.6020123,\n        -1.634418,\n        -1.6425602,\n        -1.6400524,\n        -1.629464,\n        -1.6297138,\n        -1.6337343,\n        -1.6033086,\n        -1.6097484,\n        -1.6624259,\n        -1.6742873,\n        -1.7130513,\n        -1.7128102,\n        -1.6477957,\n        -1.670843,\n        -1.6317906,\n        -1.6782671,\n        -1.6773082,\n        -1.8203907,\n        -1.7286334,\n        -1.806726,\n        -1.6300648,\n        -1.7287886,\n        -1.6415924,\n        -1.6448572,\n        -1.6650454,\n        -1.7876563,\n        -1.7708933,\n        -1.7046733,\n        -1.9888271,\n        -1.972927,\n        -1.8556824,\n        -1.9030229,\n        -1.9477214,\n        -1.8496878,\n        -1.6738344,\n        -1.8104644,\n        -1.6836578,\n        -1.6649296,\n        -1.7982227,\n        -1.7100314,\n        -1.8672014,\n        -1.687297,\n        -2.4574585,\n        -1.9116623,\n        -1.7777321,\n        -1.7388297,\n        -2.0904682,\n        -2.171662,\n        -2.0474243,\n        -2.047354,\n        -1.8827441,\n        -1.7973624,\n        -1.6882379,\n        -1.7449267,\n        -1.6694207,\n        
-1.8786082,\n        -1.685836,\n        -2.4038177,\n        -3.2515216,\n        -1.8661153,\n        -1.903327,\n        -2.5530834,\n        -2.1660123,\n        -2.7789965,\n        -2.1661847,\n        -2.5386386,\n        -2.577987,\n        -2.0202804,\n        -2.6948571,\n        -2.0975728,\n        -2.4518397,\n        -2.322574,\n        -2.0369956,\n        -2.3269794,\n        -1.980619,\n        -1.8654743,\n        -2.132766,\n        -1.7511171,\n        -2.1872008,\n        -1.899768,\n        -1.8329855,\n        -2.0645735,\n        -1.793986,\n        -1.9067699,\n        -2.3587954,\n        -1.9459684,\n        -1.7774417,\n        -1.7305173,\n        -2.106843,\n        -2.2295704,\n        -1.7658864,\n        -1.8917775,\n        -2.6521816,\n        -3.1080632,\n        -2.6481214,\n        -1.9151706,\n        -1.9945921,\n        -1.8536875,\n        -2.0041533,\n        -1.8389709,\n        -2.201991,\n        -2.526123,\n        -2.8499606,\n        -2.2844756,\n        -2.0961516,\n        -2.7727692,\n        -2.337646,\n        -2.2693615,\n        -2.6762223,\n        -1.98051,\n        -1.9966611,\n        -2.4551325,\n        -2.1908934,\n        -1.6984668,\n        -2.9126124,\n        -1.754821,\n        -1.8608118,\n        -1.778809,\n        -2.1995912,\n        -1.8802121,\n        -1.754069,\n        -1.9750528,\n        -3.3643022,\n        -3.0671022,\n        -5.499051,\n        -6.351326,\n        -4.5714335,\n        -2.0859892,\n        -2.5843184,\n        -2.7171652,\n        -3.3848102,\n        -2.7166994,\n        -2.4720547,\n        -2.4142833,\n        -4.8606153,\n        -4.5415883,\n        -3.3840468,\n        -2.6464822,\n        -3.5753329,\n        -3.2476146,\n        -2.6213663,\n        -3.1917949,\n        -2.6942282,\n        -2.42582,\n        -3.2648609,\n        -3.1270168,\n        -2.307064,\n        -4.374364,\n        -3.885179,\n        -3.0397565,\n        -2.5333095,\n        
-2.4329717,\n        -2.2332306,\n        -2.294948,\n        -3.1335053,\n        -4.5621243,\n        -3.0722396,\n        -2.3758736,\n        -5.0332875,\n        -2.6144414,\n        -3.4925108,\n        -3.4594991,\n        -2.6730094,\n        -1.995602,\n        -5.004476,\n        -2.4933708,\n        -1.9064989,\n        -3.145163,\n        -2.6776807,\n        -2.5378213,\n        -2.7086794,\n        -3.2757256,\n        -1.8860871,\n        -2.3013015,\n        -2.6716664,\n        -3.6417656,\n        -2.7129445,\n        -3.0108495,\n        -2.7780106,\n        -2.0601518,\n        -2.9248378,\n        -2.9490836,\n        -2.2054753,\n        -3.5714936,\n        -3.9003718,\n        -2.609097,\n        -2.988914,\n        -2.3022199,\n        -3.5478654,\n        -3.9104757,\n        -2.8510497,\n        -2.9600012,\n        -3.0319679,\n        -5.06519,\n        -4.875351,\n        -4.142768,\n        -5.731296,\n        -3.408339,\n        -6.5622587,\n        -2.80811,\n        -3.0746014,\n        -3.469099,\n        -4.501739,\n        -2.066536,\n        -2.7396262,\n        -3.2183354,\n        -2.774808,\n        -2.3921945,\n        -4.8227572,\n        -2.8792684,\n        -3.2473035,\n        -2.8974328,\n        -4.7501125,\n        -3.4837053,\n        -2.9300468,\n        -2.5738401,\n        -2.8842216,\n        -3.1261454,\n        -4.581744,\n        -2.9203901,\n        -2.6495404,\n        -3.342783,\n        -2.4231517,\n        -3.1346366,\n        -3.2866194,\n        -3.8044457,\n        -3.9290097,\n        -2.220442,\n        -3.2857993,\n        -2.317145,\n        -3.182098,\n        -2.5939918,\n        -2.685288,\n        -2.73855,\n        -2.1080241,\n        -2.4743004,\n        -4.901883,\n        -4.4570746,\n        -2.675061,\n        -2.5903609,\n        -2.8296156,\n        -3.7289162,\n        -2.9241652,\n        -3.2727346,\n        -4.1241984,\n        -3.8632848,\n        -2.914715,\n        
-2.1849167,\n        -2.0690107\n      ],\n      \"pointIndex\": [\n        2,\n        1256,\n        256,\n        1503582068,\n        559482190,\n        1337102976,\n        562729286,\n        1739611809,\n        967291313,\n        425541850,\n        871408859,\n        1146672822,\n        1122805082,\n        395320915,\n        24473670,\n        630240406,\n        400611615,\n        148617549,\n        867539900,\n        810810247,\n        751919595,\n        1609394900,\n        1136416181,\n        1929048211,\n        612684984,\n        1527301945,\n        1577898435,\n        305795120,\n        1880831748,\n        367138434,\n        22172056,\n        1874272017,\n        1304492081,\n        505980363,\n        1231071935,\n        139656808,\n        961302118,\n        822970169,\n        1666978862,\n        173863570,\n        1554254010,\n        1236234523,\n        1635648513,\n        1492888839,\n        1732013932,\n        694525718,\n        872762421,\n        455204267,\n        1773693362,\n        781273967,\n        1676485434,\n        63093379,\n        174927860,\n        312100965,\n        330474076,\n        637861492,\n        555140684,\n        1812290006,\n        1636756093,\n        1445119631,\n        1836815181,\n        456820272,\n        474696245,\n        491235964,\n        502555223,\n        1940258051,\n        530706178,\n        1489639818,\n        523814770,\n        586608468,\n        76358020,\n        653663382,\n        1327743328,\n        1851203310,\n        1673754727,\n        1523023867,\n        857828327,\n        1941160626,\n        830782756,\n        1601969504,\n        1845615641,\n        1401517138,\n        1068653717,\n        1111281952,\n        1140141877,\n        1971318335,\n        1757514124,\n        1743241112,\n        1254\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 
0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -4568414323862123324\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6138266,\n        -1.6200708,\n        -1.614556,\n        -1.6233939,\n        -1.6260647,\n        -1.6201645,\n        -1.6304435,\n        -1.669478,\n        -1.6389856,\n        -1.651674,\n        -1.6661147,\n        -1.6698132,\n        -1.6513977,\n        -1.6688465,\n        -1.6691176,\n        -1.7950977,\n        -1.7260531,\n        -1.7063082,\n        -1.6564782,\n        -1.669986,\n        -1.903447,\n        -1.6774566,\n        -1.7375205,\n        -1.9316454,\n        -1.7064815,\n        -1.7414997,\n        -1.6840204,\n        -1.9541612,\n        -1.9007772,\n        -2.018441,\n        -1.7255052,\n        -1.8393124,\n        -2.0276234,\n        -1.8696972,\n        -1.8448665,\n        -1.7897143,\n        -1.7640897,\n        -1.8238368,\n        -1.8092318,\n        -1.8617193,\n        -1.7547783,\n        -2.0482876,\n        -2.110692,\n        -1.7365395,\n        -1.7024146,\n        -2.3914833,\n        -1.850182,\n        -2.068951,\n        -2.242601,\n        -1.7162504,\n        -1.78647,\n        -1.7638264,\n        -2.090236,\n        -1.7976038,\n        -1.7387117,\n        -2.1137908,\n        -2.472179,\n        -2.1319265,\n        -2.2775311,\n        -2.038422,\n        -2.1415591,\n        -2.094432,\n        -1.726505,\n        -2.575303,\n        -1.9282467,\n        -2.8848033,\n        -2.4613793,\n        -1.9956578,\n        -1.9995972,\n        -2.0498257,\n        -1.8999822,\n        -1.968389,\n        -2.1845083,\n        -1.998322,\n        -2.3053951,\n        -2.185676,\n        -1.8633871,\n        -2.577086,\n        -2.3613966,\n        -1.8704422,\n        -2.3792217,\n        -1.9110062,\n        -2.2505798,\n        
-2.1216521,\n        -2.0548737,\n        -2.2406547,\n        -2.1248424,\n        -2.1456785,\n        -2.1103835,\n        -2.0191789,\n        -3.127058,\n        -3.0576043,\n        -2.6811328,\n        -1.8934972,\n        -1.8617606,\n        -2.2808363,\n        -2.4195814,\n        -2.9486563,\n        -2.5926979,\n        -1.9886698,\n        -2.084278,\n        -2.182579,\n        -1.8508092,\n        -2.1939347,\n        -2.5398748,\n        -2.717566,\n        -2.4137757,\n        -1.8228891,\n        -3.6212134,\n        -1.7930357,\n        -2.044137,\n        -2.839537,\n        -2.3011444,\n        -2.7147546,\n        -3.0784369,\n        -2.3831677,\n        -3.5555866,\n        -2.469817,\n        -2.3973162,\n        -2.2536638,\n        -2.0598986,\n        -2.2986326,\n        -2.5735557,\n        -2.1581638,\n        -2.221038,\n        -2.3564098,\n        -1.9919771,\n        -5.206828,\n        -3.0944185,\n        -2.483788,\n        -2.118705,\n        -3.271962,\n        -8.166502,\n        -5.0891895,\n        -3.2259262,\n        -5.996233,\n        -7.8219643,\n        -5.435928,\n        -4.096331,\n        -2.0845773,\n        -2.3608477,\n        -2.382382,\n        -2.571299,\n        -2.3727942,\n        -2.1107092,\n        -3.9825644,\n        -5.0206003,\n        -2.0619795,\n        -2.4728963,\n        -2.3282006,\n        -6.0645623,\n        -3.7792861,\n        -2.5139346,\n        -2.4748445,\n        -2.5115914,\n        -3.2220566,\n        -4.2849445,\n        -3.2265825,\n        -3.7718532,\n        -4.087799,\n        -2.0863242,\n        -3.5737815,\n        -2.8400545,\n        -2.1448634,\n        -2.0088718,\n        -2.6729875,\n        -4.4135776,\n        -2.8767314,\n        -2.8736422,\n        -2.6131053,\n        -2.6826117,\n        -2.5429437,\n        -3.638273,\n        -2.4608417,\n        -2.7961802,\n        -2.2718759,\n        -2.3121276,\n        -2.6518314,\n        -2.3440542,\n        
-2.045319,\n        -4.7715955,\n        -3.9468198,\n        -3.5987012,\n        -3.7612112,\n        -4.406169,\n        -3.0142753,\n        -2.9591682,\n        -2.205534,\n        -1.9212265,\n        -3.6447978,\n        -2.0212262,\n        -3.6987958,\n        -3.2881184,\n        -2.9900627,\n        -3.8477633,\n        -5.032719,\n        -5.106605,\n        -3.327592,\n        -2.7105112,\n        -2.4769652,\n        -2.9611087,\n        -3.0738149,\n        -2.8768263,\n        -7.121635,\n        -3.7444797,\n        -2.0688655,\n        -2.374881,\n        -2.4311557,\n        -2.7882764,\n        -2.6756434,\n        -11.394501,\n        -2.7900372,\n        -2.7363017,\n        -2.440126,\n        -2.477741,\n        -4.5160103,\n        -2.7817466,\n        -4.2179155,\n        -4.096831,\n        -2.6643949,\n        -6.123933,\n        -2.4551187,\n        -3.8375704,\n        -4.0790167,\n        -2.8787398,\n        -5.083254,\n        -3.9775193,\n        -4.593228,\n        -2.8289692,\n        -3.9375958,\n        -6.2126374,\n        -3.015584,\n        -3.2844162,\n        -3.608685,\n        -4.183773,\n        -3.0704916,\n        -4.6877394,\n        -2.875446,\n        -3.779652,\n        -2.4573023,\n        -2.2609415,\n        -2.8825245,\n        -2.9201965,\n        -2.360224,\n        -3.3201356,\n        -4.1180134,\n        -3.2411907,\n        -2.2871995,\n        -3.1855159,\n        -2.308338,\n        -4.0716915,\n        -3.783447,\n        -2.4058626,\n        -2.3971012\n      ],\n      \"pointIndex\": [\n        1,\n        1252,\n        254,\n        274547076,\n        976034967,\n        315587402,\n        1649179100,\n        1843551661,\n        1376484141,\n        180538131,\n        980182133,\n        861182697,\n        1233326548,\n        250222193,\n        576939298,\n        331323215,\n        540501137,\n        1394344577,\n        1834460469,\n        276498685,\n        1039219316,\n        
1437267118,\n        1216370238,\n        1904011025,\n        1224188161,\n        268514160,\n        666171480,\n        1477003370,\n        1592529095,\n        13868853,\n        1195328452,\n        424364948,\n        68286543,\n        1021973364,\n        1840717453,\n        1628669221,\n        1482849275,\n        685094737,\n        742769426,\n        1260848491,\n        1258393488,\n        1697048226,\n        1027507217,\n        1381884431,\n        1786484634,\n        1487562547,\n        1617412411,\n        266446638,\n        1450798852,\n        1932821008,\n        1644546020,\n        1238420991,\n        1122575941,\n        1858877151,\n        1752517175,\n        1085366049,\n        549552616,\n        772172480,\n        1700395270,\n        362405413,\n        32350723,\n        80062926,\n        1507765598,\n        1832014160,\n        871120223,\n        1037491377,\n        167472094,\n        504934741,\n        177951359,\n        500670106,\n        971261282,\n        1275056365,\n        588879903,\n        1924589827,\n        768038482,\n        687266305,\n        1938893689,\n        227921150,\n        1467022746,\n        1039928366,\n        1280589980,\n        1491414349,\n        1019670730,\n        1447688018,\n        658286145,\n        1821877343,\n        1523321737,\n        1563638\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -7769275059528997346\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.467028,\n        -1.4670975,\n        -1.4674501,\n        -1.4778205,\n        -1.4689364,\n        -1.4841985,\n        -1.4681206,\n        -1.4894453,\n        -1.5780511,\n        -1.4848005,\n        
-1.5843028,\n        -1.4873016,\n        -1.5212706,\n        -1.477742,\n        -1.4705982,\n        -1.528797,\n        -1.6605811,\n        -1.5975436,\n        -1.6015737,\n        -1.518848,\n        -1.4887893,\n        -1.8078047,\n        -1.6950811,\n        -1.4955248,\n        -1.5334237,\n        -1.5501573,\n        -1.653275,\n        -1.7648107,\n        -1.7915306,\n        -1.4838738,\n        -2.0933008,\n        -1.5549726,\n        -1.5799541,\n        -1.8778653,\n        -2.0852897,\n        -1.598548,\n        -1.6157726,\n        -1.6360085,\n        -1.6091611,\n        -1.5994772,\n        -1.7040111,\n        -1.5370618,\n        -1.5624154,\n        -1.930429,\n        -2.0199976,\n        -1.7245322,\n        -1.7301582,\n        -1.7585945,\n        -1.5455968,\n        -1.7815795,\n        -1.6114988,\n        -1.741401,\n        -1.6412951,\n        -1.7372777,\n        -1.6953312,\n        -1.9503698,\n        -1.8603348,\n        -1.8601714,\n        -1.8995856,\n        -1.7357519,\n        -1.6103076,\n        -2.1440177,\n        -2.3009224,\n        -1.7057087,\n        -1.5696069,\n        -1.8150089,\n        -3.4212294,\n        -2.101055,\n        -2.87118,\n        -2.098325,\n        -2.436333,\n        -1.628527,\n        -2.0695157,\n        -1.7023493,\n        -1.9100912,\n        -1.6787161,\n        -1.6959999,\n        -1.923348,\n        -1.7236828,\n        -1.7092505,\n        -2.1551113,\n        -1.8813591,\n        -2.2988782,\n        -2.1261275,\n        -1.6131617,\n        -1.8154691,\n        -1.8053954,\n        -2.2895265,\n        -2.074795,\n        -2.494754,\n        -3.2932467,\n        -1.8201196,\n        -1.7812427,\n        -1.9736366,\n        -2.131185,\n        -2.472943,\n        -2.473403,\n        -1.7512174,\n        -1.6387186,\n        -2.4311333,\n        -2.6189053,\n        -1.7664279,\n        -2.0674803,\n        -1.8106017,\n        -1.9222755,\n        -1.7646991,\n        
-1.8440909,\n        -2.0587785,\n        -2.2700589,\n        -2.2341561,\n        -1.8995575,\n        -2.1170647,\n        -2.1305034,\n        -1.9458102,\n        -2.2541769,\n        -1.8630131,\n        -2.3247356,\n        -2.011005,\n        -2.0010917,\n        -2.0726094,\n        -1.7903583,\n        -1.9421533,\n        -2.0684798,\n        -2.3165772,\n        -2.2252998,\n        -2.8308153,\n        -2.3763773,\n        -2.0261903,\n        -2.3376548,\n        -1.6005598,\n        -5.1900268,\n        -3.3060582,\n        -1.9668535,\n        -4.4343815,\n        -5.726982,\n        -6.5618143,\n        -7.1521225,\n        -3.9599025,\n        -3.087707,\n        -3.7807233,\n        -2.1584404,\n        -3.3265028,\n        -4.7556677,\n        -2.0542948,\n        -1.7263491,\n        -2.553137,\n        -3.8031938,\n        -2.0881627,\n        -3.6006567,\n        -1.9714303,\n        -4.027929,\n        -2.9679847,\n        -2.084463,\n        -2.5373173,\n        -1.7270668,\n        -3.3460476,\n        -2.2720094,\n        -1.9646698,\n        -1.7665315,\n        -2.1895826,\n        -3.1057694,\n        -2.5851593,\n        -2.3558624,\n        -2.7720058,\n        -4.548722,\n        -2.794429,\n        -2.9128704,\n        -2.2261264,\n        -2.5785084,\n        -2.3850858,\n        -3.0014517,\n        -2.3341982,\n        -2.0401964,\n        -3.7656128,\n        -4.918224,\n        -2.3436055,\n        -3.3159294,\n        -2.8349006,\n        -2.212059,\n        -2.8434274,\n        -3.250644,\n        -6.301172,\n        -3.4591837,\n        -2.829273,\n        -5.1736913,\n        -1.8322675,\n        -2.6087058,\n        -2.7446747,\n        -1.9850545,\n        -2.7042725,\n        -2.2928898,\n        -2.8400059,\n        -7.0028443,\n        -3.1880243,\n        -2.7262745,\n        -2.933164,\n        -2.522319,\n        -5.064997,\n        -2.2153957,\n        -2.8580453,\n        -2.6116455,\n        -2.7077794,\n        
-2.875213,\n        -1.8732623,\n        -4.516843,\n        -3.31612,\n        -2.9638295,\n        -2.528539,\n        -1.9243155,\n        -3.2457132,\n        -3.8606894,\n        -4.193019,\n        -1.9437845,\n        -2.818514,\n        -2.0880072,\n        -4.4739275,\n        -2.1363142,\n        -3.224624,\n        -4.357141,\n        -2.5012124,\n        -2.285748,\n        -2.908747,\n        -2.3669345,\n        -2.3116171,\n        -2.3229961,\n        -2.6012707,\n        -2.9103174,\n        -2.409648,\n        -2.921963,\n        -2.7194922,\n        -2.717911,\n        -2.218835,\n        -1.9833001,\n        -2.865098,\n        -2.5878224,\n        -2.833754,\n        -3.606414,\n        -2.3370578,\n        -3.9532104,\n        -2.3224201,\n        -3.205591,\n        -2.004258,\n        -2.3421495,\n        -3.8120599,\n        -2.659517,\n        -4.6359572,\n        -3.6126306,\n        -2.8724058,\n        -3.2156265,\n        -3.7808232,\n        -5.067774,\n        -3.3008513,\n        -4.3762145,\n        -3.2654536,\n        -4.3478055,\n        -3.4660358\n      ],\n      \"pointIndex\": [\n        6,\n        1249,\n        256,\n        795392217,\n        638392962,\n        150061942,\n        1614475866,\n        1147699865,\n        1191819344,\n        279537301,\n        551298788,\n        95573517,\n        1280571584,\n        800982649,\n        797086801,\n        1445793454,\n        146122949,\n        60691280,\n        1321646822,\n        985916881,\n        122025031,\n        1066252937,\n        1678302431,\n        1815055294,\n        1539998588,\n        1060685771,\n        430818523,\n        330705144,\n        449039476,\n        662066961,\n        821265261,\n        397155296,\n        207819166,\n        1853345899,\n        877469758,\n        756263593,\n        1024183649,\n        835997518,\n        1899422496,\n        203404796,\n        1368794025,\n        175599895,\n        1007365797,\n       
 1669139218,\n        1379846804,\n        1796674342,\n        1791417225,\n        7905622,\n        167346105,\n        1058947104,\n        1858394257,\n        1467292877,\n        298716694,\n        1047781028,\n        1629806720,\n        327779287,\n        294734916,\n        865485941,\n        956674799,\n        388742043,\n        142688661,\n        625743782,\n        438297810,\n        1456649084,\n        1603635718,\n        500480003,\n        512765450,\n        1883280749,\n        1915537434,\n        486385823,\n        17492566,\n        1499864378,\n        1410301699,\n        649940136,\n        673716751,\n        737319243,\n        928071726,\n        1896315503,\n        834726615,\n        55181900,\n        896101042,\n        1209358992,\n        790024189,\n        1033022128,\n        1211074512,\n        1246998788,\n        1531517863,\n        1777879970,\n        1243\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 4541724188348374211\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5965717,\n        -1.6001675,\n        -1.6039135,\n        -1.6013881,\n        -1.6059641,\n        -1.6264149,\n        -1.6109191,\n        -1.6168767,\n        -1.6837949,\n        -1.6154885,\n        -1.6385336,\n        -1.6628644,\n        -1.71156,\n        -1.6524342,\n        -1.6569929,\n        -1.6551793,\n        -1.6284081,\n        -1.7612597,\n        -1.7535372,\n        -1.6980925,\n        -1.8142519,\n        -1.7219537,\n        -1.6645398,\n        -1.717013,\n        -1.6743274,\n        -1.7404604,\n        -1.7513614,\n        -1.7165102,\n        -1.8172222,\n        -1.6647087,\n        -1.6899586,\n        -1.8019162,\n  
      -1.9604771,\n        -1.7325345,\n        -1.8862405,\n        -1.763366,\n        -1.7779802,\n        -1.7706667,\n        -1.927861,\n        -2.421426,\n        -2.0788026,\n        -1.919324,\n        -1.9871373,\n        -1.7431914,\n        -1.8885114,\n        -1.6989186,\n        -1.7150781,\n        -1.8874427,\n        -1.8246077,\n        -1.6761974,\n        -1.700487,\n        -1.8789678,\n        -1.83192,\n        -1.9647466,\n        -1.9222289,\n        -1.7642535,\n        -1.7872288,\n        -2.122375,\n        -1.8567419,\n        -2.0587893,\n        -1.8710225,\n        -1.7320795,\n        -2.0157423,\n        -1.8617587,\n        -1.9388741,\n        -2.1291158,\n        -2.3377938,\n        -1.8759451,\n        -1.9586152,\n        -2.4294631,\n        -2.0866654,\n        -2.352787,\n        -2.368293,\n        -2.4037426,\n        -1.8538191,\n        -2.3876994,\n        -3.388962,\n        -2.0128324,\n        -2.1810913,\n        -2.4652147,\n        -2.760965,\n        -2.1185374,\n        -2.4552193,\n        -1.9905087,\n        -2.6480858,\n        -2.1137474,\n        -2.64262,\n        -2.3192794,\n        -1.7611603,\n        -2.0723884,\n        -2.1697767,\n        -1.6992674,\n        -1.7801843,\n        -2.0863554,\n        -1.7544373,\n        -2.4301438,\n        -2.3560226,\n        -1.9150974,\n        -2.0139015,\n        -2.266037,\n        -2.0130007,\n        -2.3793597,\n        -2.4544587,\n        -1.9949479,\n        -1.9759467,\n        -1.9641852,\n        -3.6359153,\n        -2.1987228,\n        -2.1116462,\n        -2.1017444,\n        -2.194392,\n        -2.273187,\n        -1.8679112,\n        -2.1140149,\n        -3.5964894,\n        -2.7098334,\n        -3.1281495,\n        -1.8930235,\n        -2.5202293,\n        -2.903093,\n        -2.2792664,\n        -2.310557,\n        -2.0356898,\n        -2.211646,\n        -1.7527771,\n        -2.7364557,\n        -2.0347674,\n        -1.9049851,\n      
  -2.9417238,\n        -2.533384,\n        -3.1451507,\n        -3.901787,\n        -2.65957,\n        -3.1533628,\n        -4.368806,\n        -2.3373902,\n        -2.8215654,\n        -3.2734756,\n        -3.0295422,\n        -2.7848854,\n        -2.8667374,\n        -2.545905,\n        -2.3083808,\n        -2.7107656,\n        -3.1513333,\n        -7.0312195,\n        -2.4134543,\n        -4.3557734,\n        -2.4205604,\n        -2.0935204,\n        -2.3436384,\n        -3.6850507,\n        -2.594825,\n        -3.7675693,\n        -4.1618886,\n        -2.4946504,\n        -2.3559964,\n        -5.823009,\n        -3.4856424,\n        -3.8307014,\n        -3.9933295,\n        -3.2719202,\n        -2.831582,\n        -2.1212502,\n        -2.7761233,\n        -4.082748,\n        -2.6616712,\n        -2.6365888,\n        -2.0699306,\n        -5.487333,\n        -3.191381,\n        -3.5252602,\n        -2.469596,\n        -3.3871434,\n        -3.2145245,\n        -3.2415304,\n        -2.7563546,\n        -2.6392884,\n        -4.5272846,\n        -2.825336,\n        -3.0379832,\n        -3.686642,\n        -3.3420174,\n        -2.0666325,\n        -4.6929336,\n        -2.4952083,\n        -2.9958043,\n        -4.134448,\n        -2.3739684,\n        -1.9288124,\n        -2.210782,\n        -2.81132,\n        -3.663362,\n        -2.3939419,\n        -2.3698654,\n        -3.599692,\n        -3.1023452,\n        -3.5171332,\n        -3.2203267,\n        -2.2895916,\n        -3.1158347,\n        -2.0357246,\n        -5.3228574,\n        -3.8063614,\n        -4.1641517,\n        -2.4883325,\n        -3.3343816,\n        -2.5615892,\n        -3.0179958,\n        -3.1521022,\n        -2.9317567,\n        -2.186532,\n        -2.6344547,\n        -5.570923,\n        -4.1481714,\n        -2.3711085,\n        -2.6832128,\n        -2.4854255,\n        -3.639394,\n        -4.0815396,\n        -2.7622623,\n        -5.2310658,\n        -2.480021,\n        -3.1463213,\n        
-3.8098817,\n        -3.2311997,\n        -3.0973024,\n        -4.378947,\n        -2.1186829,\n        -8.498683,\n        -3.7873533,\n        -2.9777024,\n        -3.6439102,\n        -3.3209803,\n        -4.437536,\n        -2.9741552,\n        -1.9432625,\n        -2.984181,\n        -2.892447,\n        -3.4080548,\n        -4.1191716,\n        -3.8573587,\n        -3.966833,\n        -4.4431806,\n        -2.609818,\n        -2.5466335,\n        -4.1361165,\n        -3.8374505,\n        -4.8389907,\n        -2.4686964,\n        -3.7323036,\n        -3.4660387,\n        -4.2187986,\n        -2.5719957,\n        -2.15731,\n        -2.0255377\n      ],\n      \"pointIndex\": [\n        0,\n        1253,\n        256,\n        1163611739,\n        961304106,\n        517944468,\n        562997821,\n        1727370439,\n        742897309,\n        396628742,\n        511453566,\n        1096347742,\n        1721157150,\n        983949737,\n        430285,\n        168060032,\n        140638341,\n        1326071702,\n        472850819,\n        852727987,\n        1611448073,\n        790833056,\n        1332688549,\n        1873278899,\n        549179258,\n        1330390782,\n        899569511,\n        975063105,\n        1675110494,\n        51875298,\n        1269258147,\n        1692507144,\n        1619167988,\n        221196918,\n        1071487429,\n        1831608236,\n        1639183998,\n        1759677201,\n        664469218,\n        1206540885,\n        1038238500,\n        1084208527,\n        1099085665,\n        1304517068,\n        1598745802,\n        1269019311,\n        42447411,\n        1505522038,\n        34865842,\n        1539704003,\n        1066873370,\n        233723665,\n        292835379,\n        1204790347,\n        806415541,\n        329431560,\n        89350503,\n        1736721386,\n        32635574,\n        592479413,\n        392883167,\n        1806938453,\n        1388860555,\n        433930275,\n        1481100752,\n      
  1934702137,\n        17665771,\n        1028187503,\n        1538389950,\n        1324475678,\n        1887458445,\n        1879803483,\n        1715261712,\n        184791230,\n        1878106178,\n        1137472729,\n        192408864,\n        1901791939,\n        1037148005,\n        486557147,\n        919595168,\n        950974538,\n        1178624483,\n        1125249092,\n        1179853800,\n        1285762245,\n        1768686352,\n        1860189661,\n        1247\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 2974389039810781744\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6764992,\n        -1.6958274,\n        -1.6906761,\n        -1.6997926,\n        -1.7197431,\n        -1.7105455,\n        -1.699538,\n        -1.7480061,\n        -1.7183363,\n        -1.7617513,\n        -1.7665355,\n        -1.7205622,\n        -1.7229751,\n        -1.7095234,\n        -1.712063,\n        -1.7610425,\n        -2.0025625,\n        -1.7190348,\n        -1.8428125,\n        -1.7734516,\n        -1.8737025,\n        -1.784302,\n        -1.7758138,\n        -1.8650142,\n        -1.9094932,\n        -1.7641912,\n        -1.7876068,\n        -1.770861,\n        -1.9609163,\n        -1.7481569,\n        -1.8737556,\n        -1.7909487,\n        -1.8254586,\n        -2.0344076,\n        -2.0933356,\n        -1.8535492,\n        -1.7756407,\n        -1.9534725,\n        -2.2208147,\n        -1.8998641,\n        -1.8039731,\n        -1.9330158,\n        -1.8930149,\n        -1.8418735,\n        -1.8315548,\n        -1.8492849,\n        -1.7767801,\n        -2.0619454,\n        -1.9183084,\n        -1.9914724,\n        -2.1994247,\n        -1.893529,\n        -1.9167924,\n    
    -1.8534486,\n        -1.8540635,\n        -1.8360285,\n        -1.8018483,\n        -2.0149078,\n        -2.156264,\n        -1.7783345,\n        -1.8414056,\n        -2.0906248,\n        -2.3711946,\n        -1.8415192,\n        -2.3714786,\n        -2.9022906,\n        -1.9636358,\n        -2.8221817,\n        -2.3486662,\n        -2.4554226,\n        -2.6083274,\n        -2.6525185,\n        -1.9475011,\n        -2.313022,\n        -1.8175609,\n        -2.8928878,\n        -2.000999,\n        -2.89704,\n        -2.7926826,\n        -2.1103365,\n        -1.9930633,\n        -2.6354682,\n        -2.0399613,\n        -2.033366,\n        -2.20878,\n        -2.487807,\n        -2.1843228,\n        -1.8659741,\n        -1.881563,\n        -2.3812628,\n        -1.9711432,\n        -2.1501844,\n        -2.2721488,\n        -1.813301,\n        -1.8368465,\n        -2.1128387,\n        -2.3040423,\n        -2.6563406,\n        -1.9628875,\n        -2.2360194,\n        -2.3045492,\n        -2.2451193,\n        -2.5775018,\n        -2.047439,\n        -2.3106627,\n        -2.1124313,\n        -1.9980546,\n        -2.2577305,\n        -1.9977155,\n        -2.0085564,\n        -2.1718853,\n        -3.9304676,\n        -1.8986434,\n        -1.9047749,\n        -1.9229028,\n        -2.1014469,\n        -2.537519,\n        -2.6529217,\n        -2.31025,\n        -1.9334949,\n        -1.796481,\n        -2.9063954,\n        -2.0648232,\n        -2.3255317,\n        -2.2250478,\n        -2.5736194,\n        -3.5998192,\n        -2.1379678,\n        -2.723008,\n        -2.939169,\n        -3.053105,\n        -3.9497592,\n        -3.1534941,\n        -3.3767736,\n        -2.1855912,\n        -3.4670146,\n        -6.3716393,\n        -4.2738733,\n        -2.434441,\n        -4.7714667,\n        -4.589805,\n        -4.959077,\n        -2.7561874,\n        -4.3232794,\n        -2.740808,\n        -2.8243487,\n        -3.9605234,\n        -3.4604313,\n        -2.7065961,\n        
-6.068086,\n        -6.0786867,\n        -3.5492153,\n        -5.709071,\n        -2.1999042,\n        -4.1939697,\n        -3.6098812,\n        -5.782508,\n        -4.0327687,\n        -6.203905,\n        -2.1922278,\n        -2.2782393,\n        -2.4290404,\n        -2.4427545,\n        -3.199056,\n        -5.023345,\n        -2.2551022,\n        -2.9791067,\n        -3.2474163,\n        -2.243982,\n        -3.5441117,\n        -2.7690384,\n        -3.6095276,\n        -3.255176,\n        -2.6776273,\n        -2.6551788,\n        -2.4600167,\n        -2.2966177,\n        -1.9961336,\n        -2.4975839,\n        -2.5526073,\n        -3.0071547,\n        -2.8752391,\n        -7.322864,\n        -3.6169648,\n        -2.8751423,\n        -4.7001524,\n        -6.7427464,\n        -3.6106853,\n        -2.4639018,\n        -4.794964,\n        -3.2962768,\n        -2.7963517,\n        -2.5175323,\n        -3.3550315,\n        -3.2839267,\n        -4.0063868,\n        -2.938738,\n        -3.2653677,\n        -2.0275402,\n        -2.5693622,\n        -3.3391628,\n        -4.112069,\n        -2.4103444,\n        -2.5815146,\n        -2.4570982,\n        -2.9206357,\n        -2.6522524,\n        -2.232228,\n        -2.307984,\n        -5.9732804,\n        -4.825717,\n        -3.8391047,\n        -2.2225866,\n        -3.9098666,\n        -4.889138,\n        -3.6301153,\n        -2.7953954,\n        -2.2403986,\n        -2.09101,\n        -3.7887583,\n        -3.6717098,\n        -2.5485013,\n        -2.2754288,\n        -4.413657,\n        -4.2619123,\n        -3.6700647,\n        -3.6085973,\n        -2.2921927,\n        -3.1419306,\n        -2.2765403,\n        -2.431947,\n        -2.2881124,\n        -2.2225246,\n        -2.843642,\n        -2.7188275,\n        -2.9016843,\n        -3.3257384,\n        -2.4257905,\n        -4.3817286,\n        -2.0652525,\n        -2.0353162,\n        -3.4925709,\n        -2.8533604,\n        -3.0184164,\n        -2.9160662,\n        
-3.452129,\n        -3.3308847,\n        -3.1446595,\n        -2.7629244,\n        -4.060753,\n        -3.3641934,\n        -4.5306907,\n        -2.7997391,\n        -5.0139904,\n        -3.9912064,\n        -2.5082939\n      ],\n      \"pointIndex\": [\n        0,\n        1255,\n        256,\n        1595624174,\n        420418603,\n        1336138779,\n        503927129,\n        1359566407,\n        280802615,\n        150136042,\n        3692996,\n        665504981,\n        949475038,\n        50286260,\n        1125686072,\n        1282042985,\n        1872162215,\n        157102042,\n        454994246,\n        1587410745,\n        609665789,\n        1200082062,\n        1171214805,\n        1902230760,\n        847331506,\n        1598252240,\n        1445118495,\n        1610068947,\n        1194442377,\n        1896710392,\n        359547984,\n        497749265,\n        282571950,\n        531963305,\n        678620578,\n        502828948,\n        1303457028,\n        733044781,\n        824345301,\n        1134757455,\n        1757924619,\n        1708658538,\n        1508296009,\n        949874839,\n        1764962526,\n        22071435,\n        112983632,\n        1479820342,\n        1893770907,\n        1318621710,\n        284053641,\n        552124488,\n        735926860,\n        438794502,\n        1284105615,\n        326809658,\n        1917781586,\n        1860744660,\n        1153253229,\n        1497199698,\n        1969057161,\n        1974720042,\n        1486644109,\n        174979513,\n        1658510367,\n        440166081,\n        1232412144,\n        1928663421,\n        500575328,\n        648431842,\n        769368113,\n        1235260006,\n        573095955,\n        658807788,\n        34545359,\n        208497870,\n        1805281587,\n        1692857443,\n        1679577101,\n        910172434,\n        805831328,\n        1941170410,\n        1772364771,\n        1400744872,\n        957287994,\n        1070402280,\n      
  1306036065,\n        1678467741,\n        1255\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -7325120448755709923\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4789896,\n        -1.4813277,\n        -1.4817868,\n        -1.4927266,\n        -1.4817274,\n        -1.494836,\n        -1.4951668,\n        -1.5189464,\n        -1.4971339,\n        -1.4891075,\n        -1.494705,\n        -1.5260731,\n        -1.6166315,\n        -1.5730482,\n        -1.5089144,\n        -1.5422033,\n        -1.5432531,\n        -1.6478055,\n        -1.5075778,\n        -1.4952298,\n        -1.5665935,\n        -1.583639,\n        -1.5436229,\n        -1.6401356,\n        -1.5861496,\n        -1.6939223,\n        -1.6734645,\n        -1.7411418,\n        -1.6076918,\n        -1.6060724,\n        -1.5620602,\n        -1.5942497,\n        -1.6047797,\n        -1.9373966,\n        -1.6813858,\n        -1.6566732,\n        -1.6520962,\n        -1.7416009,\n        -1.5672114,\n        -1.5435431,\n        -1.5410064,\n        -1.6136974,\n        -1.575288,\n        -1.8296115,\n        -1.635954,\n        -1.6432008,\n        -1.5914276,\n        -1.8453808,\n        -1.8873544,\n        -1.9382782,\n        -1.8738217,\n        -1.7060682,\n        -1.8990436,\n        -1.8289719,\n        -1.6985285,\n        -1.7595074,\n        -1.7536279,\n        -1.7418392,\n        -1.8090104,\n        -1.9695964,\n        -1.6294744,\n        -1.6043773,\n        -1.6309644,\n        -1.6044484,\n        -3.3081942,\n        -1.6289575,\n        -1.6165967,\n        -2.5297084,\n        -2.8675802,\n        -2.765154,\n        -1.864613,\n        -1.958626,\n        -2.2022676,\n        
-1.6808785,\n        -1.8817226,\n        -1.7862926,\n        -2.30006,\n        -1.9910297,\n        -2.413693,\n        -1.6648751,\n        -1.8299508,\n        -1.547286,\n        -3.0589058,\n        -1.8049306,\n        -1.7710308,\n        -1.7064773,\n        -1.7509496,\n        -1.8867807,\n        -2.0005727,\n        -1.6649243,\n        -2.0950646,\n        -2.0428991,\n        -2.936637,\n        -2.86679,\n        -3.4534729,\n        -2.1623518,\n        -1.9664167,\n        -2.0203717,\n        -1.9363478,\n        -2.8894434,\n        -2.441115,\n        -2.0852993,\n        -2.1043887,\n        -1.7624108,\n        -1.8068472,\n        -2.2669864,\n        -1.9438679,\n        -2.0263252,\n        -2.809571,\n        -3.5156472,\n        -2.0535638,\n        -2.7014992,\n        -1.9866724,\n        -2.115246,\n        -2.4434855,\n        -1.7675318,\n        -2.2506127,\n        -1.8771663,\n        -1.8373984,\n        -1.99535,\n        -2.0801814,\n        -1.7359631,\n        -2.1916656,\n        -1.6493181,\n        -2.2410676,\n        -1.6915656,\n        -2.0914423,\n        -1.6744939,\n        -2.904893,\n        -4.338845,\n        -4.0180583,\n        -1.713513,\n        -2.0375726,\n        -3.845712,\n        -1.8471489,\n        -2.6868517,\n        -5.378029,\n        -5.954161,\n        -2.868096,\n        -3.3108563,\n        -4.0419126,\n        -5.047232,\n        -3.3756578,\n        -2.0724225,\n        -2.0070157,\n        -3.366248,\n        -2.2862413,\n        -7.0287,\n        -2.3656926,\n        -1.8847853,\n        -2.4822066,\n        -2.2740889,\n        -1.8520479,\n        -2.5333264,\n        -3.8444266,\n        -2.0047996,\n        -2.339066,\n        -3.0678396,\n        -6.6533227,\n        -2.544771,\n        -2.0255227,\n        -2.0628326,\n        -2.344547,\n        -3.2596714,\n        -1.5772738,\n        -3.762522,\n        -3.442151,\n        -2.8425343,\n        -2.5368123,\n        
-3.4553957,\n        -3.7509146,\n        -1.9215556,\n        -2.4849117,\n        -1.809928,\n        -2.1047919,\n        -2.1177597,\n        -2.006969,\n        -2.1972163,\n        -2.0462108,\n        -1.9388325,\n        -1.9577699,\n        -2.6966898,\n        -2.8698642,\n        -3.760625,\n        -2.2737677,\n        -3.137625,\n        -3.1042125,\n        -2.9655306,\n        -3.3402255,\n        -8.947269,\n        -3.5496058,\n        -2.331477,\n        -7.22113,\n        -2.038032,\n        -2.2908099,\n        -2.40332,\n        -3.0953493,\n        -6.5161424,\n        -2.2471056,\n        -3.0264566,\n        -3.6173818,\n        -2.663741,\n        -3.1969995,\n        -4.6820235,\n        -2.3960974,\n        -3.5762146,\n        -3.4620936,\n        -4.7139273,\n        -3.4655447,\n        -1.925014,\n        -1.8825349,\n        -2.921921,\n        -2.4039183,\n        -2.037784,\n        -3.0472116,\n        -3.1644752,\n        -2.3709059,\n        -5.91918,\n        -3.0437999,\n        -3.6559024,\n        -5.0809813,\n        -3.1907015,\n        -2.965922,\n        -5.205322,\n        -3.1386008,\n        -2.574078,\n        -2.78269,\n        -3.4993951,\n        -2.2372167,\n        -4.0201955,\n        -3.0609505,\n        -2.824737,\n        -1.7775307,\n        -3.563109,\n        -2.7900605,\n        -2.907892,\n        -2.1786776,\n        -2.11636,\n        -3.7923527,\n        -2.48394,\n        -4.18271,\n        -3.610519,\n        -2.114958,\n        -3.14728,\n        -2.2880132,\n        -3.141728,\n        -2.349664,\n        -3.7127583,\n        -3.242941,\n        -2.8023453,\n        -2.5635731,\n        -2.2638948,\n        -2.2723649,\n        -2.7769027,\n        -3.9965684,\n        -1.709681\n      ],\n      \"pointIndex\": [\n        2,\n        1247,\n        256,\n        927118680,\n        1728068399,\n        275525989,\n        542996925,\n        1329648133,\n        746207253,\n        840245279,\n   
     470796809,\n        736120960,\n        426255950,\n        54291877,\n        546082091,\n        867037754,\n        997001643,\n        1670410354,\n        157329379,\n        571876769,\n        1448766970,\n        640676627,\n        312859989,\n        1820217497,\n        1073931095,\n        1711070860,\n        1409810180,\n        352270578,\n        1120091963,\n        443839731,\n        1122704792,\n        393898066,\n        78365861,\n        463014431,\n        1756508935,\n        609016580,\n        172879032,\n        1579063848,\n        824482771,\n        1418019519,\n        1904054526,\n        983596714,\n        615713350,\n        1407659374,\n        1592542167,\n        231319847,\n        5983785,\n        104215344,\n        815232617,\n        1515343063,\n        1089772401,\n        274186175,\n        740797040,\n        577096301,\n        1473655022,\n        1766792412,\n        1843099405,\n        1081775419,\n        1027182103,\n        377312116,\n        1860317336,\n        1114222603,\n        1314021632,\n        465250561,\n        1722105342,\n        1924589275,\n        498332310,\n        1918415625,\n        1874800468,\n        1760567206,\n        1731511184,\n        596688843,\n        176007985,\n        1576630510,\n        670559844,\n        1232878898,\n        1463803296,\n        1172106116,\n        854619453,\n        1599826603,\n        945325839,\n        1035374295,\n        1183946262,\n        1202664088,\n        1529404870,\n        1839767242,\n        1601917146,\n        9204005,\n        1245\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 5966322168077801063\n    },\n    {\n      \"version\": \"2.0\",\n      
\"weight\": [\n        -1.359602,\n        -1.3747983,\n        -1.3669475,\n        -1.3917516,\n        -1.3749169,\n        -1.3978804,\n        -1.3743601,\n        -1.3946548,\n        -1.4078739,\n        -1.4120302,\n        -1.4068153,\n        -1.5031053,\n        -1.4189385,\n        -1.4294072,\n        -1.3918122,\n        -1.4187685,\n        -1.451527,\n        -1.5439616,\n        -1.572372,\n        -1.5431609,\n        -1.4799433,\n        -1.4809572,\n        -1.5019768,\n        -1.5062917,\n        -1.6155772,\n        -1.4287484,\n        -1.4429781,\n        -1.4321111,\n        -1.4374574,\n        -1.4256124,\n        -1.4440918,\n        -1.6143839,\n        -1.4548193,\n        -2.1818051,\n        -1.521527,\n        -1.7077321,\n        -1.594992,\n        -1.6788688,\n        -2.0228424,\n        -1.5561807,\n        -1.7651178,\n        -1.4828888,\n        -1.6438819,\n        -1.6252949,\n        -1.5230577,\n        -1.905284,\n        -1.8408743,\n        -1.5944843,\n        -1.599917,\n        -1.6622891,\n        -1.6484537,\n        -1.4462303,\n        -1.5112286,\n        -1.6332244,\n        -1.4575502,\n        -1.488177,\n        -1.7010106,\n        -1.6937976,\n        -1.5946316,\n        -1.4753292,\n        -1.438668,\n        -1.6429601,\n        -1.4513603,\n        -2.2772639,\n        -1.7143407,\n        -1.5759331,\n        -1.5740432,\n        -2.8094275,\n        -3.3058069,\n        -2.1396427,\n        -1.5359617,\n        -2.1293366,\n        -2.0036054,\n        -1.7648398,\n        -1.8915709,\n        -1.770045,\n        -1.9245359,\n        -2.1247864,\n        -2.9160671,\n        -1.7070603,\n        -1.9920924,\n        -2.032921,\n        -1.9603047,\n        -1.747028,\n        -1.8338137,\n        -2.3822255,\n        -1.9893866,\n        -1.8081368,\n        -2.1531112,\n        -1.7730501,\n        -2.1450498,\n        -3.132966,\n        -2.2067213,\n        -1.924489,\n        -2.756486,\n     
   -1.7515236,\n        -1.8227607,\n        -2.4800415,\n        -1.8187356,\n        -2.2551625,\n        -2.1342087,\n        -2.6814182,\n        -2.2624876,\n        -2.3270738,\n        -1.9789789,\n        -2.0078046,\n        -1.5552356,\n        -2.0232923,\n        -1.7029153,\n        -1.4964641,\n        -2.9953322,\n        -1.6763792,\n        -2.9508388,\n        -1.9122832,\n        -2.121463,\n        -1.8967429,\n        -1.9978342,\n        -1.6310147,\n        -1.7970418,\n        -2.0497434,\n        -1.4872304,\n        -1.9258558,\n        -2.6047144,\n        -2.3391123,\n        -1.666762,\n        -1.6153685,\n        -1.4929992,\n        -2.3954306,\n        -2.6757016,\n        -2.0115712,\n        -3.2982168,\n        -1.8071309,\n        -4.732093,\n        -2.86006,\n        -1.6296129,\n        -3.9723382,\n        -3.5085964,\n        -3.8349621,\n        -4.332474,\n        -3.9460185,\n        -2.1973403,\n        -2.2070367,\n        -3.469926,\n        -2.7214763,\n        -2.7626624,\n        -4.706231,\n        -2.5175548,\n        -2.3759713,\n        -5.3709946,\n        -2.9945693,\n        -3.9092703,\n        -2.2447438,\n        -1.8524231,\n        -1.9837755,\n        -3.076053,\n        -4.0829,\n        -4.77054,\n        -4.881027,\n        -4.1488724,\n        -4.4731345,\n        -2.218555,\n        -2.7092788,\n        -2.4048133,\n        -3.8219318,\n        -2.6914885,\n        -2.0651407,\n        -3.2814193,\n        -2.644386,\n        -2.9437237,\n        -1.9948936,\n        -4.206107,\n        -6.2308683,\n        -2.5094018,\n        -2.0590875,\n        -2.1312287,\n        -4.2151265,\n        -2.7633343,\n        -5.1526904,\n        -3.3049357,\n        -2.1771789,\n        -1.9143611,\n        -2.174454,\n        -2.895144,\n        -3.2811656,\n        -3.7764373,\n        -2.2945032,\n        -2.8516362,\n        -2.284171,\n        -2.7553618,\n        -3.522863,\n        -4.0175667,\n        
-4.0974483,\n        -2.0300355,\n        -4.1277246,\n        -2.806212,\n        -3.3823714,\n        -3.3424401,\n        -2.6951935,\n        -2.4054852,\n        -3.0757685,\n        -4.821749,\n        -2.8912666,\n        -2.3331127,\n        -3.8103104,\n        -3.4520514,\n        -5.378011,\n        -2.5183861,\n        -3.231196,\n        -2.606206,\n        -2.0731435,\n        -2.005029,\n        -2.679055,\n        -2.581851,\n        -1.9455084,\n        -2.0369332,\n        -4.7684417,\n        -2.5067194,\n        -4.576556,\n        -4.450429,\n        -1.8643699,\n        -2.1515625,\n        -3.862825,\n        -5.5100384,\n        -2.1516352,\n        -2.974032,\n        -4.912022,\n        -3.0503857,\n        -3.2343678,\n        -2.8258865,\n        -2.246921,\n        -5.2116704,\n        -2.021993,\n        -1.9658217,\n        -3.0767105,\n        -2.3124611,\n        -4.3563266,\n        -2.8614416,\n        -2.0766013,\n        -5.007502,\n        -3.0026526,\n        -3.7666762,\n        -3.6385636,\n        -2.8427145,\n        -3.7251058,\n        -3.909793,\n        -3.4142978,\n        -3.6704612,\n        -2.6715941,\n        -3.6530285,\n        -1.9842077,\n        -1.8425122,\n        -4.820701,\n        -1.7397573,\n        -3.8442929\n      ],\n      \"pointIndex\": [\n        1,\n        1254,\n        254,\n        784888285,\n        1186575131,\n        789189176,\n        1400803504,\n        1772303693,\n        308336973,\n        67293351,\n        526444841,\n        1621772241,\n        1269871206,\n        899643397,\n        305167009,\n        310470336,\n        1651984956,\n        165952774,\n        513691375,\n        182197367,\n        1100310439,\n        978851783,\n        1191155089,\n        1861395843,\n        1034802101,\n        156803733,\n        551822515,\n        317361259,\n        1519332308,\n        523016884,\n        1462094261,\n        394615233,\n        1016367154,\n        
1135725509,\n        3108653,\n        567819053,\n        186342505,\n        1236149782,\n        1837210046,\n        1685297421,\n        1297883652,\n        862509255,\n        1489238425,\n        1111998914,\n        1221306814,\n        1889841812,\n        1290135051,\n        267762769,\n        1818043055,\n        279002659,\n        25181910,\n        744215253,\n        1761708314,\n        485415160,\n        241274699,\n        340750885,\n        1918191838,\n        364652689,\n        755478218,\n        470482903,\n        121135392,\n        402187451,\n        547412642,\n        450747522,\n        1833931018,\n        493003335,\n        251741409,\n        844852725,\n        18667531,\n        1218005248,\n        1711986371,\n        720129522,\n        609568344,\n        1271094102,\n        1082130952,\n        680078710,\n        1337202443,\n        735316700,\n        244707662,\n        1587430410,\n        1913931677,\n        1640869653,\n        990635708,\n        986766945,\n        1560781711,\n        1507851713,\n        1526411016,\n        1572475\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 5863646346142641006\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.575608,\n        -1.5814409,\n        -1.5781481,\n        -1.6155031,\n        -1.5828708,\n        -1.6040213,\n        -1.5803695,\n        -1.6196128,\n        -1.6891334,\n        -1.5856794,\n        -1.6220452,\n        -1.6704509,\n        -1.6055237,\n        -1.6525494,\n        -1.5969791,\n        -1.6239997,\n        -1.7467704,\n        -1.8449363,\n        -2.0470545,\n        -1.812362,\n        -1.6207516,\n        -1.9919968,\n        -1.6685889,\n     
   -1.7008271,\n        -1.6975868,\n        -1.6878153,\n        -1.6768947,\n        -1.6593972,\n        -1.7077638,\n        -1.6490749,\n        -1.7063341,\n        -1.7464024,\n        -1.7496744,\n        -1.8403921,\n        -1.794143,\n        -1.863235,\n        -2.5615525,\n        -2.1757193,\n        -2.2156215,\n        -1.9555329,\n        -1.9124904,\n        -1.8627983,\n        -1.6415664,\n        -2.0704143,\n        -2.0065598,\n        -1.7881496,\n        -1.9410346,\n        -1.7241747,\n        -1.9910812,\n        -1.7615238,\n        -2.0226061,\n        -1.7233528,\n        -1.734961,\n        -1.8799304,\n        -2.1976762,\n        -1.8057204,\n        -1.6640658,\n        -1.7348346,\n        -1.7793133,\n        -1.7161245,\n        -1.9093564,\n        -1.7863426,\n        -1.782226,\n        -1.748099,\n        -1.9819493,\n        -1.9970762,\n        -2.3123357,\n        -1.8498224,\n        -2.2264159,\n        -1.8386749,\n        -3.345277,\n        -1.883858,\n        -1.9660522,\n        -2.8395278,\n        -2.9833143,\n        -2.3452234,\n        -2.2418785,\n        -2.795027,\n        -2.6312294,\n        -2.8163066,\n        -1.9591815,\n        -1.9430947,\n        -1.9477563,\n        -1.9072555,\n        -2.464852,\n        -1.7954881,\n        -2.0065253,\n        -2.4018254,\n        -3.7181277,\n        -2.1584997,\n        -2.0945082,\n        -2.143602,\n        -2.129317,\n        -2.2141273,\n        -2.14982,\n        -1.958308,\n        -2.2566347,\n        -2.0064788,\n        -2.3119383,\n        -1.8199244,\n        -1.8838389,\n        -2.1038854,\n        -2.8273778,\n        -2.204648,\n        -1.773783,\n        -2.058429,\n        -1.9341174,\n        -2.0440369,\n        -2.2040834,\n        -2.2547963,\n        -2.5701208,\n        -1.887871,\n        -2.0140364,\n        -2.740925,\n        -2.2461722,\n        -1.9907988,\n        -1.8508942,\n        -2.2053757,\n        -2.3237412,\n        
-2.1764266,\n        -1.8121494,\n        -2.2599766,\n        -2.0563395,\n        -2.2131538,\n        -2.101571,\n        -1.8369926,\n        -2.1113522,\n        -1.9830061,\n        -1.8985038,\n        -2.0979133,\n        -5.289009,\n        -2.5313275,\n        -2.0532134,\n        -4.012346,\n        -3.6139667,\n        -2.0709507,\n        -1.9825712,\n        -2.4745681,\n        -2.5805273,\n        -4.426341,\n        -4.355678,\n        -3.5989287,\n        -4.8145404,\n        -2.1951988,\n        -2.3304834,\n        -3.2525547,\n        -4.0202074,\n        -3.2697027,\n        -3.6303856,\n        -4.2800465,\n        -3.1944683,\n        -3.215791,\n        -4.1126847,\n        -2.9608352,\n        -3.155402,\n        -3.1948025,\n        -3.4552639,\n        -3.7967749,\n        -3.6374478,\n        -3.9048827,\n        -3.5179439,\n        -3.2579017,\n        -3.5275407,\n        -2.9210572,\n        -4.9047728,\n        -3.7414958,\n        -2.8654935,\n        -2.240625,\n        -2.3643675,\n        -4.65246,\n        -3.8078144,\n        -1.9313331,\n        -3.3564284,\n        -2.3961694,\n        -3.6561205,\n        -4.623437,\n        -2.4091523,\n        -5.6450777,\n        -5.337534,\n        -2.49797,\n        -2.4037368,\n        -4.982056,\n        -5.2365403,\n        -2.202554,\n        -2.1648428,\n        -2.956364,\n        -3.4817119,\n        -4.1723804,\n        -3.3398435,\n        -3.7754629,\n        -2.468077,\n        -1.9774877,\n        -3.2842422,\n        -5.4654374,\n        -2.7166097,\n        -2.459306,\n        -3.7167258,\n        -3.2403567,\n        -3.3395982,\n        -2.4355524,\n        -2.1046832,\n        -2.7346094,\n        -2.687636,\n        -3.116037,\n        -3.7386253,\n        -4.0494804,\n        -6.3595433,\n        -2.8649182,\n        -2.4392982,\n        -2.4244318,\n        -2.0033848,\n        -3.9102988,\n        -2.9147055,\n        -3.494289,\n        -2.4981205,\n        
-4.4003124,\n        -5.03912,\n        -2.496209,\n        -4.6680713,\n        -3.4900355,\n        -2.2696192,\n        -4.592746,\n        -4.233194,\n        -3.0151408,\n        -2.5300114,\n        -3.3627484,\n        -2.2335417,\n        -3.8549244,\n        -3.4596527,\n        -4.547544,\n        -2.9830463,\n        -2.4512615,\n        -6.091157,\n        -2.3038387,\n        -2.8788407,\n        -2.7567303,\n        -4.153861,\n        -3.0705404,\n        -3.3430862,\n        -2.7613983,\n        -4.5446625,\n        -2.7000647,\n        -2.2375746,\n        -2.679622,\n        -5.9046073,\n        -3.7500348,\n        -2.0826974,\n        -3.1326358,\n        -2.3658702,\n        -2.9701922,\n        -2.2291567,\n        -3.1797545,\n        -3.7251885,\n        -4.6901045,\n        -4.6262054,\n        -1.9968588\n      ],\n      \"pointIndex\": [\n        1,\n        1251,\n        256,\n        1306762992,\n        565865793,\n        124121482,\n        1738965356,\n        1408181594,\n        430654305,\n        1411568429,\n        1171867843,\n        1047777375,\n        1119647973,\n        266038704,\n        25368794,\n        1841871691,\n        1862650395,\n        1274069771,\n        1474173702,\n        1946760321,\n        1708540483,\n        1290361016,\n        1546358337,\n        1378278783,\n        1583166718,\n        740915715,\n        1456470874,\n        313253948,\n        62598705,\n        60392692,\n        229433700,\n        246820125,\n        1294601145,\n        1692135495,\n        523355825,\n        798583808,\n        1317097459,\n        695367492,\n        1427432701,\n        1262025497,\n        1529849795,\n        1466036129,\n        999579821,\n        1866554081,\n        1809138432,\n        910825491,\n        373589947,\n        507923371,\n        1182244497,\n        1074960353,\n        1532937894,\n        1157218619,\n        1302326523,\n        320144715,\n        1668549230,\n        
788611575,\n        585858314,\n        362707653,\n        139653123,\n        396733880,\n        1690075672,\n        466833284,\n        1858004361,\n        191645971,\n        351239354,\n        502669945,\n        6692671,\n        1835161702,\n        554683535,\n        592019263,\n        588578366,\n        732981881,\n        689650548,\n        656897546,\n        664088272,\n        713102952,\n        1081854367,\n        1528558585,\n        1324596181,\n        1648569542,\n        1612941687,\n        1351288215,\n        469215660,\n        1033732722,\n        1114450720,\n        1333201431,\n        1577051617,\n        1384545,\n        1250\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -3485342436052657063\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4646834,\n        -1.4667517,\n        -1.4848248,\n        -1.4955488,\n        -1.4748328,\n        -1.4996586,\n        -1.5102067,\n        -1.5423312,\n        -1.5171639,\n        -1.5108787,\n        -1.5430261,\n        -1.5118963,\n        -1.5083525,\n        -1.5142311,\n        -1.5558296,\n        -1.58652,\n        -1.5521219,\n        -1.5663732,\n        -1.5962416,\n        -1.5707482,\n        -1.5287815,\n        -1.5511978,\n        -1.5490642,\n        -1.8354799,\n        -1.6619667,\n        -1.5551033,\n        -1.5756073,\n        -1.667111,\n        -1.5569456,\n        -1.6726259,\n        -1.564499,\n        -1.6257182,\n        -1.6528468,\n        -1.5773174,\n        -1.6714104,\n        -1.6499722,\n        -1.8766035,\n        -1.6815758,\n        -1.6395549,\n        -1.6178705,\n        -1.6063552,\n        -1.9306438,\n        -1.610218,\n        -1.64699,\n   
     -1.6621773,\n        -1.8209532,\n        -1.5760742,\n        -1.8630723,\n        -2.0983984,\n        -1.982195,\n        -1.7652756,\n        -1.7825636,\n        -1.6701146,\n        -1.6431956,\n        -1.6910659,\n        -1.7496774,\n        -1.8248948,\n        -1.5788943,\n        -1.7409449,\n        -1.7139764,\n        -1.7687455,\n        -2.3207808,\n        -1.5775722,\n        -2.337894,\n        -1.8376864,\n        -2.0159893,\n        -1.7816584,\n        -2.2645795,\n        -1.8502923,\n        -2.4336076,\n        -1.958431,\n        -1.9566456,\n        -1.6514288,\n        -1.9508808,\n        -2.0869696,\n        -1.8643857,\n        -1.8263228,\n        -1.9006462,\n        -1.9109508,\n        -2.375001,\n        -1.6502724,\n        -2.424987,\n        -1.9207541,\n        -1.9790317,\n        -2.4335063,\n        -1.9445292,\n        -1.767669,\n        -1.7854769,\n        -2.0634897,\n        -2.0707114,\n        -1.7828038,\n        -1.987555,\n        -1.9973936,\n        -1.7249879,\n        -1.617046,\n        -1.9723246,\n        -2.5366223,\n        -2.6620522,\n        -2.2095118,\n        -2.2030852,\n        -3.3440301,\n        -1.9094471,\n        -2.7955794,\n        -1.8991795,\n        -2.0969698,\n        -1.8752227,\n        -1.8043672,\n        -2.1406016,\n        -2.1403084,\n        -2.2700257,\n        -2.9699655,\n        -2.087489,\n        -2.1303475,\n        -1.8860812,\n        -1.9416108,\n        -1.6463115,\n        -2.4058745,\n        -1.9469516,\n        -1.7948481,\n        -1.9236622,\n        -1.968078,\n        -2.482212,\n        -2.043224,\n        -2.6402094,\n        -2.669888,\n        -1.9306325,\n        -1.6355859,\n        -2.4234571,\n        -4.2518926,\n        -2.8369408,\n        -2.070781,\n        -2.1696022,\n        -3.0118892,\n        -2.9100466,\n        -1.9155637,\n        -2.3898082,\n        -3.611016,\n        -2.1233342,\n        -2.417018,\n        -2.6857314,\n   
     -4.0778794,\n        -2.3300252,\n        -4.301172,\n        -4.639753,\n        -2.4882767,\n        -1.7226529,\n        -4.915581,\n        -2.089176,\n        -2.3308697,\n        -2.1675203,\n        -2.1050093,\n        -5.076467,\n        -2.0216632,\n        -2.3076777,\n        -1.8784783,\n        -4.519786,\n        -2.816605,\n        -6.4273553,\n        -3.5487063,\n        -2.7025511,\n        -2.7042325,\n        -2.0779943,\n        -1.8412688,\n        -2.822589,\n        -3.3284762,\n        -3.7763994,\n        -3.213167,\n        -2.7693353,\n        -1.9806886,\n        -2.5737548,\n        -3.1934195,\n        -2.1217012,\n        -2.1747608,\n        -1.9155222,\n        -2.1172097,\n        -2.39838,\n        -1.9219064,\n        -2.439878,\n        -2.5006132,\n        -5.60645,\n        -2.0933144,\n        -3.2653193,\n        -3.3058648,\n        -3.427139,\n        -3.3744586,\n        -2.867972,\n        -2.6194758,\n        -1.8476988,\n        -3.124068,\n        -2.5218716,\n        -1.6921393,\n        -5.984711,\n        -2.1221485,\n        -2.6376462,\n        -5.501952,\n        -2.9553864,\n        -2.7800412,\n        -4.575756,\n        -2.4023063,\n        -2.88531,\n        -3.1735258,\n        -6.147703,\n        -3.7064834,\n        -2.2638023,\n        -2.342982,\n        -3.5818238,\n        -5.544716,\n        -3.0300126,\n        -5.0124764,\n        -5.1720357,\n        -2.489125,\n        -2.1045215,\n        -2.5063248,\n        -2.5694091,\n        -2.4880934,\n        -4.0698066,\n        -3.3531225,\n        -3.9052632,\n        -3.8701982,\n        -2.6745663,\n        -3.0083475,\n        -4.558422,\n        -3.5705185,\n        -2.1124594,\n        -2.4647415,\n        -3.4744031,\n        -2.2037675,\n        -2.4264262,\n        -5.6101284,\n        -2.4054873,\n        -2.2422447,\n        -4.917252,\n        -1.9251585,\n        -2.590822,\n        -2.6595664,\n        -2.2246978,\n        
-2.3974667,\n        -2.8268957,\n        -2.4082587,\n        -1.9532785,\n        -4.3572836,\n        -7.100543,\n        -1.9996116,\n        -2.8446536,\n        -3.6927261,\n        -3.9367738,\n        -2.9279373,\n        -3.4739842,\n        -4.2427716,\n        -4.3492055,\n        -3.573538,\n        -3.8667417,\n        -2.2903125,\n        -4.8911166,\n        -3.1270065\n      ],\n      \"pointIndex\": [\n        11,\n        1255,\n        255,\n        1125095316,\n        1225162176,\n        266281107,\n        145079851,\n        1823422636,\n        1444177912,\n        1468908580,\n        746670828,\n        1326526904,\n        1552504204,\n        1641656912,\n        1809083391,\n        42269167,\n        156643563,\n        500660808,\n        1613598470,\n        514128595,\n        1735397476,\n        943131624,\n        810301342,\n        1890323476,\n        1737317963,\n        1744212142,\n        1664960398,\n        1836727015,\n        827400922,\n        340694445,\n        1681448655,\n        997949940,\n        390304714,\n        1183284360,\n        1616489069,\n        462266295,\n        508799902,\n        930032643,\n        921531117,\n        1011764571,\n        718598253,\n        704388491,\n        1570811030,\n        972584919,\n        1400711983,\n        713078717,\n        789564898,\n        1610032627,\n        246278113,\n        1487608946,\n        1729543265,\n        130011520,\n        922436406,\n        1608818438,\n        1548858618,\n        1239620460,\n        322350255,\n        1023052825,\n        14181121,\n        349967450,\n        361023885,\n        369940490,\n        1467829099,\n        1132872206,\n        1336230914,\n        621519519,\n        449632149,\n        1814336297,\n        1208394719,\n        1073046822,\n        498435614,\n        658340617,\n        1352005,\n        1664189276,\n        1404052473,\n        1826399365,\n        616785564,\n        
1837282565,\n        769356216,\n        970320150,\n        1367241621,\n        1334791867,\n        1112420348,\n        847842300,\n        995865010,\n        1056532534,\n        1374257984,\n        1928407706\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -566338866570349155\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6110169,\n        -1.6257101,\n        -1.6132237,\n        -1.6336024,\n        -1.6341721,\n        -1.6298529,\n        -1.632508,\n        -1.6436764,\n        -1.6744068,\n        -1.6673356,\n        -1.6571995,\n        -1.662185,\n        -1.6644176,\n        -1.7309585,\n        -1.6500595,\n        -1.670506,\n        -1.802056,\n        -1.7575324,\n        -1.80587,\n        -1.7163657,\n        -1.7433038,\n        -1.674397,\n        -1.9227827,\n        -1.8037908,\n        -1.8172027,\n        -1.6667875,\n        -1.7015543,\n        -1.8367963,\n        -1.7550453,\n        -1.6542269,\n        -1.697557,\n        -1.7011623,\n        -1.9053563,\n        -1.8065615,\n        -1.9608865,\n        -1.789415,\n        -2.0639627,\n        -2.0471902,\n        -2.0799065,\n        -1.8248667,\n        -1.9222915,\n        -1.8206872,\n        -1.8429751,\n        -1.7309147,\n        -1.6954039,\n        -2.3076153,\n        -1.9629672,\n        -2.556063,\n        -1.9714468,\n        -2.0777094,\n        -1.9243582,\n        -1.9282019,\n        -1.7226651,\n        -1.7366803,\n        -1.7797271,\n        -1.8666673,\n        -1.8559352,\n        -2.0115576,\n        -2.0343313,\n        -2.3465438,\n        -1.7615066,\n        -1.7925524,\n        -1.7505562,\n        -1.8233827,\n        -1.7639633,\n        -2.5499158,\n  
      -2.0778117,\n        -2.393876,\n        -2.1003664,\n        -2.4747236,\n        -2.0314233,\n        -2.6079473,\n        -2.0488465,\n        -2.0726097,\n        -2.420869,\n        -2.1173067,\n        -3.612475,\n        -2.2708921,\n        -2.2564926,\n        -2.619866,\n        -2.0709743,\n        -2.1717794,\n        -2.234867,\n        -3.5616586,\n        -1.8582594,\n        -2.2900496,\n        -2.5066276,\n        -1.7869142,\n        -2.2555428,\n        -1.739632,\n        -1.7395467,\n        -2.7097666,\n        -2.593255,\n        -2.9155746,\n        -2.1162503,\n        -2.8907096,\n        -3.022455,\n        -2.0814097,\n        -1.9743068,\n        -2.6041746,\n        -2.7876935,\n        -2.1046832,\n        -2.2939086,\n        -1.9349879,\n        -1.9833627,\n        -2.1678498,\n        -2.122495,\n        -1.9412091,\n        -1.8106226,\n        -1.8935112,\n        -2.7455416,\n        -2.1053557,\n        -2.0693269,\n        -1.9052274,\n        -1.8595452,\n        -3.2098026,\n        -2.1161914,\n        -2.6492712,\n        -2.1443713,\n        -2.5484862,\n        -2.5748754,\n        -1.8049053,\n        -1.8047763,\n        -2.359784,\n        -2.242467,\n        -2.145636,\n        -1.7720666,\n        -1.8494686,\n        -2.4786148,\n        -3.2393236,\n        -2.545857,\n        -2.5782166,\n        -3.0330493,\n        -3.2381384,\n        -2.7825084,\n        -4.427443,\n        -2.4144044,\n        -2.4751902,\n        -2.1578116,\n        -2.694748,\n        -4.1466045,\n        -3.1840563,\n        -2.1466255,\n        -2.6621218,\n        -3.1598535,\n        -2.0791852,\n        -2.5672572,\n        -2.3185058,\n        -2.7774062,\n        -4.564393,\n        -2.682265,\n        -2.536275,\n        -5.773803,\n        -3.7649782,\n        -3.791073,\n        -2.53297,\n        -2.7084506,\n        -3.979404,\n        -2.3734815,\n        -2.8669748,\n        -2.7112288,\n        -2.3446472,\n        
-2.3289793,\n        -2.5032165,\n        -3.599362,\n        -2.8568487,\n        -2.863185,\n        -4.8855476,\n        -8.4651,\n        -2.6190147,\n        -3.374161,\n        -3.6828594,\n        -2.9956791,\n        -2.5724895,\n        -3.271289,\n        -2.27042,\n        -3.229629,\n        -2.7546244,\n        -3.3204,\n        -1.8723733,\n        -2.225646,\n        -6.912936,\n        -3.2054763,\n        -4.020286,\n        -8.263332,\n        -3.6173582,\n        -2.6950696,\n        -4.516153,\n        -4.0550213,\n        -4.3354163,\n        -2.9266644,\n        -3.224055,\n        -4.048556,\n        -4.0799303,\n        -3.4536102,\n        -2.6236393,\n        -2.9264743,\n        -4.9663267,\n        -2.129841,\n        -2.8336856,\n        -3.5423813,\n        -3.6727676,\n        -3.2670007,\n        -2.616068,\n        -3.2305954,\n        -3.4472659,\n        -3.2471404,\n        -2.6500194,\n        -2.8602796,\n        -1.9934101,\n        -3.1204846,\n        -2.2264602,\n        -2.9697616,\n        -6.267233,\n        -3.3636794,\n        -7.565011,\n        -3.5965343,\n        -1.9816792,\n        -2.0770934,\n        -1.9365543,\n        -2.0132627,\n        -2.8048935,\n        -4.533492,\n        -2.4005785,\n        -5.5340495,\n        -2.0961595,\n        -2.2101908,\n        -2.0628667,\n        -2.1616464,\n        -2.7200656,\n        -2.3370745,\n        -4.169037,\n        -6.5144353,\n        -2.5519133,\n        -5.150039,\n        -3.290528,\n        -4.880377,\n        -2.9813533,\n        -3.8787777,\n        -6.4337726,\n        -2.9560385,\n        -3.278851,\n        -3.3013442,\n        -3.0591831,\n        -2.538268,\n        -1.9257783,\n        -2.6350656,\n        -2.376288,\n        -4.180156,\n        -2.671247,\n        -6.815842,\n        -3.3597534,\n        -2.939458,\n        -4.6065555,\n        -2.9247332,\n        -1.9990978\n      ],\n      \"pointIndex\": [\n        6,\n        1255,\n        
256,\n        856366176,\n        1331580477,\n        121162326,\n        1861397757,\n        1784140675,\n        722625927,\n        369512698,\n        999326733,\n        631722989,\n        1147316518,\n        1889250899,\n        676780164,\n        948684465,\n        823038541,\n        58269054,\n        495469040,\n        867805346,\n        849223592,\n        821504320,\n        558589563,\n        1557329491,\n        832437078,\n        29889774,\n        1526663079,\n        1698982164,\n        464463946,\n        1587517277,\n        69230377,\n        1550349704,\n        884708891,\n        1714424156,\n        1320667169,\n        158292080,\n        579023462,\n        611679432,\n        1030184143,\n        211940121,\n        795876162,\n        1703808486,\n        1738281610,\n        1786066150,\n        1465617222,\n        232889955,\n        23590000,\n        1347610470,\n        1502168760,\n        389273338,\n        548657299,\n        112569024,\n        1150635185,\n        749910194,\n        10760582,\n        1374598658,\n        928145740,\n        364345111,\n        534775007,\n        1345172731,\n        435641728,\n        1850689153,\n        1457779896,\n        577917354,\n        458279036,\n        520015476,\n        1256635884,\n        724898515,\n        1907397043,\n        548371635,\n        1238248680,\n        1598492494,\n        32791065,\n        639123069,\n        634188682,\n        1881397161,\n        674880170,\n        1385660437,\n        779881167,\n        1149958022,\n        864778067,\n        1885143112,\n        1006943369,\n        1209156139,\n        1638461945,\n        1223199723,\n        1902694839,\n        1917072287,\n        1249\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      
\"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 4189210767075612828\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5809543,\n        -1.6052395,\n        -1.5824018,\n        -1.6341741,\n        -1.6220462,\n        -1.6090232,\n        -1.587091,\n        -1.672407,\n        -1.7943244,\n        -1.6540173,\n        -1.6431609,\n        -1.7253053,\n        -1.656684,\n        -1.7000749,\n        -1.5897003,\n        -1.7035328,\n        -1.7050048,\n        -1.7974013,\n        -1.8766737,\n        -1.7098727,\n        -1.8727963,\n        -1.710227,\n        -1.6779032,\n        -1.7963215,\n        -1.7875885,\n        -1.6611732,\n        -1.6593343,\n        -1.8127413,\n        -1.8638108,\n        -1.6368163,\n        -1.5975319,\n        -1.7131653,\n        -1.7733179,\n        -1.754794,\n        -1.7803135,\n        -1.8625493,\n        -1.8076078,\n        -2.101797,\n        -1.9411423,\n        -1.9826176,\n        -1.7513057,\n        -1.915989,\n        -2.0829697,\n        -1.7271876,\n        -2.0167289,\n        -1.9224572,\n        -2.0267813,\n        -1.8872782,\n        -1.9540212,\n        -1.8734661,\n        -1.8287128,\n        -1.8533505,\n        -1.6984493,\n        -1.7122878,\n        -1.767857,\n        -1.833617,\n        -1.8400757,\n        -1.9171269,\n        -1.8674517,\n        -1.67116,\n        -1.650148,\n        -2.213368,\n        -1.6337502,\n        -1.9484588,\n        -2.0833864,\n        -1.794819,\n        -1.8741286,\n        -1.7609497,\n        -1.8035247,\n        -2.120493,\n        -1.8810593,\n        -2.0289762,\n        -2.8589911,\n        -2.7678647,\n        -1.8345987,\n        -2.4043863,\n        -2.262942,\n        -1.956295,\n        -1.9800524,\n        -2.6081913,\n        -2.3866434,\n        -2.3147933,\n        -1.8962301,\n        -2.3161876,\n        -2.0217319,\n        -2.3371081,\n        -2.0991838,\n        -2.1103528,\n   
     -2.0742226,\n        -2.9598522,\n        -2.7799296,\n        -1.9263036,\n        -2.0695686,\n        -2.340692,\n        -3.773643,\n        -2.1007228,\n        -2.529532,\n        -2.0259535,\n        -2.5129123,\n        -2.2907164,\n        -1.8977658,\n        -1.9150738,\n        -1.8403314,\n        -2.215647,\n        -2.2819843,\n        -1.8791007,\n        -2.59138,\n        -1.7411674,\n        -2.092544,\n        -2.0886123,\n        -2.1413665,\n        -2.1140273,\n        -1.920856,\n        -1.8703722,\n        -2.0212836,\n        -2.1943898,\n        -2.301725,\n        -2.5547743,\n        -2.0424213,\n        -2.4508665,\n        -2.4707499,\n        -1.8075922,\n        -1.8725858,\n        -3.5640264,\n        -2.6105704,\n        -2.165559,\n        -2.1232579,\n        -2.6026196,\n        -3.8884327,\n        -2.1737967,\n        -2.728757,\n        -2.7798426,\n        -2.0043423,\n        -2.439043,\n        -2.4831007,\n        -1.8531009,\n        -2.0460627,\n        -2.9669995,\n        -3.371271,\n        -2.7603664,\n        -5.145329,\n        -3.5285287,\n        -1.9487286,\n        -2.1082284,\n        -2.2038436,\n        -4.115193,\n        -5.2521424,\n        -3.1030037,\n        -3.3301845,\n        -2.8608727,\n        -2.355765,\n        -2.9226215,\n        -5.292759,\n        -2.8541307,\n        -5.8843527,\n        -2.9329383,\n        -4.2045536,\n        -2.4997535,\n        -2.1682842,\n        -3.929541,\n        -2.8143623,\n        -3.2451994,\n        -4.985799,\n        -2.5752323,\n        -5.0215697,\n        -3.2431254,\n        -2.9068696,\n        -2.3326433,\n        -5.0619807,\n        -4.032271,\n        -2.251037,\n        -2.4568508,\n        -2.573857,\n        -3.296703,\n        -3.1517873,\n        -2.388365,\n        -3.1821537,\n        -2.3441288,\n        -3.32923,\n        -3.3475256,\n        -5.413077,\n        -4.935657,\n        -3.4767509,\n        -3.1256387,\n        
-2.2770538,\n        -2.696921,\n        -4.8854313,\n        -3.588679,\n        -7.3792324,\n        -3.8347638,\n        -4.33273,\n        -3.9180882,\n        -2.7039533,\n        -2.640478,\n        -3.0467978,\n        -2.8800092,\n        -3.728069,\n        -2.8766658,\n        -3.2154396,\n        -3.0740054,\n        -2.8467777,\n        -3.4180677,\n        -2.7143333,\n        -2.076263,\n        -2.0485897,\n        -5.0210037,\n        -2.7091832,\n        -2.9298787,\n        -2.4252849,\n        -3.3718836,\n        -2.4471207,\n        -3.6353476,\n        -2.2861705,\n        -2.7411432,\n        -5.5288014,\n        -1.7537444,\n        -3.9634967,\n        -2.5614748,\n        -4.5685825,\n        -2.5693524,\n        -2.8589904,\n        -4.052606,\n        -3.0085058,\n        -8.247205,\n        -3.4645865,\n        -2.7214427,\n        -2.5838864,\n        -4.3286037,\n        -2.4010165,\n        -3.414822,\n        -2.885385,\n        -2.4666367,\n        -4.323651,\n        -3.8383358,\n        -3.139137,\n        -3.4275022,\n        -3.237697,\n        -3.1840107,\n        -2.252056,\n        -3.050251,\n        -3.1270928,\n        -2.7886403,\n        -3.3036432,\n        -3.7236898,\n        -4.4391513,\n        -3.3557675,\n        -2.2811415,\n        -4.0825143,\n        -3.5760353,\n        -3.5455575,\n        -2.8113878,\n        -3.084719,\n        -5.0754757,\n        -6.223174,\n        -2.2824337\n      ],\n      \"pointIndex\": [\n        1,\n        1255,\n        255,\n        1017284302,\n        1089734496,\n        10949292,\n        1652093439,\n        1596861574,\n        1262420369,\n        30885259,\n        1899089182,\n        702589526,\n        226343027,\n        650443794,\n        938940964,\n        204420704,\n        449388811,\n        684278980,\n        484967400,\n        1865413300,\n        624197316,\n        792817202,\n        1057627947,\n        1911730982,\n        49351379,\n        
269342876,\n        1366723898,\n        465991636,\n        317674381,\n        568846759,\n        474593987,\n        25762502,\n        397721334,\n        876847266,\n        945371139,\n        786347852,\n        1099848013,\n        1463711320,\n        1779232933,\n        1218049435,\n        1038543087,\n        1886678781,\n        1338489441,\n        1355967911,\n        191822678,\n        73117530,\n        1172672466,\n        1929280355,\n        265143882,\n        276538306,\n        286271887,\n        508121392,\n        1622063844,\n        952218103,\n        130496378,\n        500479695,\n        333865593,\n        467046039,\n        1913019693,\n        1509216529,\n        384146715,\n        1617139672,\n        1314713586,\n        1846961643,\n        962770663,\n        1626111478,\n        456196157,\n        1804321800,\n        518594326,\n        547655169,\n        361125140,\n        92283015,\n        1293741157,\n        1103822982,\n        1531462748,\n        741344085,\n        721349132,\n        1695309884,\n        772484344,\n        435411073,\n        1857039100,\n        915276005,\n        979134019,\n        1420299320,\n        1245253122,\n        1506772618,\n        105372024,\n        1976398838\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 5716410955948198572\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4723679,\n        -1.4748684,\n        -1.4796734,\n        -1.4989817,\n        -1.4847839,\n        -1.5271605,\n        -1.4858755,\n        -1.6206623,\n        -1.5259806,\n        -1.5336944,\n        -1.5135266,\n        -1.5312008,\n        -1.544349,\n        -1.4907415,\n        -1.5200644,\n      
  -1.8607095,\n        -1.71595,\n        -1.6295044,\n        -1.6614683,\n        -1.5787373,\n        -1.5668296,\n        -1.6186606,\n        -1.7220145,\n        -1.6395398,\n        -1.637389,\n        -1.5587585,\n        -1.6250982,\n        -1.574669,\n        -1.5053121,\n        -1.5367,\n        -1.6281161,\n        -1.9685827,\n        -2.0332274,\n        -1.8441141,\n        -1.8605316,\n        -1.6806389,\n        -2.0616343,\n        -1.7017778,\n        -2.018361,\n        -1.6003023,\n        -1.7140663,\n        -1.5801271,\n        -1.5731896,\n        -1.6345347,\n        -2.0808349,\n        -1.9523935,\n        -1.7433275,\n        -1.9722502,\n        -1.6819129,\n        -1.7351564,\n        -1.6872911,\n        -1.6488663,\n        -1.6682271,\n        -1.6863096,\n        -1.7568988,\n        -1.6824553,\n        -1.9729172,\n        -1.5136033,\n        -1.5075728,\n        -1.9928776,\n        -1.7682136,\n        -1.9389695,\n        -1.6388328,\n        -2.048581,\n        -2.7856648,\n        -2.678215,\n        -2.0568538,\n        -2.279208,\n        -2.3106277,\n        -2.098067,\n        -2.046037,\n        -2.4830816,\n        -2.1151178,\n        -2.151959,\n        -2.2791963,\n        -1.7717863,\n        -2.2011366,\n        -2.4435546,\n        -2.215945,\n        -1.8173331,\n        -1.6686308,\n        -1.7595752,\n        -1.8025055,\n        -1.6080472,\n        -1.6296883,\n        -1.7254086,\n        -3.258416,\n        -2.5550792,\n        -1.8049688,\n        -2.0818071,\n        -3.0147269,\n        -2.116284,\n        -2.5157568,\n        -2.0794017,\n        -1.9321847,\n        -2.090745,\n        -2.0223753,\n        -1.7609588,\n        -1.833841,\n        -3.121823,\n        -2.0643542,\n        -2.269138,\n        -2.14882,\n        -2.6094937,\n        -2.3333528,\n        -2.1854448,\n        -1.7575346,\n        -1.717381,\n        -1.8458064,\n        -1.797531,\n        -1.922467,\n        
-2.4076557,\n        -2.198741,\n        -2.005758,\n        -2.3674817,\n        -1.5723048,\n        -2.205375,\n        -1.9498025,\n        -1.6438376,\n        -2.7989624,\n        -2.2850306,\n        -2.0528595,\n        -1.8120686,\n        -2.024037,\n        -1.9523389,\n        -2.3633564,\n        -2.2165782,\n        -5.296845,\n        -2.6003857,\n        -3.3466606,\n        -3.048333,\n        -2.975929,\n        -4.850967,\n        -3.008884,\n        -2.6241615,\n        -3.4881067,\n        -7.5118446,\n        -3.2324533,\n        -2.4897316,\n        -2.1628304,\n        -3.6258216,\n        -3.848587,\n        -3.0612652,\n        -2.6791778,\n        -3.2420094,\n        -2.194795,\n        -2.1248574,\n        -2.9027042,\n        -2.3620207,\n        -2.8662558,\n        -2.6906807,\n        -1.7725453,\n        -2.0574784,\n        -4.02859,\n        -3.6893065,\n        -3.6238759,\n        -2.5562239,\n        -3.8399627,\n        -3.0008607,\n        -4.83231,\n        -2.5115073,\n        -5.7743516,\n        -2.8788157,\n        -2.198657,\n        -2.6013656,\n        -1.9853501,\n        -4.5019355,\n        -2.4918034,\n        -6.2223206,\n        -2.4242911,\n        -1.8496494,\n        -3.8475597,\n        -4.650009,\n        -3.535911,\n        -4.1198554,\n        -2.7007039,\n        -2.8500903,\n        -2.1353986,\n        -3.5506113,\n        -2.997891,\n        -4.204195,\n        -4.4949207,\n        -3.9670541,\n        -2.1987236,\n        -4.757135,\n        -5.226631,\n        -2.7338564,\n        -3.3281708,\n        -4.295948,\n        -2.6056557,\n        -2.1256652,\n        -3.6111758,\n        -2.6671848,\n        -3.766492,\n        -2.9153528,\n        -4.150468,\n        -4.9099946,\n        -2.6949108,\n        -1.8693603,\n        -4.0355754,\n        -4.0624247,\n        -2.5225918,\n        -2.5343404,\n        -2.8199022,\n        -3.2567928,\n        -2.29005,\n        -2.2990692,\n        
-3.128523,\n        -3.4083765,\n        -4.047287,\n        -2.9685643,\n        -2.8527641,\n        -2.6012478,\n        -4.909524,\n        -2.082991,\n        -3.2168367,\n        -2.2418272,\n        -1.862048,\n        -2.1094995,\n        -2.2545705,\n        -3.5991924,\n        -2.4222345,\n        -3.9604158,\n        -2.5520163,\n        -3.1371665,\n        -3.4637997,\n        -2.9052608,\n        -2.9302106,\n        -3.5320182,\n        -3.6258898,\n        -2.5361264,\n        -4.311318,\n        -1.7770904,\n        -2.2072034,\n        -2.2768364,\n        -3.1182168,\n        -2.3346496,\n        -2.716515,\n        -2.167842,\n        -2.8909123,\n        -3.067276,\n        -5.121871,\n        -2.5801742,\n        -2.2827775,\n        -2.8184876,\n        -2.1159203,\n        -2.0943155,\n        -2.2802548,\n        -3.1016576,\n        -2.298147,\n        -5.552916,\n        -3.8240209,\n        -3.769819,\n        -4.703353,\n        -4.1772947\n      ],\n      \"pointIndex\": [\n        2,\n        1251,\n        255,\n        1580393585,\n        937796393,\n        369078070,\n        1088167617,\n        1820918241,\n        107317162,\n        702518978,\n        180405041,\n        1542201070,\n        1152682873,\n        525044586,\n        190877682,\n        1447823620,\n        389110928,\n        169082333,\n        225169030,\n        76478458,\n        673272180,\n        1582572407,\n        1170513083,\n        1761994507,\n        1762524214,\n        860487169,\n        1138576973,\n        989335400,\n        1211982194,\n        374589419,\n        866698654,\n        1165170093,\n        83589305,\n        1044900109,\n        1061977604,\n        366177616,\n        1673983651,\n        649306908,\n        82352927,\n        719766844,\n        788108580,\n        909109540,\n        1536814447,\n        1113609650,\n        1595319572,\n        1819558734,\n        748175170,\n        570534497,\n        291551019,\n  
      1665537188,\n        53056636,\n        118843013,\n        1841587709,\n        509988965,\n        1681295645,\n        1837604773,\n        727677842,\n        28256415,\n        1720623001,\n        554211226,\n        148756275,\n        822589756,\n        419589759,\n        1527317775,\n        161592381,\n        1866867503,\n        1169484848,\n        967047195,\n        539531095,\n        574220620,\n        1360574594,\n        1325502980,\n        199192459,\n        1139894476,\n        1418390616,\n        686476847,\n        214632009,\n        1699060011,\n        457644879,\n        830359452,\n        1326960,\n        1094603602,\n        1163385703,\n        1692138214,\n        1252348902,\n        1298375863,\n        1502677091,\n        1952872299\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -5848974963479365812\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.585127,\n        -1.5865705,\n        -1.5962614,\n        -1.6085825,\n        -1.596154,\n        -1.630115,\n        -1.6023626,\n        -1.6131486,\n        -1.6124258,\n        -1.6522865,\n        -1.6071877,\n        -1.7168533,\n        -1.6414491,\n        -1.6560875,\n        -1.6027256,\n        -1.7860086,\n        -1.6391898,\n        -1.7215892,\n        -1.6661743,\n        -1.7551733,\n        -1.6553258,\n        -1.8516773,\n        -1.7214888,\n        -1.797124,\n        -1.8066955,\n        -1.6430796,\n        -1.6840556,\n        -1.821842,\n        -1.715812,\n        -1.7092791,\n        -1.6264325,\n        -1.8273116,\n        -1.8516202,\n        -1.8577257,\n        -1.8426911,\n        -1.8244375,\n        -1.8282402,\n        -1.7095118,\n        
-1.8318115,\n        -1.9583569,\n        -1.9977958,\n        -1.85578,\n        -1.6765971,\n        -2.2481804,\n        -2.2947147,\n        -1.9133995,\n        -1.8031982,\n        -2.179512,\n        -2.439674,\n        -1.8818381,\n        -1.8582158,\n        -1.9036092,\n        -1.7120684,\n        -1.8141488,\n        -1.7251884,\n        -2.0264452,\n        -1.8911572,\n        -1.8964896,\n        -2.2973378,\n        -2.332079,\n        -1.8439084,\n        -1.7138196,\n        -1.7435124,\n        -1.8923931,\n        -2.0120213,\n        -2.3480647,\n        -2.4271514,\n        -2.6368625,\n        -2.3307495,\n        -1.9175985,\n        -2.4014747,\n        -1.911802,\n        -1.8705333,\n        -2.248779,\n        -1.97523,\n        -1.9332904,\n        -2.081916,\n        -2.0845778,\n        -1.8409702,\n        -3.1433861,\n        -2.1671054,\n        -2.2489593,\n        -2.0810833,\n        -1.9405694,\n        -1.9257721,\n        -1.6870952,\n        -1.7625767,\n        -2.321876,\n        -2.5269024,\n        -2.659979,\n        -2.3002388,\n        -2.2201302,\n        -2.3069,\n        -2.1379435,\n        -3.4479458,\n        -2.259122,\n        -2.186968,\n        -2.7688255,\n        -2.7101648,\n        -1.8897277,\n        -2.687137,\n        -2.4541807,\n        -2.3081462,\n        -2.0229635,\n        -2.012898,\n        -1.8474618,\n        -2.2427373,\n        -1.9241083,\n        -2.318026,\n        -2.2848606,\n        -1.7845504,\n        -2.4813504,\n        -2.520892,\n        -2.1248918,\n        -1.9119974,\n        -1.9928011,\n        -3.4082627,\n        -2.4585147,\n        -2.3150163,\n        -2.4207702,\n        -2.5795302,\n        -2.2035196,\n        -2.4063442,\n        -1.8016686,\n        -2.0338738,\n        -2.1928742,\n        -1.7814916,\n        -2.5912824,\n        -3.2466595,\n        -2.373099,\n        -2.3038535,\n        -3.493652,\n        -3.802891,\n        -2.490586,\n        
-2.7070038,\n        -2.9038296,\n        -3.8930063,\n        -2.906122,\n        -5.50004,\n        -2.2162237,\n        -1.9646659,\n        -2.7175615,\n        -3.1687071,\n        -2.3840399,\n        -2.0239818,\n        -1.9653404,\n        -6.275639,\n        -4.7269425,\n        -2.6616986,\n        -2.9380362,\n        -3.3301127,\n        -2.207981,\n        -2.511488,\n        -2.1947012,\n        -3.2296078,\n        -2.3584638,\n        -2.737618,\n        -3.1801414,\n        -4.861876,\n        -3.4092803,\n        -5.0118575,\n        -2.5117936,\n        -3.616494,\n        -3.2554538,\n        -3.9249454,\n        -2.7289634,\n        -3.2955859,\n        -5.0465126,\n        -5.5747933,\n        -4.9433146,\n        -2.879198,\n        -2.9027388,\n        -1.7449051,\n        -2.3635547,\n        -2.967493,\n        -4.3145137,\n        -2.5878572,\n        -2.690465,\n        -3.0125632,\n        -3.3281112,\n        -3.3097851,\n        -2.305855,\n        -3.4226742,\n        -3.9484427,\n        -4.643662,\n        -2.438772,\n        -4.3584895,\n        -3.1132748,\n        -5.7082057,\n        -3.675992,\n        -3.7442765,\n        -3.782986,\n        -2.5213556,\n        -4.175735,\n        -2.4869661,\n        -3.764512,\n        -3.3696184,\n        -3.76912,\n        -2.955294,\n        -2.455391,\n        -3.1660938,\n        -3.0430512,\n        -2.9759614,\n        -2.8468833,\n        -2.6680672,\n        -2.6251414,\n        -3.0337079,\n        -2.1241593,\n        -2.333803,\n        -2.6122713,\n        -2.257843,\n        -2.6162577,\n        -2.5634978,\n        -2.5290399,\n        -2.7698011,\n        -4.878759,\n        -2.4674652,\n        -4.0861626,\n        -2.8145604,\n        -2.60121,\n        -3.1464317,\n        -2.9476178,\n        -2.1492608,\n        -3.8819375,\n        -3.5344517,\n        -3.1717658,\n        -6.760232,\n        -2.2543252,\n        -2.871668,\n        -2.0001733,\n        -2.7306046,\n 
       -2.9972532,\n        -2.0358295,\n        -3.7240984,\n        -3.764603,\n        -4.331714,\n        -3.5213747,\n        -2.6330638,\n        -2.3694458,\n        -2.447882,\n        -2.9419396,\n        -4.247825,\n        -2.8564208,\n        -3.6029963,\n        -2.990521,\n        -3.9543157,\n        -2.5195994,\n        -2.9512708,\n        -1.9221497,\n        -2.3364508,\n        -2.380883,\n        -2.5922043,\n        -2.7262871,\n        -2.2678275\n      ],\n      \"pointIndex\": [\n        3,\n        1256,\n        254,\n        1198457468,\n        1360952450,\n        26435319,\n        922891573,\n        1706818842,\n        291132708,\n        351232349,\n        1635555717,\n        1039896802,\n        1655975837,\n        1270526413,\n        284925168,\n        119876470,\n        377448719,\n        422047276,\n        689131536,\n        1610442304,\n        764310952,\n        1031548427,\n        999931524,\n        1834911269,\n        608116459,\n        65495470,\n        1912444118,\n        669922830,\n        1541005390,\n        1176102810,\n        1607813297,\n        447779532,\n        1448688105,\n        826406687,\n        1845115835,\n        1829745202,\n        1675928325,\n        1163469776,\n        783631286,\n        1021157742,\n        838796421,\n        1029355220,\n        1576427283,\n        1479123821,\n        1553168870,\n        1255746814,\n        1802114069,\n        60950518,\n        161781672,\n        281234833,\n        1533432341,\n        1634075043,\n        1005880675,\n        1354111505,\n        1399435003,\n        1882301970,\n        334464726,\n        1125525131,\n        27011306,\n        658430381,\n        1673032764,\n        693793304,\n        833452546,\n        424407824,\n        165459134,\n        621642345,\n        1649361131,\n        216798098,\n        836256009,\n        1440847747,\n        555067145,\n        14824669,\n        1565147410,\n        
1091818195,\n        672786451,\n        1580259905,\n        814844409,\n        5187055,\n        1264954531,\n        1034275911,\n        248635736,\n        1346824182,\n        1293371189,\n        1559256902,\n        1343891668,\n        1543671026,\n        1653822480,\n        1572339\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -7786205468076532075\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4614998,\n        -1.4620378,\n        -1.4752388,\n        -1.4637798,\n        -1.4655621,\n        -1.4821007,\n        -1.4969317,\n        -1.4724,\n        -1.4756373,\n        -1.4745526,\n        -1.5583972,\n        -1.4842486,\n        -1.5277389,\n        -1.5109748,\n        -1.5362148,\n        -1.5139849,\n        -1.6300763,\n        -1.5247805,\n        -1.5487496,\n        -1.5125206,\n        -1.6450458,\n        -1.7908546,\n        -1.5639207,\n        -1.6074495,\n        -1.5013287,\n        -1.6458435,\n        -1.5291392,\n        -1.5445955,\n        -1.6785212,\n        -1.7640712,\n        -1.5476627,\n        -1.5836003,\n        -1.5833039,\n        -1.6434343,\n        -2.0564253,\n        -1.5250986,\n        -1.5665632,\n        -1.6909691,\n        -1.5759987,\n        -1.7525314,\n        -1.5801365,\n        -1.9565464,\n        -1.7089354,\n        -1.8142347,\n        -1.9809933,\n        -1.5648323,\n        -1.5719103,\n        -1.6796517,\n        -1.616994,\n        -1.9254856,\n        -1.58162,\n        -2.0042083,\n        -1.918308,\n        -1.7683012,\n        -1.667526,\n        -1.5528598,\n        -1.7119304,\n        -1.7606535,\n        -1.7034885,\n        -1.8455615,\n        -2.1602297,\n        -1.7621607,\n    
    -1.6108712,\n        -1.6713388,\n        -1.9926528,\n        -1.9681088,\n        -1.595653,\n        -2.058484,\n        -1.7006544,\n        -2.3276198,\n        -2.0781496,\n        -2.2205381,\n        -1.5307153,\n        -1.6730161,\n        -1.6707381,\n        -2.025199,\n        -2.0504923,\n        -1.7690312,\n        -1.6877911,\n        -1.9043152,\n        -2.062953,\n        -1.6100355,\n        -1.7021533,\n        -2.6426606,\n        -2.976421,\n        -2.3672402,\n        -1.8924923,\n        -1.9037335,\n        -2.3610914,\n        -2.0577476,\n        -2.0118337,\n        -1.6866783,\n        -2.6741467,\n        -1.6573713,\n        -1.8338802,\n        -3.272017,\n        -1.6896793,\n        -1.8266575,\n        -1.6926943,\n        -2.1101823,\n        -2.249881,\n        -2.7457533,\n        -1.9911,\n        -2.1694999,\n        -2.8206563,\n        -1.9598428,\n        -1.9498309,\n        -1.8770298,\n        -2.8078392,\n        -1.747417,\n        -2.0108464,\n        -1.9295949,\n        -1.7438056,\n        -2.2001588,\n        -2.4090858,\n        -1.9628894,\n        -3.2266276,\n        -2.2227662,\n        -1.9527277,\n        -2.3317513,\n        -3.043289,\n        -2.3646955,\n        -2.2974114,\n        -2.3026936,\n        -2.0654562,\n        -2.101165,\n        -2.0241485,\n        -3.2078626,\n        -2.1115484,\n        -3.375203,\n        -3.4619694,\n        -3.1447875,\n        -3.0205579,\n        -2.9116719,\n        -3.0496647,\n        -5.631469,\n        -2.851554,\n        -1.872976,\n        -3.534869,\n        -2.4107833,\n        -2.4627678,\n        -3.9677198,\n        -2.5221946,\n        -4.2509003,\n        -4.347851,\n        -1.7428657,\n        -2.2097461,\n        -3.1804452,\n        -1.7241619,\n        -2.1284466,\n        -2.7373052,\n        -6.491934,\n        -5.604398,\n        -2.7174814,\n        -2.0809023,\n        -2.070087,\n        -3.1101959,\n        -2.0747929,\n        
-1.7276424,\n        -2.1270373,\n        -2.531271,\n        -2.9381006,\n        -2.3071568,\n        -3.4840255,\n        -3.1525218,\n        -2.1313589,\n        -1.7235185,\n        -4.1241984,\n        -4.0011544,\n        -4.690316,\n        -3.1714647,\n        -4.109093,\n        -3.5499568,\n        -3.7116437,\n        -2.3376155,\n        -2.4696362,\n        -2.3397548,\n        -3.4042199,\n        -4.819347,\n        -2.3679986,\n        -4.512915,\n        -2.5042548,\n        -3.8902445,\n        -2.830531,\n        -1.718399,\n        -6.8237643,\n        -2.9122827,\n        -2.0612462,\n        -4.492936,\n        -2.6069682,\n        -2.9106145,\n        -5.3700852,\n        -4.678593,\n        -1.8033797,\n        -2.726209,\n        -2.9032097,\n        -2.6872823,\n        -2.6334465,\n        -2.3258445,\n        -3.0594003,\n        -2.6481984,\n        -3.4110737,\n        -2.656895,\n        -4.843449,\n        -2.7703316,\n        -3.7980182,\n        -2.426165,\n        -4.9782662,\n        -2.9232721,\n        -5.033615,\n        -3.3354278,\n        -2.5321763,\n        -2.607751,\n        -2.2415302,\n        -2.4685838,\n        -3.2702944,\n        -3.4449472,\n        -5.0143695,\n        -3.24149,\n        -2.8240068,\n        -2.4171066,\n        -3.502546,\n        -2.3196597,\n        -5.5846643,\n        -5.161543,\n        -1.9018593,\n        -3.8895326,\n        -3.0670464,\n        -2.640248,\n        -3.7918057,\n        -4.233302,\n        -4.47031,\n        -2.205188,\n        -3.6447008,\n        -4.1468525,\n        -3.8840435,\n        -3.281042,\n        -2.0309126,\n        -2.9352825,\n        -2.4804878,\n        -6.0481863,\n        -3.061815,\n        -7.0423317,\n        -3.2203004,\n        -3.7399385,\n        -5.456574,\n        -2.5922499,\n        -2.6433997,\n        -3.053929,\n        -2.068918,\n        -2.4161696,\n        -2.7882383,\n        -3.3886878,\n        -3.43396,\n        -3.1441133,\n  
      -3.9203095\n      ],\n      \"pointIndex\": [\n        0,\n        1226,\n        256,\n        849056237,\n        1330298459,\n        142932,\n        101177453,\n        916069392,\n        1286682669,\n        202402972,\n        950065220,\n        823981583,\n        900478237,\n        252922177,\n        317882587,\n        379601170,\n        1350790257,\n        1414449496,\n        675557575,\n        514428925,\n        629544045,\n        937023410,\n        993595804,\n        1643487381,\n        1162308875,\n        262947398,\n        1078715087,\n        716065612,\n        308881963,\n        1458484511,\n        31188202,\n        363072657,\n        395901610,\n        1628006938,\n        1133723048,\n        504229182,\n        1570891080,\n        833597151,\n        758041359,\n        1774356767,\n        1606186339,\n        818151225,\n        1802949039,\n        1044115799,\n        1482425745,\n        1123123331,\n        1421308938,\n        254357493,\n        112360527,\n        1735001320,\n        26132880,\n        287318462,\n        1675444151,\n        299646686,\n        1391296817,\n        378142025,\n        1386032200,\n        546779461,\n        343148943,\n        1475705046,\n        1842922609,\n        370456911,\n        1559704248,\n        71726850,\n        86227771,\n        782278457,\n        3370593,\n        1088871044,\n        369336061,\n        1304912692,\n        1540853432,\n        19260151,\n        559985974,\n        810439446,\n        940631817,\n        881244164,\n        713562624,\n        978060543,\n        724323672,\n        1191640444,\n        1110303134,\n        827206577,\n        1679472034,\n        945460625,\n        519113940,\n        1014641438,\n        1361991718,\n        1723655006,\n        1226\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 
1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -6279845217872163458\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5291942,\n        -1.5421118,\n        -1.5291986,\n        -1.5688084,\n        -1.5456706,\n        -1.5932994,\n        -1.5844495,\n        -1.6483736,\n        -1.5697545,\n        -1.602868,\n        -1.5526233,\n        -1.5970722,\n        -1.6185194,\n        -1.5882381,\n        -1.6166805,\n        -1.7459667,\n        -1.6640987,\n        -1.5835897,\n        -1.6019938,\n        -2.0524082,\n        -1.6345209,\n        -1.679469,\n        -1.5700582,\n        -1.6075442,\n        -1.6975076,\n        -1.643985,\n        -1.6761684,\n        -1.6274849,\n        -1.6280085,\n        -1.7651222,\n        -1.6474775,\n        -1.7666135,\n        -1.915021,\n        -1.6711018,\n        -1.9053582,\n        -1.6919777,\n        -1.5960433,\n        -1.6567732,\n        -1.7585815,\n        -2.2147882,\n        -2.2913113,\n        -1.6530101,\n        -1.761255,\n        -1.7261058,\n        -1.7055616,\n        -1.597077,\n        -2.0509222,\n        -1.6162806,\n        -1.957403,\n        -1.8172238,\n        -1.7253511,\n        -1.7148412,\n        -1.661545,\n        -1.7858291,\n        -1.7208059,\n        -1.8286492,\n        -1.8421198,\n        -1.7606053,\n        -1.6374458,\n        -1.9475904,\n        -1.8031894,\n        -1.7179695,\n        -1.653238,\n        -2.505366,\n        -1.8491501,\n        -1.93492,\n        -2.1802382,\n        -1.7455585,\n        -2.622658,\n        -1.9209028,\n        -1.9228863,\n        -2.0540936,\n        -1.7093084,\n        -2.35584,\n        -1.6626124,\n        -1.7910569,\n        -1.6780435,\n        -2.0860777,\n        -1.9547757,\n        -2.6014528,\n        -2.4743774,\n        -2.6695936,\n        -3.5200083,\n        -2.4638808,\n        
-3.6596913,\n        -1.772456,\n        -2.8461657,\n        -2.3185823,\n        -2.1135888,\n        -1.922694,\n        -1.7556864,\n        -1.7284646,\n        -2.1148572,\n        -2.646042,\n        -2.5979016,\n        -2.307461,\n        -2.490812,\n        -2.1498966,\n        -2.2652745,\n        -1.9508317,\n        -3.3524008,\n        -2.6338513,\n        -1.8379326,\n        -2.0284708,\n        -1.9257252,\n        -1.780004,\n        -2.5826719,\n        -2.2029107,\n        -2.1302207,\n        -1.886495,\n        -1.8473617,\n        -1.9395412,\n        -1.908353,\n        -2.0144832,\n        -2.8429317,\n        -2.0034041,\n        -3.0488722,\n        -2.0849218,\n        -1.97875,\n        -2.5612664,\n        -2.8554602,\n        -1.9041004,\n        -2.3833334,\n        -1.9720411,\n        -2.2629402,\n        -2.0808895,\n        -1.6719692,\n        -2.763051,\n        -2.5273015,\n        -1.9659427,\n        -4.1709867,\n        -3.3429592,\n        -2.7491894,\n        -2.3433127,\n        -3.0188322,\n        -5.337598,\n        -2.0138423,\n        -3.548501,\n        -3.0431871,\n        -3.450968,\n        -2.0977263,\n        -3.5403953,\n        -5.311833,\n        -2.9877274,\n        -2.6960602,\n        -2.1924443,\n        -2.0940046,\n        -3.608719,\n        -3.313874,\n        -1.9078962,\n        -1.7662423,\n        -2.491588,\n        -2.115966,\n        -3.0886564,\n        -3.761888,\n        -3.0473785,\n        -2.3578377,\n        -3.1963868,\n        -2.0134742,\n        -3.3740778,\n        -3.6406064,\n        -3.475895,\n        -2.6934884,\n        -3.090138,\n        -2.8647952,\n        -4.240299,\n        -5.058313,\n        -3.3151815,\n        -2.7376516,\n        -4.3686676,\n        -4.85089,\n        -1.941006,\n        -3.5042632,\n        -2.961024,\n        -3.1213286,\n        -2.3416991,\n        -2.4683144,\n        -3.9237976,\n        -2.263143,\n        -2.2315419,\n        
-3.6002877,\n        -4.234399,\n        -2.9994457,\n        -1.8865396,\n        -5.071279,\n        -3.388649,\n        -3.2096522,\n        -3.0233998,\n        -3.828761,\n        -4.304722,\n        -4.7664814,\n        -4.2962723,\n        -3.6441188,\n        -3.5908008,\n        -2.7482326,\n        -2.2110183,\n        -2.3329499,\n        -2.7433815,\n        -2.954774,\n        -2.034433,\n        -4.5096397,\n        -3.4059017,\n        -3.5646608,\n        -3.219465,\n        -2.679943,\n        -2.9957838,\n        -6.4067445,\n        -3.1729321,\n        -2.4494479,\n        -2.172322,\n        -2.711321,\n        -4.449049,\n        -3.6060262,\n        -5.8164296,\n        -2.6889763,\n        -2.3064306,\n        -5.6516323,\n        -4.252806,\n        -2.296698,\n        -2.504121,\n        -2.5867362,\n        -2.494824,\n        -2.026782,\n        -4.488038,\n        -2.5933194,\n        -2.6822002,\n        -2.3643074,\n        -2.945674,\n        -2.6898968,\n        -3.0333946,\n        -3.0024295,\n        -2.045695,\n        -2.069347,\n        -3.964504,\n        -6.586258,\n        -4.176414,\n        -3.3635936,\n        -2.273528,\n        -2.857833,\n        -3.2752182,\n        -3.1722167,\n        -2.9160614,\n        -3.245185,\n        -5.39917,\n        -2.0929382,\n        -2.39346,\n        -3.625814,\n        -4.11937,\n        -2.0917597,\n        -2.3497598,\n        -2.4011214,\n        -3.5981495,\n        -2.1429238,\n        -1.8076006\n      ],\n      \"pointIndex\": [\n        0,\n        1247,\n        254,\n        1207393395,\n        1282199790,\n        1517355764,\n        876092348,\n        1851235249,\n        1031800199,\n        382474110,\n        1568060881,\n        1438044522,\n        980294924,\n        1319389343,\n        300434136,\n        530712929,\n        1481478031,\n        413712687,\n        473858392,\n        544332911,\n        40496455,\n        750176439,\n        944546922,\n     
   1842198435,\n        264523950,\n        1185384991,\n        232310,\n        1422329646,\n        328917204,\n        1779388631,\n        674935981,\n        1746595928,\n        1637280667,\n        1198686133,\n        1793063872,\n        1330240837,\n        1433095016,\n        1837934942,\n        760884769,\n        690073014,\n        785842974,\n        1094420146,\n        956486752,\n        1191038798,\n        1807988695,\n        141741757,\n        1238555458,\n        355134539,\n        402731252,\n        1937625075,\n        777053484,\n        948757244,\n        1053514550,\n        155850445,\n        1903432337,\n        1606054283,\n        1619661043,\n        1739808340,\n        888819597,\n        1528127005,\n        1644004700,\n        971895458,\n        46555885,\n        1170026704,\n        443882870,\n        556088262,\n        1784744312,\n        1547430962,\n        492864216,\n        1693527228,\n        531323225,\n        547909999,\n        1213620485,\n        1055342901,\n        1667520910,\n        835400368,\n        1505096003,\n        710755318,\n        734421719,\n        887583508,\n        1880797967,\n        1782520631,\n        921168046,\n        1628462655,\n        1381801248,\n        1642730788,\n        1806492239,\n        1533567\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -6736865570805532185\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6270272,\n        -1.6364937,\n        -1.6316625,\n        -1.6581007,\n        -1.657373,\n        -1.6732012,\n        -1.6403781,\n        -1.666901,\n        -1.716661,\n        -1.7204353,\n        -1.6693875,\n        -1.6810037,\n        
-1.6786739,\n        -1.6509911,\n        -1.6405945,\n        -1.6681459,\n        -1.7152675,\n        -1.9488813,\n        -1.7760447,\n        -1.9936274,\n        -1.7521713,\n        -1.7592715,\n        -1.902406,\n        -1.6905272,\n        -1.8479546,\n        -1.751047,\n        -1.7905741,\n        -1.6934325,\n        -1.6789583,\n        -1.6701736,\n        -1.6564382,\n        -1.7715598,\n        -2.0151927,\n        -1.9195334,\n        -1.8028382,\n        -1.9663247,\n        -2.1656272,\n        -2.296082,\n        -1.840917,\n        -2.1362383,\n        -2.0696712,\n        -1.8631237,\n        -1.767524,\n        -1.7651434,\n        -1.8536345,\n        -2.0365477,\n        -2.1101491,\n        -1.7590829,\n        -1.7541672,\n        -1.8802052,\n        -1.9907873,\n        -1.883789,\n        -1.9589584,\n        -1.937726,\n        -1.7951763,\n        -1.9418979,\n        -1.9566839,\n        -2.0244322,\n        -1.9384271,\n        -1.6788492,\n        -1.7451121,\n        -1.7861428,\n        -1.6852266,\n        -2.5215356,\n        -1.9013051,\n        -2.037079,\n        -2.0577483,\n        -3.0182664,\n        -1.9487162,\n        -1.9190514,\n        -1.8165746,\n        -2.0850487,\n        -2.0822523,\n        -2.2335765,\n        -2.4680552,\n        -2.363522,\n        -2.3838506,\n        -2.204269,\n        -4.3974123,\n        -3.3375227,\n        -2.7195897,\n        -2.4727077,\n        -2.1204584,\n        -2.6764693,\n        -2.1020103,\n        -1.989323,\n        -1.9542483,\n        -2.167528,\n        -1.769592,\n        -2.1437624,\n        -2.7654886,\n        -2.1714416,\n        -2.6895342,\n        -2.7113855,\n        -2.3920598,\n        -2.157461,\n        -1.9640378,\n        -1.7977059,\n        -1.7815706,\n        -1.9780982,\n        -2.2057226,\n        -2.116635,\n        -2.3563638,\n        -1.9742705,\n        -2.9931417,\n        -2.392908,\n        -2.28999,\n        -1.9452024,\n        
-2.6425848,\n        -2.6401994,\n        -1.9202209,\n        -2.5856595,\n        -2.1273446,\n        -2.2240243,\n        -2.129439,\n        -3.3542998,\n        -2.2884893,\n        -2.4859428,\n        -2.2571838,\n        -1.7519403,\n        -1.9292167,\n        -1.8874947,\n        -2.1244519,\n        -1.9506923,\n        -2.0909913,\n        -2.3313808,\n        -1.7003025,\n        -2.9537804,\n        -3.5986166,\n        -2.7357392,\n        -2.283334,\n        -4.7310257,\n        -2.9838123,\n        -3.3083916,\n        -2.284723,\n        -4.025685,\n        -3.6454818,\n        -2.046851,\n        -3.136488,\n        -4.9756856,\n        -1.9540596,\n        -5.2797184,\n        -4.224498,\n        -3.102977,\n        -3.69534,\n        -2.6488783,\n        -3.182275,\n        -3.6669807,\n        -4.6564136,\n        -3.294526,\n        -2.7974358,\n        -3.3393862,\n        -3.445514,\n        -3.427641,\n        -3.7099073,\n        -2.8673236,\n        -3.67386,\n        -4.8217087,\n        -4.5653443,\n        -3.4503992,\n        -3.6306133,\n        -5.5046973,\n        -4.169761,\n        -2.6254187,\n        -4.3230076,\n        -3.2810805,\n        -2.551275,\n        -3.7663214,\n        -2.8015034,\n        -4.2319894,\n        -2.425944,\n        -2.796178,\n        -2.7693992,\n        -2.4642992,\n        -2.7877843,\n        -4.520319,\n        -3.8294418,\n        -4.1778226,\n        -2.1968145,\n        -2.6842058,\n        -5.268678,\n        -3.4234889,\n        -3.6759183,\n        -2.393645,\n        -4.843274,\n        -2.8643084,\n        -3.3054264,\n        -4.0527077,\n        -3.1848598,\n        -2.6404333,\n        -4.456477,\n        -2.7478602,\n        -2.5648475,\n        -2.6379373,\n        -4.831928,\n        -2.3397806,\n        -1.9601725,\n        -3.099352,\n        -2.1764588,\n        -3.9158375,\n        -2.2227128,\n        -2.9893708,\n        -2.8553996,\n        -2.6301746,\n        
-2.118248,\n        -2.6538143,\n        -5.337449,\n        -1.9940683,\n        -2.7780764,\n        -3.9678357,\n        -3.0879936,\n        -3.0710263,\n        -3.4833937,\n        -2.4810443,\n        -4.2350316,\n        -4.8945765,\n        -3.2219045,\n        -2.769202,\n        -3.0021436,\n        -2.9778628,\n        -3.6210992,\n        -2.2141361,\n        -2.673273,\n        -3.2167678,\n        -3.5103796,\n        -8.597359,\n        -3.5799994,\n        -2.4545393,\n        -4.5522933,\n        -2.4809632,\n        -4.8563533,\n        -5.3535905,\n        -4.0583744,\n        -3.7414627,\n        -4.6650076,\n        -2.496172,\n        -5.1984487,\n        -2.5022283,\n        -3.1943207,\n        -2.0961633,\n        -4.3724174,\n        -2.932501,\n        -2.277216,\n        -2.0648792,\n        -2.928093,\n        -2.7486837,\n        -3.541168,\n        -2.4924767,\n        -2.1827936,\n        -2.2467203,\n        -3.017348,\n        -2.658529,\n        -3.090261,\n        -3.782347,\n        -1.7620225\n      ],\n      \"pointIndex\": [\n        5,\n        1255,\n        255,\n        1956468551,\n        1616157286,\n        1609746451,\n        488452057,\n        930233117,\n        1906460260,\n        1725851027,\n        786295560,\n        153864773,\n        25689970,\n        1908503004,\n        281243365,\n        1314661265,\n        1468990517,\n        1176275018,\n        1622263040,\n        577357105,\n        1464775084,\n        709924167,\n        889810156,\n        1877489160,\n        627129837,\n        60480977,\n        1256707707,\n        522403349,\n        1632370727,\n        1704381890,\n        887374661,\n        442182691,\n        1755137153,\n        535785731,\n        1750574627,\n        1588771769,\n        551572559,\n        1056180673,\n        630785850,\n        1720527106,\n        699769889,\n        208720950,\n        1142427647,\n        928764231,\n        1489358879,\n        
1139773552,\n        1028238333,\n        1522940943,\n        100212860,\n        264566646,\n        839567687,\n        308532422,\n        704930029,\n        310113401,\n        1787203725,\n        357426754,\n        1167751160,\n        1145759838,\n        1240995885,\n        58971194,\n        1280434039,\n        1530888301,\n        1606015708,\n        52961117,\n        968535734,\n        653293164,\n        726161111,\n        926860836,\n        503605708,\n        1699615712,\n        264175794,\n        985695770,\n        554952958,\n        595565852,\n        629290955,\n        646856226,\n        197718828,\n        751350228,\n        1545208118,\n        1702895684,\n        762738313,\n        1039790708,\n        878663156,\n        1636895505,\n        1299674101,\n        1169950849,\n        1387813939,\n        1863833561\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 6873559043919805206\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5478095,\n        -1.5580735,\n        -1.5586025,\n        -1.588912,\n        -1.5624557,\n        -1.5931304,\n        -1.5787097,\n        -1.6516792,\n        -1.7199396,\n        -1.5835112,\n        -1.5690824,\n        -1.6260145,\n        -1.6317056,\n        -1.5840569,\n        -1.6037713,\n        -1.7980132,\n        -1.8133217,\n        -1.7965556,\n        -1.7716056,\n        -1.5857275,\n        -1.5960587,\n        -1.7305709,\n        -1.7159228,\n        -1.7437992,\n        -1.6637305,\n        -1.6374578,\n        -1.8194374,\n        -1.6424956,\n        -1.6265041,\n        -1.7596669,\n        -1.6393709,\n        -1.8227845,\n        -1.8477689,\n        -2.161068,\n        
-2.0182776,\n        -1.8059037,\n        -2.1590114,\n        -1.9517924,\n        -1.7850544,\n        -1.6919967,\n        -1.6256903,\n        -1.7500316,\n        -1.6069838,\n        -1.7542928,\n        -1.9830415,\n        -1.963632,\n        -1.8992164,\n        -1.7973434,\n        -2.1416883,\n        -1.6798558,\n        -1.7398648,\n        -1.6805445,\n        -1.9159135,\n        -1.8302265,\n        -1.8452566,\n        -1.7396388,\n        -1.724929,\n        -1.8845452,\n        -1.6393856,\n        -1.9198812,\n        -1.799096,\n        -1.7613573,\n        -1.6581032,\n        -1.8339019,\n        -1.9424281,\n        -2.7757666,\n        -2.4357708,\n        -2.515062,\n        -2.4722068,\n        -2.082221,\n        -2.2053227,\n        -2.277163,\n        -1.8077579,\n        -2.7880044,\n        -2.4332013,\n        -2.4218073,\n        -2.0808065,\n        -1.8147348,\n        -2.215576,\n        -1.8046377,\n        -1.7593033,\n        -1.777868,\n        -1.7045307,\n        -2.2772882,\n        -1.9263648,\n        -1.8515847,\n        -1.6312764,\n        -1.9708276,\n        -2.3266296,\n        -2.1762302,\n        -2.0596678,\n        -2.0727565,\n        -2.1242344,\n        -1.955638,\n        -2.0269237,\n        -2.80806,\n        -4.395095,\n        -2.7257683,\n        -2.1824844,\n        -2.2855716,\n        -1.9568323,\n        -1.9669816,\n        -2.5572267,\n        -1.7785479,\n        -2.039832,\n        -2.551597,\n        -2.0521724,\n        -2.0065117,\n        -2.034989,\n        -1.9030262,\n        -2.016063,\n        -2.2545946,\n        -2.2300994,\n        -3.4435954,\n        -1.9981797,\n        -2.0935466,\n        -2.1631384,\n        -2.1183298,\n        -1.6487999,\n        -1.9733895,\n        -2.2379096,\n        -1.8148649,\n        -1.8340958,\n        -2.0860653,\n        -2.1912777,\n        -1.8835979,\n        -1.8014612,\n        -3.1797936,\n        -5.339011,\n        -3.2961295,\n        
-2.7358942,\n        -2.8153527,\n        -3.3449876,\n        -3.0131137,\n        -3.142744,\n        -2.559853,\n        -2.584647,\n        -5.9994397,\n        -3.5015721,\n        -3.2383192,\n        -2.6384623,\n        -2.6504884,\n        -2.5027266,\n        -3.348065,\n        -2.329989,\n        -2.7779088,\n        -1.8919544,\n        -3.3424976,\n        -3.620949,\n        -3.2293108,\n        -2.8661213,\n        -2.9987667,\n        -3.2882886,\n        -2.2421741,\n        -2.9003553,\n        -2.4627187,\n        -3.7794378,\n        -2.3688264,\n        -3.7690911,\n        -2.6497767,\n        -3.440573,\n        -2.2225194,\n        -3.4665196,\n        -3.1266437,\n        -3.9963698,\n        -2.394353,\n        -2.536245,\n        -5.682287,\n        -2.7054744,\n        -3.5295506,\n        -3.164668,\n        -2.7427077,\n        -3.303546,\n        -1.8590448,\n        -2.207797,\n        -2.018024,\n        -2.0527718,\n        -3.4058433,\n        -4.380702,\n        -3.5663502,\n        -2.4080958,\n        -2.711812,\n        -4.338985,\n        -2.8664718,\n        -3.122034,\n        -2.5750837,\n        -2.2175708,\n        -4.8892264,\n        -3.1534848,\n        -2.092666,\n        -2.328548,\n        -3.0831187,\n        -3.136449,\n        -7.84776,\n        -6.210704,\n        -2.9239645,\n        -4.766644,\n        -2.2623096,\n        -3.40043,\n        -2.3112707,\n        -2.9403992,\n        -2.5001543,\n        -3.4736223,\n        -5.2151012,\n        -2.6916144,\n        -3.540305,\n        -4.12798,\n        -2.7393646,\n        -2.1685767,\n        -2.746256,\n        -2.8794262,\n        -4.1864066,\n        -4.8762226,\n        -2.3647995,\n        -3.9723904,\n        -2.4144828,\n        -4.3872066,\n        -2.2839453,\n        -2.4254746,\n        -2.037719,\n        -2.6945152,\n        -2.326007,\n        -2.9042437,\n        -4.6150465,\n        -4.993203,\n        -2.8662229,\n        -2.7774584,\n     
   -3.4765794,\n        -4.0050497,\n        -2.058355,\n        -2.7188106,\n        -2.258685,\n        -3.3639786,\n        -4.753893,\n        -4.302262,\n        -2.3484764,\n        -3.6190546,\n        -2.398666,\n        -5.012479,\n        -2.397243,\n        -4.971586,\n        -3.7006304,\n        -7.620705,\n        -2.0435278,\n        -2.7693655,\n        -4.205434,\n        -2.1007671,\n        -2.8852465,\n        -2.899561,\n        -2.9529257,\n        -3.2352219,\n        -2.7577133,\n        -2.4206066,\n        -3.002688,\n        -1.9380794\n      ],\n      \"pointIndex\": [\n        4,\n        1248,\n        255,\n        797085272,\n        917719120,\n        665845471,\n        611172034,\n        1738275277,\n        1155010306,\n        863175673,\n        502781864,\n        733921225,\n        1824565080,\n        798338759,\n        809817756,\n        333197546,\n        386844836,\n        879126480,\n        477362820,\n        1241127680,\n        629887312,\n        3698656,\n        1751740488,\n        1724705441,\n        486849765,\n        765800921,\n        302530205,\n        319812420,\n        1537887528,\n        1335616466,\n        715588314,\n        409087041,\n        1014476770,\n        131534629,\n        155377144,\n        519665801,\n        979042494,\n        556013783,\n        1262786447,\n        704272033,\n        1808017009,\n        1067124924,\n        983471314,\n        1115367593,\n        1777420339,\n        1551781689,\n        792743958,\n        1872401644,\n        1369229168,\n        176736649,\n        7387787,\n        1558023078,\n        191486482,\n        762348003,\n        1365673732,\n        341034796,\n        171526140,\n        25179000,\n        1076981259,\n        1260361787,\n        521298593,\n        866513695,\n        502099238,\n        90562059,\n        434354796,\n        151897813,\n        1186792304,\n        174131077,\n        1354707555,\n        
1337800387,\n        964071028,\n        509614955,\n        1548920373,\n        1450283480,\n        818029985,\n        811502628,\n        1472466978,\n        1404935992,\n        924462353,\n        1755394524,\n        1192694550,\n        217950647,\n        1081151410,\n        1049347660,\n        649725313,\n        1372758617,\n        1480160371,\n        1929596516\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": -919230996305300857\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6287293,\n        -1.6435285,\n        -1.637755,\n        -1.6691914,\n        -1.645258,\n        -1.6442953,\n        -1.6410506,\n        -1.6916354,\n        -1.7190585,\n        -1.761682,\n        -1.6499045,\n        -1.6932083,\n        -1.729453,\n        -1.649677,\n        -1.6591613,\n        -1.6945941,\n        -1.8920058,\n        -1.7502831,\n        -1.7589148,\n        -1.844166,\n        -1.8389727,\n        -2.1142673,\n        -1.722116,\n        -1.7554877,\n        -1.8476304,\n        -1.7322166,\n        -1.7614912,\n        -1.7297312,\n        -1.6993097,\n        -2.0708926,\n        -1.6952051,\n        -1.7018081,\n        -1.7204852,\n        -1.9380832,\n        -1.9250269,\n        -2.1003716,\n        -1.810654,\n        -1.7863406,\n        -1.8420278,\n        -1.923125,\n        -2.0683916,\n        -1.8779187,\n        -2.0993567,\n        -2.2440252,\n        -2.4986584,\n        -2.1349447,\n        -1.8286045,\n        -1.8319627,\n        -2.963366,\n        -2.2319872,\n        -2.125827,\n        -1.977528,\n        -1.797223,\n        -1.8572384,\n        -1.9525493,\n        -2.1618528,\n        -1.7674576,\n        -1.7466911,\n        
-1.8013276,\n        -2.0905416,\n        -2.21956,\n        -1.8042545,\n        -1.6955078,\n        -1.9114436,\n        -1.7379467,\n        -2.8870742,\n        -2.0274715,\n        -1.9450583,\n        -3.025044,\n        -1.9799057,\n        -2.2283165,\n        -2.344057,\n        -2.1447797,\n        -1.8842825,\n        -2.1390884,\n        -2.1928966,\n        -2.1094954,\n        -2.474272,\n        -2.218279,\n        -2.701682,\n        -2.0378191,\n        -2.3359075,\n        -2.3428192,\n        -1.9566844,\n        -2.5642297,\n        -2.1202443,\n        -2.4547656,\n        -2.5752928,\n        -2.4486563,\n        -2.5106273,\n        -2.9675372,\n        -2.722326,\n        -2.5456028,\n        -2.1745915,\n        -1.8693612,\n        -1.8518169,\n        -2.08939,\n        -3.7104192,\n        -2.977432,\n        -2.5043721,\n        -2.6941476,\n        -2.5780845,\n        -2.8901272,\n        -2.3614352,\n        -2.4190023,\n        -1.9482347,\n        -2.2259426,\n        -2.6558018,\n        -2.9252875,\n        -2.087803,\n        -2.0049474,\n        -2.4985342,\n        -2.263475,\n        -1.8067365,\n        -1.8575746,\n        -1.8400688,\n        -2.374721,\n        -2.1351123,\n        -2.0048347,\n        -2.1655703,\n        -2.787334,\n        -2.4745865,\n        -2.7357888,\n        -2.3446178,\n        -1.8706708,\n        -2.1889246,\n        -1.7161208,\n        -2.8049548,\n        -1.9561678,\n        -2.6307518,\n        -2.2830336,\n        -3.503847,\n        -3.7583747,\n        -2.5329049,\n        -4.5742164,\n        -3.3736012,\n        -2.9679334,\n        -3.1773055,\n        -3.1729085,\n        -5.4354944,\n        -4.8199077,\n        -2.8325222,\n        -2.62505,\n        -2.6489146,\n        -2.9634616,\n        -2.2020204,\n        -2.314265,\n        -2.578448,\n        -2.7460446,\n        -2.487839,\n        -2.949279,\n        -2.9059079,\n        -3.3980107,\n        -2.5645654,\n        
-2.7129917,\n        -4.727363,\n        -2.5351973,\n        -3.9054053,\n        -2.6798823,\n        -2.7133641,\n        -6.961175,\n        -4.145108,\n        -2.8550694,\n        -2.4107604,\n        -4.073314,\n        -2.3849046,\n        -3.8040702,\n        -1.9988089,\n        -2.6826816,\n        -2.958969,\n        -2.8348746,\n        -2.4423566,\n        -3.2900076,\n        -3.5124567,\n        -3.0276008,\n        -2.6424477,\n        -3.222973,\n        -2.7755873,\n        -3.1440806,\n        -3.4846766,\n        -3.0851703,\n        -3.4664392,\n        -3.2701242,\n        -4.641508,\n        -5.2022886,\n        -5.167342,\n        -2.9217503,\n        -2.6460018,\n        -4.8421874,\n        -3.2255902,\n        -3.8385496,\n        -2.7864158,\n        -3.049552,\n        -4.733867,\n        -4.454487,\n        -3.9694974,\n        -4.1708393,\n        -4.4392476,\n        -3.2127802,\n        -3.1510036,\n        -2.6468732,\n        -2.9868522,\n        -4.7008367,\n        -3.4651322,\n        -2.6355107,\n        -4.7546005,\n        -3.0883682,\n        -2.952955,\n        -3.146033,\n        -2.7686331,\n        -2.429948,\n        -2.7047617,\n        -2.456339,\n        -3.1010394,\n        -2.527437,\n        -3.8453908,\n        -3.0544543,\n        -2.9318485,\n        -5.533993,\n        -2.0890853,\n        -2.7984624,\n        -5.6635385,\n        -4.536752,\n        -2.9831543,\n        -2.8688128,\n        -2.588757,\n        -2.9604492,\n        -2.4847069,\n        -2.9993773,\n        -4.0434513,\n        -3.3012896,\n        -2.394382,\n        -2.7405553,\n        -4.2170815,\n        -4.238273,\n        -3.1518064,\n        -4.3944273,\n        -2.0580623,\n        -2.0345478,\n        -4.6425953,\n        -2.7166886,\n        -4.0035863,\n        -3.0050988,\n        -6.148036,\n        -3.6872802,\n        -5.858176,\n        -2.7695863,\n        -3.1716938,\n        -2.6447742,\n        -2.1457007,\n        
-2.0553026,\n        -2.922085,\n        -2.994673,\n        -2.9905138,\n        -1.9678352\n      ],\n      \"pointIndex\": [\n        3,\n        1253,\n        255,\n        1468939989,\n        348201086,\n        311481418,\n        1733845684,\n        1686180818,\n        280805918,\n        1795342310,\n        1372015301,\n        1359497846,\n        1486453725,\n        1260359975,\n        1033476187,\n        361534460,\n        1355030892,\n        1272463141,\n        805838947,\n        199255874,\n        972786110,\n        346438616,\n        1722030182,\n        1813644722,\n        191076355,\n        981070544,\n        284836642,\n        310910058,\n        989748829,\n        1799011465,\n        1134929181,\n        1911068180,\n        1831200079,\n        1413073718,\n        1628009934,\n        1675666577,\n        1909251535,\n        300972608,\n        78762105,\n        704813320,\n        309117930,\n        1038878914,\n        1271574588,\n        94968480,\n        1541144338,\n        1719937349,\n        1811721669,\n        471265264,\n        1530779806,\n        1218782107,\n        390946267,\n        296023026,\n        473573629,\n        1716169005,\n        331840464,\n        371289284,\n        644432640,\n        481715699,\n        1673291390,\n        394237884,\n        1112141393,\n        1866968764,\n        444447526,\n        1055838466,\n        1298556873,\n        526621749,\n        156912001,\n        163208658,\n        195519246,\n        1823671856,\n        1244638050,\n        1730451265,\n        1494404993,\n        634462173,\n        1163393280,\n        808705820,\n        1525716283,\n        736456317,\n        1566828723,\n        572959739,\n        1176229499,\n        1011709746,\n        1060291886,\n        1303034817,\n        1428442655,\n        1441529443,\n        1522581783,\n        1918611098\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n     
 \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 1235914033936469780\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5345111,\n        -1.5345854,\n        -1.5352143,\n        -1.5418011,\n        -1.5640353,\n        -1.5873953,\n        -1.5440509,\n        -1.5723784,\n        -1.5536164,\n        -1.6138204,\n        -1.564567,\n        -1.6089317,\n        -1.6009803,\n        -1.5548254,\n        -1.5872757,\n        -1.7854967,\n        -1.7077621,\n        -1.618875,\n        -1.5881951,\n        -1.7608906,\n        -1.74024,\n        -1.6480869,\n        -1.5943167,\n        -1.6137693,\n        -1.6191088,\n        -1.6046709,\n        -1.637026,\n        -1.5612868,\n        -1.5652598,\n        -1.5936134,\n        -1.656476,\n        -2.0284445,\n        -2.0513895,\n        -1.7272295,\n        -1.8274367,\n        -1.7010891,\n        -1.7508259,\n        -1.6084881,\n        -1.7268528,\n        -1.9210428,\n        -1.8405379,\n        -1.9598054,\n        -1.7992055,\n        -1.6998166,\n        -1.7354546,\n        -1.675926,\n        -1.6926972,\n        -1.8770659,\n        -1.8663892,\n        -1.7692633,\n        -1.6374545,\n        -1.6231381,\n        -1.8434621,\n        -1.7333738,\n        -1.948892,\n        -1.5747668,\n        -1.7588967,\n        -1.6761369,\n        -2.1003804,\n        -1.6753234,\n        -1.6181744,\n        -1.8790652,\n        -1.661982,\n        -2.0956454,\n        -2.200539,\n        -2.0995784,\n        -2.225362,\n        -2.2545896,\n        -1.9625192,\n        -1.8587768,\n        -1.8545741,\n        -1.7148182,\n        -1.8189396,\n        -1.7725823,\n        -1.8485171,\n        -2.118984,\n        -2.387815,\n        -2.064594,\n        -2.0679607,\n        -2.118417,\n        
-2.3495748,\n        -2.288614,\n        -2.195366,\n        -2.6614716,\n        -2.2825482,\n        -1.9241642,\n        -3.0691502,\n        -1.7099928,\n        -1.9844245,\n        -1.9936858,\n        -2.0395646,\n        -1.6921089,\n        -1.9774458,\n        -3.1731887,\n        -1.8646569,\n        -1.8896581,\n        -2.0159445,\n        -1.9981244,\n        -2.1396825,\n        -1.8050421,\n        -1.7693808,\n        -1.7676467,\n        -1.754945,\n        -1.6373693,\n        -2.9621675,\n        -2.3039734,\n        -1.8951974,\n        -1.9601555,\n        -1.8143328,\n        -2.4989946,\n        -2.312548,\n        -1.8358022,\n        -2.3151543,\n        -2.063485,\n        -1.8310093,\n        -2.309061,\n        -1.8352809,\n        -2.2402127,\n        -2.1357348,\n        -1.9181471,\n        -2.1218538,\n        -2.4286084,\n        -1.619118,\n        -2.3334422,\n        -2.0407221,\n        -2.2614598,\n        -1.6817223,\n        -2.7123013,\n        -2.9958603,\n        -3.2728026,\n        -2.8368137,\n        -2.5721009,\n        -2.917388,\n        -2.8172667,\n        -5.1267014,\n        -2.5259824,\n        -3.426527,\n        -4.2924347,\n        -4.367054,\n        -3.8996263,\n        -2.4546254,\n        -2.8780682,\n        -3.369605,\n        -1.9061285,\n        -3.7905815,\n        -3.2067957,\n        -2.7068586,\n        -2.0716114,\n        -2.2800162,\n        -2.4191968,\n        -3.452595,\n        -2.120203,\n        -2.369795,\n        -4.5425644,\n        -2.4015663,\n        -2.3730764,\n        -2.5273366,\n        -3.0901675,\n        -2.9005687,\n        -2.5268888,\n        -2.7519774,\n        -2.4042478,\n        -2.5676286,\n        -3.3872604,\n        -2.3477516,\n        -2.5910897,\n        -2.490365,\n        -2.697967,\n        -3.0550823,\n        -3.553559,\n        -2.7715716,\n        -3.4370391,\n        -3.0085537,\n        -3.6120863,\n        -4.689063,\n        -1.7739089,\n        
-2.0211194,\n        -2.5067132,\n        -2.825091,\n        -2.0644937,\n        -2.060346,\n        -3.5862482,\n        -2.9754694,\n        -2.7869902,\n        -2.8265479,\n        -2.8258586,\n        -2.4236476,\n        -5.5222154,\n        -4.011036,\n        -5.4852524,\n        -2.257591,\n        -2.3536267,\n        -2.1434913,\n        -3.0729537,\n        -2.503225,\n        -3.1518953,\n        -3.8945484,\n        -3.5541847,\n        -3.574473,\n        -3.5645008,\n        -1.8395015,\n        -2.1069777,\n        -2.280898,\n        -1.9562199,\n        -1.8099992,\n        -2.681795,\n        -4.0627627,\n        -2.3272598,\n        -3.1536226,\n        -3.6603744,\n        -5.376876,\n        -4.4856424,\n        -2.3590813,\n        -2.0735247,\n        -2.5921485,\n        -3.2830138,\n        -2.987672,\n        -5.36235,\n        -2.7352164,\n        -2.5040228,\n        -3.3372958,\n        -3.66952,\n        -2.796431,\n        -1.9083732,\n        -4.185178,\n        -3.1007457,\n        -2.485229,\n        -2.1849375,\n        -2.8226488,\n        -2.0973449,\n        -2.3647096,\n        -3.5712779,\n        -3.2152066,\n        -2.2329075,\n        -2.6364183,\n        -2.7903652,\n        -2.5476396,\n        -2.842387,\n        -2.2279704,\n        -2.3730114,\n        -2.0485094,\n        -3.2631235,\n        -3.3598142,\n        -2.7192006,\n        -2.5139086,\n        -2.722003,\n        -2.3466024,\n        -2.7279682,\n        -3.6686144,\n        -2.3619401,\n        -3.394032,\n        -2.5690548,\n        -3.4673102,\n        -2.3773215\n      ],\n      \"pointIndex\": [\n        3,\n        1249,\n        254,\n        749619764,\n        515508889,\n        953230363,\n        644120088,\n        1783281798,\n        1438421982,\n        1006494365,\n        1449632781,\n        933462234,\n        1241790507,\n        263002233,\n        291621465,\n        1311156237,\n        1622877794,\n        593024514,\n        
160824440,\n        84490796,\n        633171281,\n        823183843,\n        1353930027,\n        1936991999,\n        101048797,\n        501282098,\n        1527536864,\n        55971400,\n        1902732214,\n        1733274098,\n        203359947,\n        140136544,\n        1712388138,\n        1071987294,\n        471161193,\n        522293587,\n        621210748,\n        1601017186,\n        1920764524,\n        1363619146,\n        1807520155,\n        461573448,\n        983873570,\n        1331965006,\n        1549976524,\n        825674653,\n        525980398,\n        270665261,\n        683061037,\n        1187318760,\n        855226838,\n        581585821,\n        1425257349,\n        1051440673,\n        224458911,\n        361881947,\n        239661597,\n        1076350786,\n        1396027055,\n        234628851,\n        1629539413,\n        401512819,\n        1390505173,\n        427967097,\n        1304488839,\n        445090758,\n        969427552,\n        1095121632,\n        33234972,\n        1633348644,\n        80685532,\n        1329840547,\n        1835358407,\n        590560865,\n        726010070,\n        1284999414,\n        1598481684,\n        744808929,\n        757904728,\n        1398773041,\n        961639521,\n        1918522845,\n        1505982507,\n        1142351156,\n        588675351,\n        1779866454,\n        1923068132,\n        1554810\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 5961685213686464613\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.5257835,\n        -1.532419,\n        -1.526319,\n        -1.5459954,\n        -1.5403318,\n        -1.5273062,\n        -1.5318718,\n        -1.5854601,\n        
-1.600429,\n        -1.5905855,\n        -1.576832,\n        -1.5444384,\n        -1.53936,\n        -1.6083705,\n        -1.5532775,\n        -1.7179025,\n        -1.6046104,\n        -1.6268706,\n        -1.6039013,\n        -1.5928894,\n        -1.8332025,\n        -1.6485378,\n        -1.6818303,\n        -1.5466363,\n        -1.5505174,\n        -1.5745901,\n        -1.5889266,\n        -1.676313,\n        -1.6876225,\n        -1.680915,\n        -1.6252509,\n        -1.8780425,\n        -2.0918608,\n        -1.6844703,\n        -1.7588052,\n        -1.9396161,\n        -1.6796147,\n        -1.7520231,\n        -1.6064339,\n        -1.7949163,\n        -1.6063402,\n        -1.9180142,\n        -1.8818871,\n        -1.6793885,\n        -1.67244,\n        -1.9063159,\n        -1.9686803,\n        -1.6347135,\n        -1.8534031,\n        -1.6582611,\n        -1.7758393,\n        -1.9086692,\n        -1.6868783,\n        -1.6774342,\n        -1.6174681,\n        -1.8093826,\n        -1.827288,\n        -1.8499967,\n        -1.7659829,\n        -1.7565888,\n        -1.8133397,\n        -1.6693784,\n        -1.648273,\n        -2.521096,\n        -2.2045913,\n        -2.2039254,\n        -2.3295937,\n        -2.1437054,\n        -2.4531891,\n        -2.0813184,\n        -1.8641522,\n        -2.979186,\n        -2.1612687,\n        -2.1348293,\n        -2.16697,\n        -1.8916192,\n        -1.9236215,\n        -1.7352973,\n        -2.264334,\n        -1.8202388,\n        -2.2695205,\n        -1.9896787,\n        -1.6497238,\n        -2.0752726,\n        -1.9621438,\n        -2.7148943,\n        -1.9871827,\n        -2.7365506,\n        -2.356889,\n        -1.6996434,\n        -1.7351856,\n        -2.150795,\n        -2.0257459,\n        -2.240612,\n        -2.4337668,\n        -1.8312027,\n        -2.4079015,\n        -1.8643912,\n        -1.9720566,\n        -2.5028517,\n        -1.9584169,\n        -2.0469992,\n        -2.5444942,\n        -2.001135,\n        
-3.569225,\n        -2.0345218,\n        -1.8616279,\n        -2.5987465,\n        -2.119698,\n        -2.0288913,\n        -1.7859378,\n        -2.6630232,\n        -1.8724684,\n        -2.778035,\n        -2.3339639,\n        -2.0099356,\n        -2.07525,\n        -1.8595737,\n        -1.7922893,\n        -2.3891761,\n        -2.560148,\n        -2.8437333,\n        -2.0655832,\n        -1.7084434,\n        -2.1388345,\n        -2.6832392,\n        -1.7777342,\n        -2.556761,\n        -3.1471772,\n        -2.4486399,\n        -3.2626753,\n        -2.743352,\n        -2.6989517,\n        -2.662875,\n        -4.866611,\n        -4.188736,\n        -2.7151356,\n        -2.8587973,\n        -2.47859,\n        -3.8088737,\n        -2.5240817,\n        -1.9957222,\n        -3.138415,\n        -3.2717984,\n        -3.1498153,\n        -2.743579,\n        -2.2526274,\n        -2.50212,\n        -3.9200556,\n        -4.119592,\n        -3.521935,\n        -2.277947,\n        -2.908566,\n        -2.2615526,\n        -3.7282572,\n        -3.1510122,\n        -1.7886689,\n        -3.250285,\n        -2.5849755,\n        -1.9076731,\n        -2.898602,\n        -3.594523,\n        -2.5473993,\n        -2.3753328,\n        -2.273242,\n        -5.8790035,\n        -2.177618,\n        -2.3663774,\n        -2.6003609,\n        -2.8216631,\n        -2.1137898,\n        -2.9474978,\n        -3.3929472,\n        -2.0721483,\n        -2.0494266,\n        -4.468507,\n        -3.115191,\n        -4.9723973,\n        -3.4666471,\n        -2.448805,\n        -2.201985,\n        -2.967275,\n        -2.3713338,\n        -3.3682654,\n        -4.0202723,\n        -3.386346,\n        -2.711816,\n        -2.4446042,\n        -4.2688355,\n        -3.407556,\n        -2.7723808,\n        -3.4934607,\n        -2.4770794,\n        -4.2625604,\n        -3.5663607,\n        -2.7192361,\n        -2.441945,\n        -2.3298247,\n        -2.1854405,\n        -2.5347698,\n        -5.375053,\n       
 -3.7296784,\n        -2.2532272,\n        -2.3139634,\n        -2.177199,\n        -5.497376,\n        -6.019287,\n        -4.070514,\n        -2.4715588,\n        -3.6453009,\n        -7.0692167,\n        -2.936739,\n        -2.3720882,\n        -4.3689184,\n        -2.1131155,\n        -3.1359951,\n        -5.7391376,\n        -2.3628266,\n        -2.1523256,\n        -2.4543364,\n        -2.521942,\n        -2.490915,\n        -3.972962,\n        -6.118891,\n        -3.3457332,\n        -2.1690152,\n        -2.7532642,\n        -3.3654513,\n        -3.868791,\n        -3.8921962,\n        -2.535788,\n        -3.98272,\n        -2.2380013,\n        -2.293273,\n        -3.3237154,\n        -2.5618138,\n        -2.3382132,\n        -3.2277465,\n        -3.8876097,\n        -4.109236,\n        -4.762749,\n        -3.6733208,\n        -2.9050758,\n        -3.2219455,\n        -2.8485487,\n        -2.9965434,\n        -3.7959926,\n        -3.4480162,\n        -2.7039433,\n        -3.3766913,\n        -3.6542602,\n        -3.55788,\n        -4.494692,\n        -4.54066,\n        -2.5475478\n      ],\n      \"pointIndex\": [\n        1,\n        1250,\n        255,\n        1614311906,\n        1037541425,\n        1422066171,\n        762040836,\n        1923048011,\n        150118905,\n        953173356,\n        76440872,\n        194981059,\n        999426963,\n        1706086193,\n        1594445842,\n        864392687,\n        351906466,\n        1636326003,\n        1659863674,\n        1654131598,\n        588612615,\n        1268906687,\n        1679779764,\n        1574393566,\n        1011133818,\n        264827597,\n        1867083960,\n        299138962,\n        865870158,\n        657862061,\n        1240158043,\n        1277437500,\n        1474414712,\n        158764031,\n        673816356,\n        609030105,\n        173575334,\n        774861641,\n        1670772511,\n        1562991630,\n        1103180464,\n        794742985,\n        
971023046,\n        1177154699,\n        1549743205,\n        1225277499,\n        251542656,\n        258018327,\n        263774789,\n        1283647599,\n        871791224,\n        291457405,\n        295674933,\n        1157160941,\n        102459636,\n        314173493,\n        1723731252,\n        342455699,\n        1839343407,\n        398751663,\n        631047258,\n        1210030052,\n        1710892087,\n        1924823826,\n        903475066,\n        456327805,\n        491520307,\n        506106568,\n        1158303350,\n        892079952,\n        536437045,\n        1117345195,\n        564512855,\n        1395768170,\n        1925479604,\n        1765493362,\n        1627026636,\n        1751327113,\n        1188066073,\n        1917447254,\n        827535050,\n        1642368473,\n        903014297,\n        1441126861,\n        1130810732,\n        1224744483,\n        1811933289,\n        1935752407\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 3094355644247862573\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.4739364,\n        -1.4854518,\n        -1.4760784,\n        -1.4875935,\n        -1.4953938,\n        -1.4984169,\n        -1.4879522,\n        -1.5093524,\n        -1.4926078,\n        -1.5171238,\n        -1.5125519,\n        -1.5012732,\n        -1.5166982,\n        -1.4958954,\n        -1.5366755,\n        -1.5484987,\n        -1.5611908,\n        -1.5979398,\n        -1.5603845,\n        -1.6203203,\n        -1.5704353,\n        -1.6633575,\n        -1.5579628,\n        -1.55296,\n        -1.795618,\n        -1.6760824,\n        -1.5176575,\n        -1.5144597,\n        -1.5277035,\n        -1.5477281,\n        -1.7140696,\n        
-1.6394001,\n        -1.6954261,\n        -2.1438859,\n        -1.8238163,\n        -1.6423,\n        -1.6687416,\n        -1.8934213,\n        -1.6380768,\n        -1.9984225,\n        -1.8914065,\n        -1.9348294,\n        -1.8795712,\n        -1.7462192,\n        -1.8659137,\n        -1.8379618,\n        -1.5810261,\n        -1.7192446,\n        -1.7008163,\n        -1.8184123,\n        -1.8110999,\n        -2.1787684,\n        -2.2982676,\n        -1.5314186,\n        -1.7548642,\n        -2.2257524,\n        -1.5917569,\n        -1.5637295,\n        -1.5673133,\n        -1.6050491,\n        -1.7135774,\n        -2.049922,\n        -2.0660481,\n        -1.7038921,\n        -1.8500408,\n        -1.9410862,\n        -2.1084352,\n        -2.2458594,\n        -2.4118128,\n        -2.025019,\n        -1.9305323,\n        -2.0011356,\n        -2.5528111,\n        -2.0804389,\n        -1.7682977,\n        -2.9587407,\n        -2.0774496,\n        -2.0376956,\n        -1.8831936,\n        -2.3587713,\n        -2.2914603,\n        -2.080167,\n        -2.398474,\n        -2.1165843,\n        -2.2489762,\n        -3.3198617,\n        -2.0660028,\n        -1.7743989,\n        -2.0366867,\n        -1.9493996,\n        -2.1968048,\n        -1.9214082,\n        -1.865101,\n        -1.6677953,\n        -1.7295773,\n        -1.7557685,\n        -1.7215736,\n        -2.1169577,\n        -3.8750894,\n        -1.8312486,\n        -2.2261262,\n        -1.9203904,\n        -2.4061456,\n        -2.5151784,\n        -2.3806129,\n        -2.3084571,\n        -2.506206,\n        -1.9816073,\n        -2.0561907,\n        -1.980592,\n        -2.1949437,\n        -2.2723653,\n        -2.3749077,\n        -1.7950431,\n        -1.8964046,\n        -2.211347,\n        -2.4007275,\n        -1.5689955,\n        -1.6514187,\n        -3.7187233,\n        -1.6301489,\n        -2.086895,\n        -2.1163597,\n        -2.6241238,\n        -3.9407856,\n        -2.7351525,\n        -2.1547961,\n    
    -2.0345523,\n        -2.2548847,\n        -2.1943324,\n        -4.6028876,\n        -2.1080987,\n        -2.2166712,\n        -2.4134626,\n        -4.2891135,\n        -3.1646965,\n        -3.283529,\n        -3.0363867,\n        -2.654098,\n        -2.5246723,\n        -2.9533222,\n        -2.7589624,\n        -6.8319306,\n        -3.45097,\n        -2.302588,\n        -4.1987743,\n        -3.018338,\n        -3.9248986,\n        -2.2258532,\n        -3.859081,\n        -2.3803658,\n        -4.1095366,\n        -3.1288927,\n        -2.689985,\n        -4.6837077,\n        -3.3727543,\n        -2.7500443,\n        -3.597516,\n        -2.7279165,\n        -2.3633702,\n        -2.3965745,\n        -3.7145114,\n        -3.6214883,\n        -2.0868714,\n        -2.301757,\n        -3.4451625,\n        -4.5945196,\n        -3.219573,\n        -3.4956493,\n        -2.6333444,\n        -3.1672225,\n        -3.5964112,\n        -4.2114186,\n        -3.3155928,\n        -2.5097873,\n        -3.0633156,\n        -1.8225551,\n        -2.144348,\n        -2.2268782,\n        -2.6874378,\n        -2.5341742,\n        -4.0607824,\n        -2.2209594,\n        -2.2272894,\n        -4.5724764,\n        -2.147491,\n        -2.140911,\n        -1.6880952,\n        -6.806554,\n        -2.9362721,\n        -2.1946857,\n        -3.549542,\n        -1.7758566,\n        -1.8011234,\n        -3.9622927,\n        -3.233758,\n        -2.7135792,\n        -4.252686,\n        -4.0482345,\n        -1.9278389,\n        -1.9454994,\n        -2.5433922,\n        -2.2774193,\n        -4.3522453,\n        -2.2904599,\n        -3.0752342,\n        -2.6221926,\n        -5.464108,\n        -4.4305067,\n        -3.8604193,\n        -2.6281645,\n        -2.3101032,\n        -2.92189,\n        -2.6759644,\n        -2.7982519,\n        -2.4083624,\n        -2.7625816,\n        -4.315713,\n        -2.2733822,\n        -5.1711693,\n        -3.2551575,\n        -2.2869558,\n        -2.6063263,\n        
-2.4167163,\n        -3.0222101,\n        -6.9500675,\n        -3.7502477,\n        -3.0935733,\n        -2.0001183,\n        -3.9397662,\n        -2.036932,\n        -5.9469604,\n        -3.63629,\n        -3.1122005,\n        -2.8454168,\n        -3.0099926,\n        -1.9705479,\n        -2.064685,\n        -1.900059,\n        -4.272635,\n        -3.928694,\n        -1.9878477,\n        -1.6362039,\n        -2.645227,\n        -2.5863435,\n        -4.9102893,\n        -2.174294,\n        -3.0281172,\n        -3.4842854,\n        -5.2957344,\n        -5.0918097,\n        -3.1096747,\n        -6.0454197,\n        -3.172589,\n        -2.2439806\n      ],\n      \"pointIndex\": [\n        4,\n        1256,\n        255,\n        1223797928,\n        1623677654,\n        1728694488,\n        1360127196,\n        1237665187,\n        300008325,\n        23900434,\n        533047755,\n        1038903454,\n        1296350950,\n        416240210,\n        954777070,\n        871612658,\n        159631247,\n        440691661,\n        552496932,\n        410244749,\n        1009372704,\n        1600468221,\n        1374403740,\n        1918402956,\n        695135605,\n        1803858985,\n        1315684846,\n        1679339055,\n        1636784866,\n        1699017295,\n        672251182,\n        369725123,\n        1413805818,\n        517289020,\n        1133930449,\n        1906422654,\n        856131894,\n        865341588,\n        447977189,\n        1608462439,\n        1478336717,\n        824417366,\n        1036955096,\n        1376703219,\n        1627803349,\n        1415150729,\n        1522913492,\n        1806293611,\n        1160439800,\n        78400409,\n        1722060119,\n        1655226301,\n        1957291000,\n        315667447,\n        52729117,\n        1006468,\n        340123706,\n        1055091144,\n        1208275682,\n        377204605,\n        1179474234,\n        1073440127,\n        2998993,\n        986081357,\n        805778975,\n   
     23156479,\n        1704113892,\n        854499111,\n        1629578411,\n        586232179,\n        1886933506,\n        1077656819,\n        645784931,\n        279521799,\n        1182801783,\n        719702406,\n        1683648103,\n        771515289,\n        1543232884,\n        1777928976,\n        308431624,\n        1199169854,\n        1663530999,\n        1137583284,\n        1505515416,\n        1819345697,\n        1817788662,\n        1965613555\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1256,\n      \"compressed\": true,\n      \"randomSeed\": 7829655809554688956\n    }\n  ],\n  \"compactRandomCutTreeStates\": [\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          482049803,\n          309553323,\n          126599654,\n          183052701,\n          152692025,\n          810097597,\n          520795982,\n          186960441,\n          354890868,\n          654637203,\n          858723802,\n          771537355,\n          487196625,\n          141715079,\n          715197657,\n          744288717,\n          532366533,\n          700948526,\n          485148609,\n          589528758,\n          874417663,\n          862655872,\n          420262554,\n          126768542,\n          760361961,\n          122808912,\n          768002537,\n          756806186,\n          510626389,\n          378520593,\n          85155340,\n          371617678,\n          122624128,\n          270573733,\n      
    40487961,\n          543923644,\n          238230115,\n          298782203,\n          141902733,\n          518077810,\n          126914412,\n          523006182,\n          5565\n        ],\n        \"cutValueData\": [\n          66,\n          -110,\n          90,\n          -115,\n          66,\n          -109,\n          119,\n          -68,\n          66,\n          -112,\n          -121,\n          60,\n          66,\n          68,\n          45,\n          29,\n          66,\n          103,\n          4,\n          88,\n          66,\n          -107,\n          -60,\n          23,\n          66,\n          -112,\n          -105,\n          45,\n          66,\n          -120,\n          26,\n          -107,\n          66,\n          -106,\n          95,\n          -105,\n          66,\n          -101,\n          -27,\n          13,\n          66,\n          -99,\n          -69,\n          -91,\n          66,\n          -124,\n          25,\n          -27,\n          66,\n          -126,\n          -61,\n          -35,\n          66,\n          -118,\n          -60,\n          108,\n          66,\n          -97,\n          102,\n          -78,\n          66,\n          -109,\n          -63,\n          28,\n          66,\n          -97,\n          23,\n          105,\n          66,\n          117,\n          119,\n          -84,\n          66,\n          112,\n          -6,\n          -37,\n          66,\n          -82,\n          31,\n          113,\n          66,\n          -116,\n          56,\n          -48,\n          66,\n          102,\n          -65,\n          -50,\n          66,\n          -128,\n          -126,\n          -98,\n          66,\n          110,\n          -121,\n          56,\n          66,\n          -96,\n          -25,\n          -68,\n          66,\n          -89,\n          -86,\n          49,\n          66,\n          103,\n          87,\n          -128,\n          66,\n          -118,\n          -91,\n          75,\n          
66,\n          126,\n          -72,\n          50,\n          66,\n          -86,\n          -88,\n          -61,\n          66,\n          -84,\n          119,\n          16,\n          66,\n          -76,\n          2,\n          50,\n          66,\n          -81,\n          -100,\n          21,\n          66,\n          108,\n          49,\n          -47,\n          66,\n          -69,\n          -12,\n          0,\n          66,\n          -104,\n          -91,\n          13,\n          66,\n          -119,\n          3,\n          50,\n          66,\n          110,\n          33,\n          -114,\n          66,\n          -76,\n          51,\n          -58,\n          66,\n          89,\n          66,\n          68,\n          66,\n          -78,\n          59,\n          39,\n          66,\n          -110,\n          70,\n          5,\n          66,\n          -119,\n          -55,\n          -114,\n          66,\n          101,\n          -124,\n          121,\n          66,\n          -72,\n          117,\n          -100,\n          66,\n          -68,\n          38,\n          77,\n          66,\n          94,\n          -106,\n          15,\n          66,\n          -97,\n          -62,\n          -75,\n          66,\n          87,\n          89,\n          33,\n          66,\n          -93,\n          -3,\n          122,\n          66,\n          -122,\n          87,\n          77,\n          66,\n          -118,\n          -11,\n          -12,\n          66,\n          -95,\n          -127,\n          90,\n          66,\n          -67,\n          77,\n          24,\n          66,\n          72,\n          108,\n          -65,\n          66,\n          95,\n          62,\n          -64,\n          66,\n          -72,\n          5,\n          119,\n          66,\n          -112,\n          -99,\n          -53,\n          66,\n          118,\n          50,\n          -43,\n          66,\n          -80,\n          -99,\n          4,\n          66,\n         
 98,\n          12,\n          13,\n          66,\n          73,\n          -36,\n          -116,\n          66,\n          118,\n          -42,\n          32,\n          66,\n          -78,\n          -16,\n          -59,\n          66,\n          -100,\n          68,\n          10,\n          66,\n          -100,\n          -119,\n          69,\n          66,\n          97,\n          -34,\n          -94,\n          66,\n          70,\n          -54,\n          -29,\n          66,\n          118,\n          -62,\n          96,\n          66,\n          -122,\n          95,\n          -113,\n          66,\n          125,\n          74,\n          85,\n          66,\n          -104,\n          102,\n          63,\n          66,\n          -108,\n          -47,\n          26,\n          66,\n          -73,\n          -94,\n          -123,\n          66,\n          91,\n          114,\n          -124,\n          66,\n          -88,\n          104,\n          16,\n          66,\n          -109,\n          -34,\n          80,\n          66,\n          -124,\n          78,\n          -73,\n          66,\n          -99,\n          -49,\n          45,\n          66,\n          112,\n          -127,\n          10,\n          66,\n          -108,\n          -85,\n          18,\n          66,\n          89,\n          97,\n          73,\n          66,\n          -74,\n          -98,\n          -24,\n          66,\n          -115,\n          -12,\n          -19,\n          66,\n          -89,\n          -41,\n          -63,\n          66,\n          -120,\n          -50,\n          -53,\n          66,\n          85,\n          118,\n          116,\n          66,\n          -90,\n          -90,\n          107,\n          66,\n          112,\n          -109,\n          111,\n          66,\n          -115,\n          5,\n          0,\n          66,\n          -84,\n          123,\n          14,\n          66,\n          -85,\n          -77,\n          29,\n          66,\n        
  -98,\n          73,\n          -58,\n          66,\n          -112,\n          101,\n          -38,\n          66,\n          -104,\n          -68,\n          -17,\n          66,\n          -119,\n          -76,\n          -122,\n          66,\n          -67,\n          -81,\n          -66,\n          66,\n          111,\n          86,\n          -59,\n          66,\n          -123,\n          114,\n          110,\n          66,\n          -107,\n          20,\n          -100,\n          66,\n          -88,\n          -76,\n          -13,\n          66,\n          104,\n          41,\n          28,\n          66,\n          -123,\n          -104,\n          -119,\n          66,\n          96,\n          28,\n          36,\n          66,\n          -118,\n          10,\n          -86,\n          66,\n          -112,\n          -31,\n          61,\n          66,\n          122,\n          -92,\n          -18,\n          66,\n          -121,\n          -5,\n          -6,\n          66,\n          89,\n          43,\n          -63,\n          66,\n          80,\n          95,\n          53,\n          66,\n          -84,\n          51,\n          -91,\n          66,\n          -72,\n          1,\n          127,\n          66,\n          -78,\n          -36,\n          -70,\n          66,\n          79,\n          -126,\n          -2,\n          66,\n          95,\n          119,\n          127,\n          66,\n          108,\n          19,\n          60,\n          66,\n          114,\n          -51,\n          -90,\n          66,\n          -118,\n          96,\n          -58,\n          66,\n          108,\n          122,\n          -82,\n          66,\n          119,\n          -33,\n          -112,\n          66,\n          88,\n          -9,\n          85,\n          66,\n          -113,\n          111,\n          27,\n          66,\n          -125,\n          123,\n          60,\n          66,\n          -120,\n          -91,\n          -13,\n          66,\n    
      -97,\n          -40,\n          -5,\n          66,\n          83,\n          43,\n          -51,\n          66,\n          -105,\n          3,\n          6,\n          66,\n          -81,\n          -13,\n          126,\n          66,\n          -88,\n          -27,\n          39,\n          66,\n          -123,\n          23,\n          -111,\n          66,\n          -109,\n          116,\n          -85,\n          66,\n          -86,\n          -33,\n          -116,\n          66,\n          98,\n          28,\n          127,\n          66,\n          -81,\n          67,\n          50,\n          66,\n          -122,\n          -34,\n          30,\n          66,\n          111,\n          111,\n          -11,\n          66,\n          -78,\n          114,\n          -37,\n          66,\n          119,\n          55,\n          89,\n          66,\n          -85,\n          -97,\n          46,\n          66,\n          -100,\n          108,\n          -88,\n          66,\n          -97,\n          16,\n          -40,\n          66,\n          -105,\n          46,\n          17,\n          66,\n          99,\n          12,\n          -96,\n          66,\n          113,\n          16,\n          -13,\n          66,\n          -82,\n          -85,\n          -46,\n          66,\n          112,\n          -13,\n          -59,\n          66,\n          -126,\n          25,\n          10,\n          66,\n          -111,\n          -4,\n          74,\n          66,\n          -78,\n          -86,\n          61,\n          66,\n          -82,\n          29,\n          123,\n          66,\n          -83,\n          97,\n          -34,\n          66,\n          -88,\n          102,\n          126,\n          66,\n          -93,\n          126,\n          -99,\n          66,\n          -81,\n          51,\n          70,\n          66,\n          -79,\n          89,\n          72,\n          66,\n          -97,\n          -97,\n          -36,\n          66,\n          
-57,\n          88,\n          122,\n          66,\n          -102,\n          79,\n          6,\n          66,\n          82,\n          65,\n          -86,\n          66,\n          97,\n          -7,\n          71,\n          66,\n          114,\n          89,\n          77,\n          66,\n          88,\n          40,\n          114,\n          66,\n          103,\n          -3,\n          -1,\n          66,\n          -69,\n          64,\n          94,\n          66,\n          119,\n          74,\n          117,\n          66,\n          -93,\n          -41,\n          51,\n          66,\n          120,\n          -86,\n          52,\n          66,\n          -117,\n          80,\n          -103,\n          66,\n          -92,\n          19,\n          -29,\n          66,\n          -61,\n          81,\n          77,\n          66,\n          -62,\n          -6,\n          13,\n          66,\n          -92,\n          75,\n          3,\n          66,\n          -100,\n          -109,\n          24,\n          66,\n          -116,\n          16,\n          64,\n          66,\n          71,\n          -85,\n          -97,\n          66,\n          -84,\n          14,\n          119,\n          66,\n          -103,\n          -9,\n          -32,\n          66,\n          -103,\n          -119,\n          -124,\n          66,\n          -86,\n          61,\n          122,\n          66,\n          -118,\n          23,\n          -64,\n          66,\n          -94,\n          -38,\n          85,\n          66,\n          -88,\n          -92,\n          35,\n          66,\n          83,\n          71,\n          62,\n          66,\n          -109,\n          93,\n          -14,\n          66,\n          -114,\n          -71,\n          -88,\n          66,\n          -83,\n          119,\n          -116,\n          66,\n          -71,\n          -123,\n          102,\n          66,\n          -95,\n          -77,\n          6,\n          66,\n          -113,\n       
   71,\n          -12,\n          66,\n          -125,\n          97,\n          -93,\n          66,\n          -126,\n          88,\n          -31,\n          66,\n          -99,\n          -121,\n          66,\n          66,\n          -106,\n          -85,\n          -47,\n          66,\n          -110,\n          -100,\n          -48,\n          66,\n          -106,\n          -15,\n          67,\n          66,\n          121,\n          34,\n          -43,\n          66,\n          -98,\n          62,\n          119,\n          66,\n          -120,\n          68,\n          72,\n          66,\n          110,\n          -27,\n          107,\n          66,\n          -109,\n          124,\n          86,\n          66,\n          -106,\n          -21,\n          -96,\n          66,\n          -125,\n          11,\n          66,\n          66,\n          -86,\n          98,\n          -56,\n          66,\n          -102,\n          -103,\n          106,\n          66,\n          -89,\n          -3,\n          -125,\n          66,\n          119,\n          -14,\n          -6,\n          66,\n          -80,\n          115,\n          -122,\n          66,\n          -79,\n          90,\n          -26,\n          66,\n          -107,\n          -60,\n          -44,\n          66,\n          -96,\n          48,\n          15,\n          66,\n          -74,\n          -88,\n          -30,\n          66,\n          -69,\n          56,\n          -115,\n          66,\n          -118,\n          -86,\n          -69,\n          66,\n          -125,\n          51,\n          -103,\n          66,\n          -92,\n          -64,\n          -1,\n          66,\n          -119,\n          53,\n          -32,\n          66,\n          -72,\n          28,\n          8,\n          66,\n          -121,\n          -81,\n          8,\n          66,\n          103,\n          -101,\n          122,\n          66,\n          -96,\n          -60,\n          -73,\n          66,\n          
-88,\n          102,\n          15,\n          66,\n          -118,\n          127,\n          -58,\n          66,\n          -123,\n          -37,\n          -87,\n          66,\n          -60,\n          19,\n          -48,\n          66,\n          117,\n          -78,\n          53,\n          66,\n          -88,\n          -42,\n          -114,\n          66,\n          -88,\n          104,\n          -128,\n          66,\n          -111,\n          -122,\n          -62,\n          66,\n          -115,\n          -20,\n          -92,\n          66,\n          -111,\n          -112,\n          -66,\n          66,\n          106,\n          75,\n          94,\n          66,\n          116,\n          -125,\n          -68,\n          66,\n          -77,\n          93,\n          -112,\n          66,\n          106,\n          -50,\n          65,\n          66,\n          -128,\n          32,\n          -114,\n          66,\n          -127,\n          94,\n          -124,\n          66,\n          120,\n          12,\n          -7,\n          66,\n          86,\n          94,\n          -86,\n          66,\n          -123,\n          124,\n          34,\n          66,\n          114,\n          93,\n          -11,\n          66,\n          96,\n          -82,\n          40,\n          66,\n          123,\n          59,\n          10,\n          66,\n          125,\n          -57,\n          91,\n          66,\n          -93,\n          52,\n          83,\n          66,\n          -100,\n          37,\n          94,\n          66,\n          -109,\n          55,\n          86,\n          66,\n          -112,\n          -2,\n          97,\n          66,\n          -117,\n          -15,\n          43,\n          66,\n          -103,\n          36,\n          79,\n          66,\n          -97,\n          87,\n          -90,\n          66,\n          -77,\n          -117,\n          -116,\n          66,\n          -121,\n          -29,\n          -83,\n          66,\n  
        121,\n          -16,\n          -1,\n          66,\n          127,\n          113,\n          29,\n          66,\n          -73,\n          34,\n          18\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          1072824191,\n          486529471,\n          132093450,\n          892461317,\n          153379350,\n          35233988,\n          21617329,\n          152299184,\n          16\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          1054997887,\n          467661783,\n          83623930,\n          637379628,\n          136901326,\n          58821506,\n          541822520,\n          957235200,\n          515\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -4423608042667571926,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          508926833,\n          376812577,\n          1073405887,\n          803272566,\n          383352682,\n          796178119,\n          752602543,\n          483882278,\n          39054066,\n          867271797,\n          34768183,\n          627817038,\n          462470014,\n          215063649,\n    
      469595442,\n          514132970,\n          40872030,\n          36873947,\n          581342778,\n          359845187,\n          64406823,\n          107591233,\n          131545002,\n          93441401,\n          748471629,\n          173630950,\n          840804685,\n          401046562,\n          380335662,\n          37067065,\n          749545335,\n          899780331,\n          50157867,\n          935015887,\n          737232222,\n          192223463,\n          484480234,\n          708107174,\n          882356085,\n          203460394,\n          216864482,\n          182243245,\n          23\n        ],\n        \"cutValueData\": [\n          66,\n          -94,\n          -88,\n          -9,\n          66,\n          118,\n          -7,\n          -36,\n          66,\n          96,\n          48,\n          -96,\n          66,\n          -88,\n          64,\n          53,\n          66,\n          108,\n          -41,\n          -102,\n          66,\n          -98,\n          20,\n          -88,\n          66,\n          71,\n          -107,\n          -87,\n          66,\n          70,\n          3,\n          -93,\n          66,\n          -103,\n          98,\n          -70,\n          66,\n          84,\n          -66,\n          20,\n          66,\n          -105,\n          -114,\n          -81,\n          66,\n          -121,\n          -36,\n          46,\n          66,\n          -85,\n          127,\n          20,\n          66,\n          -127,\n          -61,\n          70,\n          66,\n          -95,\n          70,\n          109,\n          66,\n          95,\n          125,\n          71,\n          66,\n          -107,\n          75,\n          89,\n          66,\n          -119,\n          49,\n          -18,\n          66,\n          99,\n          -65,\n          9,\n          66,\n          -83,\n          -3,\n          114,\n          66,\n          -91,\n          4,\n          -102,\n          66,\n          99,\n     
     30,\n          -53,\n          66,\n          -120,\n          -123,\n          -42,\n          66,\n          -119,\n          116,\n          89,\n          66,\n          -99,\n          -29,\n          -25,\n          66,\n          -116,\n          -51,\n          19,\n          66,\n          -103,\n          -42,\n          53,\n          66,\n          83,\n          -56,\n          -2,\n          66,\n          112,\n          -39,\n          101,\n          66,\n          -77,\n          -10,\n          111,\n          66,\n          -100,\n          -50,\n          -108,\n          66,\n          125,\n          -42,\n          -22,\n          66,\n          -98,\n          -22,\n          84,\n          66,\n          -125,\n          43,\n          77,\n          66,\n          -98,\n          63,\n          124,\n          66,\n          -84,\n          43,\n          -66,\n          66,\n          115,\n          86,\n          -120,\n          66,\n          -103,\n          -77,\n          -27,\n          66,\n          -95,\n          18,\n          -23,\n          66,\n          109,\n          -8,\n          -111,\n          66,\n          -113,\n          81,\n          14,\n          66,\n          -111,\n          -125,\n          -18,\n          66,\n          82,\n          20,\n          126,\n          66,\n          124,\n          -99,\n          -52,\n          66,\n          -113,\n          -61,\n          118,\n          66,\n          -76,\n          13,\n          109,\n          66,\n          118,\n          61,\n          56,\n          66,\n          84,\n          91,\n          -73,\n          66,\n          -63,\n          -94,\n          -51,\n          66,\n          -119,\n          29,\n          -109,\n          66,\n          112,\n          -86,\n          59,\n          66,\n          -121,\n          30,\n          109,\n          66,\n          -99,\n          93,\n          -65,\n          66,\n          
-105,\n          -115,\n          48,\n          66,\n          -115,\n          2,\n          -55,\n          66,\n          -126,\n          -12,\n          72,\n          66,\n          103,\n          -65,\n          20,\n          66,\n          -75,\n          -107,\n          -58,\n          66,\n          -100,\n          51,\n          -18,\n          66,\n          -100,\n          95,\n          88,\n          66,\n          110,\n          -99,\n          38,\n          66,\n          125,\n          -42,\n          -126,\n          66,\n          -110,\n          97,\n          -75,\n          66,\n          -121,\n          -2,\n          102,\n          66,\n          -118,\n          -20,\n          6,\n          66,\n          -100,\n          88,\n          4,\n          66,\n          -94,\n          -21,\n          85,\n          66,\n          122,\n          5,\n          110,\n          66,\n          -93,\n          -4,\n          35,\n          66,\n          85,\n          84,\n          -68,\n          66,\n          -84,\n          -105,\n          4,\n          66,\n          -76,\n          -49,\n          -102,\n          66,\n          -86,\n          -13,\n          -11,\n          66,\n          86,\n          66,\n          107,\n          66,\n          -98,\n          117,\n          53,\n          66,\n          98,\n          30,\n          87,\n          66,\n          -94,\n          125,\n          90,\n          66,\n          125,\n          101,\n          -29,\n          66,\n          -117,\n          -34,\n          124,\n          66,\n          -88,\n          1,\n          -26,\n          66,\n          117,\n          121,\n          -12,\n          66,\n          -116,\n          16,\n          -58,\n          66,\n          -119,\n          -38,\n          125,\n          66,\n          -123,\n          127,\n          -47,\n          66,\n          89,\n          40,\n          -26,\n          66,\n          
85,\n          25,\n          -33,\n          66,\n          94,\n          96,\n          125,\n          66,\n          112,\n          76,\n          72,\n          66,\n          87,\n          -81,\n          17,\n          66,\n          -116,\n          -2,\n          -33,\n          66,\n          -96,\n          76,\n          -109,\n          66,\n          106,\n          -113,\n          83,\n          66,\n          -128,\n          8,\n          -121,\n          66,\n          -120,\n          -112,\n          -69,\n          66,\n          -89,\n          -91,\n          104,\n          66,\n          115,\n          -76,\n          -77,\n          66,\n          118,\n          95,\n          -73,\n          66,\n          119,\n          45,\n          -89,\n          66,\n          -121,\n          120,\n          -41,\n          66,\n          110,\n          36,\n          -82,\n          66,\n          -113,\n          -44,\n          -93,\n          66,\n          -116,\n          -57,\n          118,\n          66,\n          -84,\n          -99,\n          67,\n          66,\n          -114,\n          101,\n          58,\n          66,\n          -92,\n          -100,\n          58,\n          66,\n          -74,\n          -61,\n          11,\n          66,\n          -112,\n          107,\n          -100,\n          66,\n          -113,\n          -81,\n          11,\n          66,\n          -88,\n          68,\n          -17,\n          66,\n          -88,\n          -1,\n          -44,\n          66,\n          -119,\n          92,\n          -30,\n          66,\n          -104,\n          82,\n          35,\n          66,\n          -75,\n          -65,\n          -52,\n          66,\n          -91,\n          6,\n          -47,\n          66,\n          -79,\n          -21,\n          0,\n          66,\n          103,\n          -66,\n          119,\n          66,\n          -110,\n          -34,\n          -27,\n          66,\n      
    -94,\n          10,\n          -89,\n          66,\n          105,\n          75,\n          -109,\n          66,\n          -73,\n          -128,\n          105,\n          66,\n          -100,\n          -37,\n          -76,\n          66,\n          112,\n          -111,\n          37,\n          66,\n          74,\n          34,\n          59,\n          66,\n          -94,\n          -124,\n          77,\n          66,\n          -91,\n          -102,\n          109,\n          66,\n          95,\n          -104,\n          -42,\n          66,\n          -112,\n          -110,\n          18,\n          66,\n          -128,\n          60,\n          34,\n          66,\n          -103,\n          -88,\n          -55,\n          66,\n          85,\n          -11,\n          91,\n          66,\n          -128,\n          -1,\n          25,\n          66,\n          -125,\n          -116,\n          -125,\n          66,\n          -98,\n          11,\n          -113,\n          66,\n          -102,\n          45,\n          -51,\n          66,\n          -87,\n          37,\n          -126,\n          66,\n          -87,\n          -60,\n          -60,\n          66,\n          -109,\n          -112,\n          -59,\n          66,\n          -86,\n          -31,\n          102,\n          66,\n          -116,\n          -49,\n          65,\n          66,\n          -114,\n          21,\n          1,\n          66,\n          -97,\n          -60,\n          -127,\n          66,\n          111,\n          -73,\n          64,\n          66,\n          -82,\n          52,\n          -21,\n          66,\n          -105,\n          -76,\n          -125,\n          66,\n          -102,\n          5,\n          -11,\n          66,\n          -112,\n          19,\n          110,\n          66,\n          -74,\n          104,\n          92,\n          66,\n          -93,\n          26,\n          -97,\n          66,\n          74,\n          115,\n          92,\n         
 66,\n          -94,\n          -36,\n          -60,\n          66,\n          -114,\n          0,\n          18,\n          66,\n          -76,\n          -69,\n          107,\n          66,\n          -87,\n          -62,\n          -33,\n          66,\n          -92,\n          111,\n          121,\n          66,\n          125,\n          -62,\n          -33,\n          66,\n          121,\n          -72,\n          -10,\n          66,\n          92,\n          -65,\n          2,\n          66,\n          -107,\n          -22,\n          -108,\n          66,\n          -110,\n          -92,\n          -105,\n          66,\n          100,\n          -28,\n          100,\n          66,\n          -105,\n          8,\n          -16,\n          66,\n          73,\n          -80,\n          83,\n          66,\n          115,\n          -106,\n          -86,\n          66,\n          122,\n          -51,\n          42,\n          66,\n          -75,\n          -50,\n          -42,\n          66,\n          112,\n          44,\n          52,\n          66,\n          -111,\n          21,\n          119,\n          66,\n          -113,\n          10,\n          102,\n          66,\n          -86,\n          -70,\n          9,\n          66,\n          105,\n          63,\n          85,\n          66,\n          -121,\n          -73,\n          111,\n          66,\n          -94,\n          -6,\n          -38,\n          66,\n          -72,\n          -4,\n          92,\n          66,\n          -100,\n          84,\n          51,\n          66,\n          -92,\n          -110,\n          -46,\n          66,\n          -125,\n          -94,\n          121,\n          66,\n          -86,\n          24,\n          -83,\n          66,\n          -103,\n          120,\n          34,\n          66,\n          108,\n          30,\n          14,\n          66,\n          69,\n          -62,\n          -56,\n          66,\n          106,\n          -27,\n          100,\n        
  66,\n          -100,\n          66,\n          103,\n          66,\n          -127,\n          124,\n          48,\n          66,\n          -112,\n          89,\n          103,\n          66,\n          95,\n          37,\n          -54,\n          66,\n          125,\n          -29,\n          -9,\n          66,\n          98,\n          -57,\n          -61,\n          66,\n          78,\n          -48,\n          -9,\n          66,\n          -109,\n          37,\n          -103,\n          66,\n          -77,\n          50,\n          -41,\n          66,\n          -74,\n          -84,\n          93,\n          66,\n          -89,\n          102,\n          109,\n          66,\n          123,\n          -38,\n          95,\n          66,\n          -94,\n          97,\n          115,\n          66,\n          -96,\n          -51,\n          49,\n          66,\n          83,\n          69,\n          127,\n          66,\n          -122,\n          25,\n          -93,\n          66,\n          101,\n          -23,\n          -114,\n          66,\n          -106,\n          5,\n          -107,\n          66,\n          84,\n          100,\n          126,\n          66,\n          -92,\n          -95,\n          82,\n          66,\n          112,\n          77,\n          -81,\n          66,\n          -92,\n          -108,\n          -112,\n          66,\n          125,\n          -4,\n          -111,\n          66,\n          -104,\n          55,\n          125,\n          66,\n          -122,\n          -38,\n          -65,\n          66,\n          -108,\n          112,\n          -95,\n          66,\n          99,\n          -22,\n          -77,\n          66,\n          -119,\n          -66,\n          -101,\n          66,\n          -78,\n          -73,\n          28,\n          66,\n          -80,\n          -41,\n          -66,\n          66,\n          -101,\n          -107,\n          77,\n          66,\n          -88,\n          111,\n          124,\n 
         66,\n          108,\n          69,\n          -63,\n          66,\n          -86,\n          97,\n          120,\n          66,\n          -84,\n          63,\n          21,\n          66,\n          -60,\n          123,\n          107,\n          66,\n          -87,\n          -127,\n          -84,\n          66,\n          -61,\n          125,\n          -119,\n          66,\n          -96,\n          -63,\n          -47,\n          66,\n          127,\n          53,\n          3,\n          66,\n          -88,\n          78,\n          101,\n          66,\n          110,\n          -114,\n          -91,\n          66,\n          109,\n          97,\n          -74,\n          66,\n          91,\n          116,\n          79,\n          66,\n          104,\n          115,\n          -57,\n          66,\n          87,\n          16,\n          -54,\n          66,\n          -121,\n          -120,\n          95,\n          66,\n          122,\n          -6,\n          17,\n          66,\n          -107,\n          82,\n          -44,\n          66,\n          117,\n          -99,\n          -16,\n          66,\n          120,\n          83,\n          -65,\n          66,\n          -109,\n          -123,\n          114,\n          66,\n          -85,\n          -124,\n          2,\n          66,\n          -79,\n          -80,\n          103,\n          66,\n          -112,\n          -13,\n          75,\n          66,\n          -121,\n          -74,\n          -84,\n          66,\n          -117,\n          28,\n          24,\n          66,\n          -105,\n          9,\n          -26,\n          66,\n          -102,\n          79,\n          90,\n          66,\n          -121,\n          -95,\n          12,\n          66,\n          -107,\n          -37,\n          -3,\n          66,\n          -98,\n          22,\n          -122,\n          66,\n          -90,\n          123,\n          -22,\n          66,\n          -117,\n          -101,\n          
-92,\n          66,\n          -98,\n          76,\n          -114,\n          66,\n          -92,\n          61,\n          -59,\n          66,\n          -69,\n          3,\n          111,\n          66,\n          117,\n          -63,\n          -97,\n          66,\n          -98,\n          87,\n          49,\n          66,\n          -103,\n          50,\n          -38,\n          66,\n          -122,\n          39,\n          93,\n          66,\n          -126,\n          50,\n          90,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 253,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1104862922,\n          1160430947,\n          602631239,\n          973599817,\n          1112041301,\n          769349326,\n          601860200,\n          1016410184,\n          715592624,\n          1097691496,\n          1155855487,\n          581151415,\n          985085023,\n          446\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162261466,\n          1162084076,\n          645166417,\n          1140984142,\n          725231066,\n          643375763,\n          602043649,\n          1159956112,\n          774043901,\n          643311625,\n          710468725,\n          586090876,\n          585972841,\n          364\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 5623816131219899495,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      
\"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          530380726,\n          858261536,\n          498543680,\n          480659814,\n          272727935,\n          742689430,\n          688554251,\n          656105280,\n          524438278,\n          244566623,\n          525113071,\n          2223965,\n          459501897,\n          806978539,\n          380482694,\n          6057741,\n          857166212,\n          519700115,\n          169625971,\n          156948473,\n          368423804,\n          428012101,\n          372187640,\n          262130689,\n          427730813,\n          745130370,\n          481473952,\n          459312062,\n          498523251,\n          702664939,\n          459256921,\n          865174426,\n          635585514,\n          181906227,\n          118811245,\n          394423262,\n          238339688,\n          538393390,\n          852456440,\n          696402172,\n          358755081,\n          385617401,\n          6349\n        ],\n        \"cutValueData\": [\n          66,\n          119,\n          -60,\n          -124,\n          66,\n          -126,\n          73,\n          98,\n          66,\n          111,\n          86,\n          -35,\n          66,\n          -90,\n          51,\n          -113,\n          66,\n          -81,\n          -11,\n          -121,\n          66,\n          90,\n          74,\n          48,\n          66,\n          99,\n          -121,\n          -26,\n          66,\n          -127,\n          -104,\n          7,\n          66,\n          85,\n          -101,\n          -13,\n          66,\n          112,\n          8,\n          -114,\n          66,\n       
   -107,\n          -65,\n          60,\n          66,\n          96,\n          -18,\n          -31,\n          66,\n          97,\n          97,\n          -32,\n          66,\n          118,\n          57,\n          89,\n          66,\n          -110,\n          42,\n          127,\n          66,\n          -73,\n          67,\n          82,\n          66,\n          84,\n          57,\n          77,\n          66,\n          -127,\n          -55,\n          -32,\n          66,\n          74,\n          -110,\n          91,\n          66,\n          -79,\n          116,\n          -50,\n          66,\n          -122,\n          0,\n          -99,\n          66,\n          -125,\n          -77,\n          31,\n          66,\n          -114,\n          57,\n          43,\n          66,\n          -123,\n          86,\n          80,\n          66,\n          -114,\n          64,\n          85,\n          66,\n          81,\n          42,\n          114,\n          66,\n          -109,\n          -45,\n          -116,\n          66,\n          -105,\n          -23,\n          37,\n          66,\n          -71,\n          -104,\n          -73,\n          66,\n          96,\n          52,\n          -10,\n          66,\n          -100,\n          58,\n          24,\n          66,\n          -91,\n          82,\n          86,\n          66,\n          83,\n          92,\n          -67,\n          66,\n          -87,\n          -98,\n          -5,\n          66,\n          -72,\n          -7,\n          -68,\n          66,\n          -102,\n          -59,\n          69,\n          66,\n          -91,\n          14,\n          89,\n          66,\n          -86,\n          -23,\n          24,\n          66,\n          100,\n          -118,\n          55,\n          66,\n          -113,\n          83,\n          19,\n          66,\n          -104,\n          117,\n          -1,\n          66,\n          -63,\n          -14,\n          -127,\n          66,\n          
-69,\n          -34,\n          78,\n          66,\n          -73,\n          97,\n          -128,\n          66,\n          -124,\n          -96,\n          2,\n          66,\n          -109,\n          53,\n          -13,\n          66,\n          -122,\n          -93,\n          23,\n          66,\n          -90,\n          -6,\n          29,\n          66,\n          121,\n          -76,\n          -96,\n          66,\n          -93,\n          88,\n          -13,\n          66,\n          -77,\n          -6,\n          -2,\n          66,\n          -102,\n          74,\n          103,\n          66,\n          -113,\n          -69,\n          88,\n          66,\n          122,\n          -56,\n          -105,\n          66,\n          -115,\n          -80,\n          27,\n          66,\n          76,\n          52,\n          -40,\n          66,\n          102,\n          103,\n          -90,\n          66,\n          126,\n          -47,\n          -102,\n          66,\n          121,\n          39,\n          121,\n          66,\n          -125,\n          62,\n          -58,\n          66,\n          -92,\n          30,\n          48,\n          66,\n          97,\n          -90,\n          72,\n          66,\n          -104,\n          -98,\n          11,\n          66,\n          104,\n          21,\n          42,\n          66,\n          -106,\n          -28,\n          -10,\n          66,\n          -109,\n          115,\n          80,\n          66,\n          106,\n          -40,\n          -14,\n          66,\n          -120,\n          -72,\n          26,\n          66,\n          -112,\n          -123,\n          74,\n          66,\n          -104,\n          116,\n          -53,\n          66,\n          -127,\n          -41,\n          -107,\n          66,\n          103,\n          90,\n          71,\n          66,\n          -110,\n          59,\n          35,\n          66,\n          -115,\n          83,\n          -76,\n          66,\n      
    -112,\n          23,\n          109,\n          66,\n          -95,\n          124,\n          -32,\n          66,\n          99,\n          88,\n          -111,\n          66,\n          -83,\n          76,\n          58,\n          66,\n          -67,\n          86,\n          -107,\n          66,\n          102,\n          91,\n          -53,\n          66,\n          73,\n          -32,\n          55,\n          66,\n          -99,\n          -60,\n          -82,\n          66,\n          -111,\n          119,\n          102,\n          66,\n          -111,\n          90,\n          71,\n          66,\n          72,\n          -23,\n          -54,\n          66,\n          -121,\n          -81,\n          111,\n          66,\n          -72,\n          81,\n          54,\n          66,\n          -95,\n          121,\n          -95,\n          66,\n          -116,\n          -81,\n          27,\n          66,\n          -124,\n          53,\n          -15,\n          66,\n          -75,\n          -30,\n          -65,\n          66,\n          -92,\n          -71,\n          68,\n          66,\n          -78,\n          57,\n          114,\n          66,\n          -119,\n          -94,\n          -71,\n          66,\n          -77,\n          87,\n          -71,\n          66,\n          -110,\n          -123,\n          79,\n          66,\n          105,\n          -72,\n          -57,\n          66,\n          -103,\n          -72,\n          65,\n          66,\n          -68,\n          -27,\n          -82,\n          66,\n          77,\n          108,\n          95,\n          66,\n          105,\n          107,\n          63,\n          66,\n          -75,\n          105,\n          83,\n          66,\n          -65,\n          81,\n          26,\n          66,\n          -119,\n          -87,\n          -52,\n          66,\n          -125,\n          64,\n          -44,\n          66,\n          97,\n          -62,\n          -39,\n          66,\n     
     -107,\n          -37,\n          -61,\n          66,\n          118,\n          -2,\n          -52,\n          66,\n          113,\n          79,\n          85,\n          66,\n          -111,\n          44,\n          -29,\n          66,\n          -105,\n          74,\n          16,\n          66,\n          -84,\n          38,\n          16,\n          66,\n          105,\n          3,\n          12,\n          66,\n          -128,\n          49,\n          -110,\n          66,\n          114,\n          -78,\n          -7,\n          66,\n          -72,\n          50,\n          -25,\n          66,\n          -79,\n          82,\n          -2,\n          66,\n          -120,\n          10,\n          -49,\n          66,\n          -117,\n          -94,\n          -122,\n          66,\n          84,\n          86,\n          -7,\n          66,\n          119,\n          15,\n          59,\n          66,\n          -128,\n          -35,\n          24,\n          66,\n          -95,\n          23,\n          6,\n          66,\n          -123,\n          -46,\n          -22,\n          66,\n          -126,\n          49,\n          98,\n          66,\n          -68,\n          14,\n          -11,\n          66,\n          95,\n          110,\n          -39,\n          66,\n          84,\n          -86,\n          -127,\n          66,\n          -103,\n          1,\n          -113,\n          66,\n          117,\n          -22,\n          -68,\n          66,\n          -83,\n          -36,\n          -51,\n          66,\n          -66,\n          -122,\n          -33,\n          66,\n          -76,\n          111,\n          99,\n          66,\n          -77,\n          113,\n          1,\n          66,\n          -94,\n          101,\n          4,\n          66,\n          -121,\n          -93,\n          -102,\n          66,\n          -112,\n          -9,\n          -13,\n          66,\n          -95,\n          -77,\n          -110,\n          66,\n         
 -71,\n          90,\n          -9,\n          66,\n          77,\n          -93,\n          45,\n          66,\n          -112,\n          -31,\n          125,\n          66,\n          -77,\n          -106,\n          -103,\n          66,\n          -121,\n          120,\n          97,\n          66,\n          -63,\n          83,\n          90,\n          66,\n          -106,\n          116,\n          -69,\n          66,\n          79,\n          10,\n          -55,\n          66,\n          -93,\n          126,\n          -78,\n          66,\n          -114,\n          -112,\n          -54,\n          66,\n          107,\n          8,\n          16,\n          66,\n          -116,\n          -94,\n          4,\n          66,\n          -116,\n          27,\n          -22,\n          66,\n          -106,\n          -106,\n          -88,\n          66,\n          104,\n          125,\n          -98,\n          66,\n          -99,\n          124,\n          104,\n          66,\n          -80,\n          -86,\n          107,\n          66,\n          -96,\n          57,\n          55,\n          66,\n          -107,\n          71,\n          -105,\n          66,\n          -113,\n          -58,\n          107,\n          66,\n          122,\n          -44,\n          -39,\n          66,\n          -101,\n          -66,\n          5,\n          66,\n          -92,\n          71,\n          81,\n          66,\n          96,\n          -70,\n          -42,\n          66,\n          -68,\n          26,\n          75,\n          66,\n          -117,\n          102,\n          13,\n          66,\n          -105,\n          31,\n          108,\n          66,\n          -100,\n          109,\n          -35,\n          66,\n          -115,\n          51,\n          68,\n          66,\n          -103,\n          -41,\n          121,\n          66,\n          81,\n          88,\n          -71,\n          66,\n          -102,\n          0,\n          56,\n          66,\n      
    -80,\n          -28,\n          -75,\n          66,\n          -83,\n          -55,\n          -39,\n          66,\n          -128,\n          103,\n          -108,\n          66,\n          -109,\n          127,\n          -70,\n          66,\n          -111,\n          74,\n          43,\n          66,\n          -111,\n          78,\n          -80,\n          66,\n          -102,\n          15,\n          -120,\n          66,\n          99,\n          -114,\n          -74,\n          66,\n          -116,\n          -107,\n          -8,\n          66,\n          -120,\n          -122,\n          -113,\n          66,\n          80,\n          38,\n          -49,\n          66,\n          -71,\n          124,\n          16,\n          66,\n          -69,\n          85,\n          -104,\n          66,\n          -109,\n          -62,\n          -13,\n          66,\n          -104,\n          54,\n          72,\n          66,\n          -119,\n          -85,\n          -73,\n          66,\n          -74,\n          126,\n          69,\n          66,\n          125,\n          12,\n          86,\n          66,\n          -91,\n          -38,\n          12,\n          66,\n          -114,\n          -14,\n          -114,\n          66,\n          -106,\n          28,\n          54,\n          66,\n          -92,\n          -36,\n          34,\n          66,\n          114,\n          -25,\n          54,\n          66,\n          -78,\n          38,\n          91,\n          66,\n          -115,\n          -117,\n          64,\n          66,\n          -101,\n          114,\n          90,\n          66,\n          -125,\n          -66,\n          -101,\n          66,\n          -61,\n          114,\n          -113,\n          66,\n          -126,\n          -59,\n          -125,\n          66,\n          -108,\n          105,\n          -27,\n          66,\n          -126,\n          -4,\n          62,\n          66,\n          -112,\n          75,\n          
-119,\n          66,\n          -97,\n          119,\n          -17,\n          66,\n          -95,\n          -8,\n          70,\n          66,\n          119,\n          42,\n          115,\n          66,\n          110,\n          -95,\n          19,\n          66,\n          -80,\n          84,\n          -5,\n          66,\n          -117,\n          -22,\n          57,\n          66,\n          -108,\n          -3,\n          43,\n          66,\n          -97,\n          113,\n          18,\n          66,\n          -119,\n          43,\n          -27,\n          66,\n          -70,\n          85,\n          39,\n          66,\n          -91,\n          99,\n          5,\n          66,\n          -100,\n          -51,\n          26,\n          66,\n          -65,\n          -59,\n          112,\n          66,\n          -106,\n          -111,\n          -68,\n          66,\n          116,\n          -19,\n          -58,\n          66,\n          -105,\n          31,\n          -90,\n          66,\n          -120,\n          61,\n          15,\n          66,\n          119,\n          65,\n          -5,\n          66,\n          -126,\n          55,\n          -28,\n          66,\n          -122,\n          -105,\n          57,\n          66,\n          -93,\n          -46,\n          -123,\n          66,\n          -110,\n          -22,\n          -84,\n          66,\n          -87,\n          109,\n          116,\n          66,\n          -106,\n          -76,\n          -17,\n          66,\n          94,\n          -72,\n          122,\n          66,\n          91,\n          88,\n          -85,\n          66,\n          -119,\n          -75,\n          -26,\n          66,\n          -115,\n          48,\n          12,\n          66,\n          -102,\n          100,\n          87,\n          66,\n          -92,\n          -64,\n          -105,\n          66,\n          97,\n          24,\n          -82,\n          66,\n          -97,\n          -115,\n      
    32,\n          66,\n          -97,\n          91,\n          -73,\n          66,\n          -98,\n          -78,\n          87,\n          66,\n          104,\n          -116,\n          33,\n          66,\n          121,\n          -19,\n          -73,\n          66,\n          -89,\n          -25,\n          7,\n          66,\n          -118,\n          107,\n          0,\n          66,\n          -100,\n          -85,\n          70,\n          66,\n          -101,\n          103,\n          86,\n          66,\n          -87,\n          -86,\n          59,\n          66,\n          -100,\n          17,\n          -6,\n          66,\n          -108,\n          65,\n          -15,\n          66,\n          -87,\n          -55,\n          -45,\n          66,\n          -79,\n          46,\n          85,\n          66,\n          -93,\n          80,\n          -52,\n          66,\n          -126,\n          -104,\n          92,\n          66,\n          110,\n          18,\n          67,\n          66,\n          -120,\n          0,\n          -15,\n          66,\n          -113,\n          -98,\n          94,\n          66,\n          -86,\n          52,\n          -60,\n          66,\n          -114,\n          50,\n          -65,\n          66,\n          -121,\n          -115,\n          113\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          1033566207,\n          7846973,\n          1067573159,\n          37132719,\n          212909427,\n          549106746,\n          2877018,\n          179473574,\n          8193\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          1071480575,\n          185058845,\n          999947994,\n          310774698,\n          198563097,\n          1880176,\n          34223184,\n          172131500,\n          561\n        
],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3899095098967794180,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          654261202,\n          1005930677,\n          728332218,\n          216766382,\n          858449742,\n          527600474,\n          330466978,\n          526456695,\n          115145981,\n          781878902,\n          976599139,\n          1002015797,\n          572987187,\n          980024407,\n          325359835,\n          981135163,\n          918014794,\n          712759738,\n          86695769,\n          263579555,\n          865767219,\n          610786366,\n          379166434,\n          597656865,\n          623486427,\n          207309618,\n          213166626,\n          127002862,\n          225038326,\n          749726009,\n          938727258,\n          82094013,\n          259106650,\n          666086890,\n          670115431,\n          51767507,\n          304123445,\n          632864178,\n          99801802,\n          1009329397,\n          402628166,\n          202757719,\n          959\n        ],\n        \"cutValueData\": [\n          66,\n          69,\n          55,\n          27,\n          66,\n          -119,\n          29,\n          -25,\n          66,\n          -82,\n          -110,\n      
    -41,\n          66,\n          -69,\n          -51,\n          99,\n          66,\n          -126,\n          -50,\n          30,\n          66,\n          -95,\n          -123,\n          12,\n          66,\n          -109,\n          -108,\n          39,\n          66,\n          -82,\n          -51,\n          60,\n          66,\n          -114,\n          48,\n          7,\n          66,\n          -85,\n          -94,\n          -89,\n          66,\n          89,\n          60,\n          -85,\n          66,\n          -81,\n          -99,\n          -55,\n          66,\n          -113,\n          -65,\n          2,\n          66,\n          -126,\n          -11,\n          125,\n          66,\n          -90,\n          -104,\n          -45,\n          66,\n          109,\n          67,\n          22,\n          66,\n          -125,\n          -112,\n          -59,\n          66,\n          -60,\n          -14,\n          -47,\n          66,\n          -69,\n          -27,\n          116,\n          66,\n          98,\n          -37,\n          35,\n          66,\n          93,\n          43,\n          -108,\n          66,\n          100,\n          118,\n          -106,\n          66,\n          96,\n          44,\n          90,\n          66,\n          -115,\n          85,\n          -11,\n          66,\n          -109,\n          -14,\n          121,\n          66,\n          -101,\n          -126,\n          -81,\n          66,\n          -104,\n          -64,\n          89,\n          66,\n          -122,\n          31,\n          -62,\n          66,\n          -104,\n          85,\n          -67,\n          66,\n          -111,\n          35,\n          48,\n          66,\n          -95,\n          -10,\n          58,\n          66,\n          -88,\n          -15,\n          36,\n          66,\n          -99,\n          16,\n          116,\n          66,\n          -118,\n          -39,\n          -71,\n          66,\n          102,\n          
65,\n          -55,\n          66,\n          100,\n          78,\n          -32,\n          66,\n          -112,\n          -53,\n          57,\n          66,\n          97,\n          -65,\n          -88,\n          66,\n          -120,\n          -71,\n          -121,\n          66,\n          -69,\n          -100,\n          33,\n          66,\n          -97,\n          35,\n          10,\n          66,\n          124,\n          30,\n          63,\n          66,\n          -116,\n          -58,\n          -88,\n          66,\n          -74,\n          -23,\n          56,\n          66,\n          -91,\n          70,\n          -120,\n          66,\n          83,\n          101,\n          15,\n          66,\n          87,\n          -97,\n          21,\n          66,\n          -109,\n          -124,\n          -44,\n          66,\n          -86,\n          95,\n          -75,\n          66,\n          -79,\n          84,\n          -49,\n          66,\n          -118,\n          -65,\n          2,\n          66,\n          -90,\n          21,\n          6,\n          66,\n          -76,\n          -52,\n          84,\n          66,\n          -102,\n          -77,\n          86,\n          66,\n          106,\n          -122,\n          23,\n          66,\n          -112,\n          8,\n          -43,\n          66,\n          -118,\n          10,\n          124,\n          66,\n          -86,\n          127,\n          102,\n          66,\n          -69,\n          -88,\n          97,\n          66,\n          -91,\n          -125,\n          -22,\n          66,\n          94,\n          -88,\n          -9,\n          66,\n          -127,\n          53,\n          -128,\n          66,\n          96,\n          109,\n          -78,\n          66,\n          -124,\n          123,\n          84,\n          66,\n          -128,\n          2,\n          -81,\n          66,\n          126,\n          96,\n          79,\n          66,\n          -98,\n          
76,\n          -67,\n          66,\n          -108,\n          112,\n          -24,\n          66,\n          -117,\n          -61,\n          10,\n          66,\n          -109,\n          22,\n          -102,\n          66,\n          -92,\n          22,\n          -121,\n          66,\n          -118,\n          68,\n          -5,\n          66,\n          -112,\n          -26,\n          86,\n          66,\n          82,\n          27,\n          -7,\n          66,\n          -126,\n          108,\n          91,\n          66,\n          -93,\n          -58,\n          29,\n          66,\n          -84,\n          36,\n          -80,\n          66,\n          -119,\n          -72,\n          31,\n          66,\n          -123,\n          56,\n          3,\n          66,\n          96,\n          -52,\n          -84,\n          66,\n          -109,\n          -109,\n          -70,\n          66,\n          -92,\n          3,\n          54,\n          66,\n          -89,\n          92,\n          31,\n          66,\n          -104,\n          21,\n          -85,\n          66,\n          -64,\n          -108,\n          -9,\n          66,\n          -100,\n          0,\n          58,\n          66,\n          119,\n          80,\n          -33,\n          66,\n          116,\n          -125,\n          97,\n          66,\n          -66,\n          -70,\n          -81,\n          66,\n          91,\n          65,\n          -14,\n          66,\n          -111,\n          -12,\n          34,\n          66,\n          75,\n          -111,\n          42,\n          66,\n          -127,\n          -10,\n          72,\n          66,\n          114,\n          -13,\n          66,\n          66,\n          -108,\n          45,\n          -25,\n          66,\n          -105,\n          -122,\n          -70,\n          66,\n          -67,\n          54,\n          111,\n          66,\n          -78,\n          46,\n          77,\n          66,\n          -68,\n          
62,\n          -33,\n          66,\n          -100,\n          87,\n          -112,\n          66,\n          -87,\n          -99,\n          -15,\n          66,\n          -63,\n          -29,\n          -47,\n          66,\n          -80,\n          -119,\n          -47,\n          66,\n          -66,\n          -5,\n          -103,\n          66,\n          97,\n          63,\n          99,\n          66,\n          -63,\n          -48,\n          118,\n          66,\n          92,\n          -3,\n          75,\n          66,\n          -74,\n          6,\n          -36,\n          66,\n          -89,\n          95,\n          -76,\n          66,\n          -93,\n          21,\n          -92,\n          66,\n          -116,\n          -16,\n          -48,\n          66,\n          -107,\n          39,\n          -78,\n          66,\n          -99,\n          2,\n          3,\n          66,\n          -77,\n          -64,\n          -122,\n          66,\n          -92,\n          -6,\n          51,\n          66,\n          -78,\n          -4,\n          61,\n          66,\n          -85,\n          -120,\n          76,\n          66,\n          -116,\n          120,\n          -38,\n          66,\n          -116,\n          -14,\n          106,\n          66,\n          -82,\n          41,\n          75,\n          66,\n          92,\n          85,\n          -2,\n          66,\n          -128,\n          104,\n          -90,\n          66,\n          -123,\n          -24,\n          52,\n          66,\n          -122,\n          11,\n          -99,\n          66,\n          -86,\n          99,\n          119,\n          66,\n          -96,\n          52,\n          34,\n          66,\n          -110,\n          80,\n          54,\n          66,\n          110,\n          50,\n          -94,\n          66,\n          -101,\n          68,\n          82,\n          66,\n          -87,\n          104,\n          114,\n          66,\n          93,\n          -126,\n 
         19,\n          66,\n          -91,\n          11,\n          104,\n          66,\n          92,\n          -73,\n          -18,\n          66,\n          -98,\n          -119,\n          101,\n          66,\n          109,\n          -82,\n          94,\n          66,\n          -111,\n          76,\n          -90,\n          66,\n          -126,\n          -6,\n          52,\n          66,\n          -120,\n          -29,\n          -15,\n          66,\n          -89,\n          79,\n          -52,\n          66,\n          -117,\n          -9,\n          -48,\n          66,\n          -128,\n          -122,\n          58,\n          66,\n          -81,\n          59,\n          -126,\n          66,\n          -100,\n          86,\n          -31,\n          66,\n          -92,\n          78,\n          4,\n          66,\n          -79,\n          -53,\n          -121,\n          66,\n          -71,\n          52,\n          -31,\n          66,\n          82,\n          -89,\n          -111,\n          66,\n          -86,\n          -8,\n          -92,\n          66,\n          -99,\n          121,\n          118,\n          66,\n          -102,\n          -3,\n          -16,\n          66,\n          -102,\n          64,\n          -67,\n          66,\n          -111,\n          -67,\n          79,\n          66,\n          -118,\n          73,\n          -53,\n          66,\n          -124,\n          -120,\n          -79,\n          66,\n          -112,\n          11,\n          73,\n          66,\n          -99,\n          -69,\n          74,\n          66,\n          -66,\n          -21,\n          105,\n          66,\n          -94,\n          -32,\n          83,\n          66,\n          -114,\n          89,\n          -125,\n          66,\n          -95,\n          95,\n          27,\n          66,\n          -102,\n          56,\n          -57,\n          66,\n          -64,\n          -43,\n          -33,\n          66,\n          -106,\n         
 -66,\n          68,\n          66,\n          -62,\n          105,\n          69,\n          66,\n          -108,\n          96,\n          -64,\n          66,\n          -102,\n          4,\n          77,\n          66,\n          -115,\n          -40,\n          -97,\n          66,\n          -118,\n          -29,\n          58,\n          66,\n          -60,\n          64,\n          -38,\n          66,\n          112,\n          -100,\n          -47,\n          66,\n          -108,\n          -82,\n          -116,\n          66,\n          -105,\n          -70,\n          9,\n          66,\n          -113,\n          114,\n          -77,\n          66,\n          -78,\n          34,\n          -10,\n          66,\n          -94,\n          127,\n          29,\n          66,\n          -81,\n          -34,\n          61,\n          66,\n          -85,\n          24,\n          -125,\n          66,\n          -61,\n          -15,\n          111,\n          66,\n          -119,\n          -30,\n          -81,\n          66,\n          84,\n          -64,\n          45,\n          66,\n          -126,\n          39,\n          -72,\n          66,\n          -96,\n          -4,\n          5,\n          66,\n          -85,\n          122,\n          -118,\n          66,\n          -90,\n          23,\n          122,\n          66,\n          -126,\n          -90,\n          -106,\n          66,\n          122,\n          -43,\n          -52,\n          66,\n          120,\n          -127,\n          -69,\n          66,\n          -119,\n          68,\n          -23,\n          66,\n          -74,\n          2,\n          -35,\n          66,\n          -108,\n          -108,\n          78,\n          66,\n          -116,\n          -52,\n          -127,\n          66,\n          -98,\n          -47,\n          53,\n          66,\n          106,\n          52,\n          72,\n          66,\n          -127,\n          -109,\n          127,\n          66,\n          
-81,\n          -60,\n          11,\n          66,\n          -93,\n          85,\n          49,\n          66,\n          -105,\n          -78,\n          -84,\n          66,\n          -110,\n          -101,\n          -13,\n          66,\n          -108,\n          -30,\n          122,\n          66,\n          -92,\n          89,\n          -72,\n          66,\n          -92,\n          102,\n          -47,\n          66,\n          -93,\n          34,\n          -49,\n          66,\n          -125,\n          69,\n          114,\n          66,\n          -69,\n          -16,\n          -6,\n          66,\n          -85,\n          13,\n          29,\n          66,\n          -81,\n          -65,\n          -109,\n          66,\n          -111,\n          -79,\n          -76,\n          66,\n          -81,\n          63,\n          -6,\n          66,\n          87,\n          -8,\n          -106,\n          66,\n          -92,\n          80,\n          5,\n          66,\n          -120,\n          -10,\n          -38,\n          66,\n          -69,\n          -50,\n          -92,\n          66,\n          -104,\n          -9,\n          47,\n          66,\n          125,\n          4,\n          90,\n          66,\n          -117,\n          114,\n          81,\n          66,\n          -93,\n          122,\n          66,\n          66,\n          -128,\n          34,\n          99,\n          66,\n          -85,\n          -10,\n          -89,\n          66,\n          -94,\n          28,\n          -126,\n          66,\n          -96,\n          -12,\n          2,\n          66,\n          -66,\n          8,\n          -59,\n          66,\n          -115,\n          -75,\n          -20,\n          66,\n          -81,\n          -26,\n          -115,\n          66,\n          118,\n          72,\n          -54,\n          66,\n          118,\n          -119,\n          -57,\n          66,\n          115,\n          -88,\n          -52,\n          66,\n         
 -97,\n          -34,\n          122,\n          66,\n          89,\n          -60,\n          -1,\n          66,\n          -126,\n          -48,\n          -115,\n          66,\n          109,\n          -23,\n          86,\n          66,\n          -116,\n          57,\n          -12,\n          66,\n          -126,\n          -49,\n          97,\n          66,\n          124,\n          -97,\n          93,\n          66,\n          -91,\n          125,\n          -117,\n          66,\n          -108,\n          -54,\n          -59,\n          66,\n          -119,\n          19,\n          93,\n          66,\n          -77,\n          -65,\n          65,\n          66,\n          -89,\n          -78,\n          79,\n          66,\n          -89,\n          27,\n          -117,\n          66,\n          -102,\n          -67,\n          12,\n          66,\n          86,\n          -97,\n          103,\n          66,\n          -95,\n          -66,\n          -65,\n          66,\n          -128,\n          102,\n          50,\n          66,\n          -90,\n          -1,\n          46,\n          66,\n          109,\n          -47,\n          -109,\n          66,\n          -102,\n          -15,\n          23,\n          66,\n          112,\n          77,\n          106,\n          66,\n          -105,\n          -13,\n          94,\n          66,\n          -92,\n          -55,\n          -62,\n          66,\n          -81,\n          17,\n          -115,\n          66,\n          -114,\n          -103,\n          -38,\n          66,\n          -127,\n          -29,\n          -39,\n          66,\n          119,\n          90,\n          64,\n          66,\n          -109,\n          12,\n          -127,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n 
         1162260734,\n          1033121303,\n          975725665,\n          1099472426,\n          1097956745,\n          731607295,\n          645677852,\n          1011604586,\n          716650712,\n          731549203,\n          710864306,\n          712422943,\n          710349719,\n          1120\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1033120547,\n          1161730016,\n          601059770,\n          1160056957,\n          1157450056,\n          774221296,\n          630621112,\n          970145554,\n          587515571,\n          600459695,\n          1098281905,\n          581721952,\n          581130817,\n          1336\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7971869639547712875,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          978921285,\n          994007269,\n          471914914,\n          452950318,\n          129979958,\n          83547427,\n          480812537,\n          174955119,\n          366916790,\n          379754205,\n          70045301,\n          593870679,\n          324643107,\n          82025451,\n          975680853,\n          112561835,\n          240470142,\n          765319901,\n          1016262463,\n          1024589503,\n          
605260962,\n          593682359,\n          464855001,\n          368409769,\n          223430526,\n          489252134,\n          620297641,\n          254379710,\n          313257399,\n          463113138,\n          648330745,\n          468004651,\n          103794910,\n          339715569,\n          465107301,\n          443099255,\n          328904566,\n          849832795,\n          582960731,\n          98542957,\n          233622623,\n          81833694,\n          189\n        ],\n        \"cutValueData\": [\n          66,\n          -93,\n          -26,\n          21,\n          66,\n          -86,\n          71,\n          90,\n          66,\n          -61,\n          -103,\n          106,\n          66,\n          74,\n          63,\n          -50,\n          66,\n          -115,\n          59,\n          76,\n          66,\n          -81,\n          92,\n          -30,\n          66,\n          -67,\n          34,\n          -95,\n          66,\n          96,\n          -106,\n          78,\n          66,\n          -125,\n          80,\n          19,\n          66,\n          125,\n          -60,\n          127,\n          66,\n          82,\n          -48,\n          19,\n          66,\n          95,\n          -109,\n          -38,\n          66,\n          -86,\n          96,\n          -78,\n          66,\n          121,\n          80,\n          -77,\n          66,\n          94,\n          -64,\n          -88,\n          66,\n          -128,\n          17,\n          -120,\n          66,\n          -97,\n          -74,\n          117,\n          66,\n          91,\n          -75,\n          0,\n          66,\n          110,\n          39,\n          -50,\n          66,\n          -101,\n          -127,\n          18,\n          66,\n          -87,\n          125,\n          -29,\n          66,\n          -86,\n          38,\n          64,\n          66,\n          -91,\n          -73,\n          -87,\n          66,\n          -71,\n          
-36,\n          55,\n          66,\n          116,\n          -113,\n          -56,\n          66,\n          116,\n          -17,\n          63,\n          66,\n          119,\n          52,\n          -85,\n          66,\n          -92,\n          -117,\n          -12,\n          66,\n          -89,\n          -128,\n          -102,\n          66,\n          91,\n          27,\n          95,\n          66,\n          -77,\n          110,\n          64,\n          66,\n          -105,\n          113,\n          -127,\n          66,\n          -112,\n          -104,\n          94,\n          66,\n          -90,\n          104,\n          -59,\n          66,\n          -77,\n          69,\n          29,\n          66,\n          87,\n          112,\n          -113,\n          66,\n          -110,\n          84,\n          -61,\n          66,\n          -108,\n          -55,\n          -107,\n          66,\n          -103,\n          66,\n          -75,\n          66,\n          -88,\n          112,\n          -52,\n          66,\n          -121,\n          8,\n          -28,\n          66,\n          93,\n          -12,\n          -7,\n          66,\n          -110,\n          91,\n          102,\n          66,\n          -109,\n          18,\n          -79,\n          66,\n          -110,\n          -57,\n          47,\n          66,\n          -123,\n          82,\n          58,\n          66,\n          120,\n          -52,\n          34,\n          66,\n          -123,\n          -80,\n          -70,\n          66,\n          95,\n          38,\n          -46,\n          66,\n          100,\n          -88,\n          -62,\n          66,\n          79,\n          127,\n          -74,\n          66,\n          -105,\n          62,\n          -9,\n          66,\n          110,\n          1,\n          60,\n          66,\n          114,\n          9,\n          110,\n          66,\n          -79,\n          -74,\n          -67,\n          66,\n          81,\n        
  -38,\n          76,\n          66,\n          105,\n          17,\n          -60,\n          66,\n          119,\n          -100,\n          31,\n          66,\n          -96,\n          118,\n          35,\n          66,\n          -90,\n          109,\n          -108,\n          66,\n          -120,\n          -94,\n          17,\n          66,\n          121,\n          89,\n          -121,\n          66,\n          -95,\n          -62,\n          116,\n          66,\n          -125,\n          -81,\n          3,\n          66,\n          -120,\n          -23,\n          -70,\n          66,\n          -114,\n          -37,\n          53,\n          66,\n          -121,\n          29,\n          35,\n          66,\n          -114,\n          43,\n          88,\n          66,\n          -116,\n          71,\n          42,\n          66,\n          -90,\n          -125,\n          105,\n          66,\n          118,\n          92,\n          104,\n          66,\n          118,\n          -46,\n          -121,\n          66,\n          126,\n          -102,\n          1,\n          66,\n          119,\n          34,\n          -20,\n          66,\n          -68,\n          78,\n          59,\n          66,\n          109,\n          -56,\n          42,\n          66,\n          -103,\n          -70,\n          -35,\n          66,\n          115,\n          94,\n          67,\n          66,\n          -82,\n          3,\n          56,\n          66,\n          -92,\n          -7,\n          88,\n          66,\n          -84,\n          11,\n          51,\n          66,\n          -107,\n          38,\n          1,\n          66,\n          -104,\n          -38,\n          -89,\n          66,\n          121,\n          97,\n          -15,\n          66,\n          99,\n          -71,\n          -126,\n          66,\n          104,\n          76,\n          -15,\n          66,\n          -85,\n          -96,\n          67,\n          66,\n          -93,\n          
-30,\n          45,\n          66,\n          -106,\n          58,\n          -90,\n          66,\n          119,\n          6,\n          -29,\n          66,\n          -79,\n          -121,\n          -14,\n          66,\n          -108,\n          5,\n          -43,\n          66,\n          -67,\n          -39,\n          39,\n          66,\n          -77,\n          -97,\n          -35,\n          66,\n          -127,\n          59,\n          22,\n          66,\n          -84,\n          -44,\n          106,\n          66,\n          -100,\n          -25,\n          -87,\n          66,\n          -100,\n          -119,\n          89,\n          66,\n          -124,\n          10,\n          115,\n          66,\n          -91,\n          -72,\n          81,\n          66,\n          -128,\n          -16,\n          103,\n          66,\n          113,\n          -106,\n          77,\n          66,\n          -113,\n          101,\n          86,\n          66,\n          80,\n          101,\n          15,\n          66,\n          120,\n          106,\n          -77,\n          66,\n          117,\n          17,\n          41,\n          66,\n          118,\n          -1,\n          -88,\n          66,\n          87,\n          -46,\n          78,\n          66,\n          106,\n          -72,\n          55,\n          66,\n          94,\n          12,\n          80,\n          66,\n          92,\n          106,\n          -123,\n          66,\n          -111,\n          -96,\n          116,\n          66,\n          -119,\n          -9,\n          126,\n          66,\n          114,\n          94,\n          120,\n          66,\n          -69,\n          -29,\n          -76,\n          66,\n          -124,\n          -122,\n          -125,\n          66,\n          -62,\n          107,\n          -91,\n          66,\n          98,\n          -126,\n          -112,\n          66,\n          -98,\n          29,\n          63,\n          66,\n          -72,\n      
    125,\n          5,\n          66,\n          -71,\n          -28,\n          46,\n          66,\n          101,\n          -112,\n          -12,\n          66,\n          -102,\n          -16,\n          -124,\n          66,\n          86,\n          39,\n          55,\n          66,\n          -121,\n          82,\n          10,\n          66,\n          -102,\n          81,\n          -14,\n          66,\n          -106,\n          30,\n          -47,\n          66,\n          -127,\n          -85,\n          -118,\n          66,\n          -117,\n          -30,\n          22,\n          66,\n          -107,\n          -41,\n          12,\n          66,\n          109,\n          -121,\n          57,\n          66,\n          -98,\n          -84,\n          -39,\n          66,\n          -123,\n          107,\n          6,\n          66,\n          -98,\n          13,\n          -38,\n          66,\n          -61,\n          52,\n          -55,\n          66,\n          -89,\n          11,\n          46,\n          66,\n          -118,\n          85,\n          20,\n          66,\n          113,\n          -62,\n          48,\n          66,\n          -126,\n          49,\n          7,\n          66,\n          96,\n          -4,\n          98,\n          66,\n          110,\n          -72,\n          -86,\n          66,\n          -101,\n          -50,\n          29,\n          66,\n          -109,\n          99,\n          83,\n          66,\n          -69,\n          104,\n          110,\n          66,\n          -90,\n          42,\n          122,\n          66,\n          -99,\n          30,\n          -73,\n          66,\n          117,\n          -28,\n          107,\n          66,\n          -86,\n          -57,\n          27,\n          66,\n          116,\n          -48,\n          80,\n          66,\n          -114,\n          98,\n          -36,\n          66,\n          -118,\n          41,\n          -64,\n          66,\n          -105,\n        
  -49,\n          -91,\n          66,\n          -111,\n          -109,\n          55,\n          66,\n          -107,\n          -63,\n          56,\n          66,\n          -65,\n          -28,\n          20,\n          66,\n          117,\n          89,\n          25,\n          66,\n          -120,\n          -107,\n          27,\n          66,\n          -112,\n          -63,\n          -25,\n          66,\n          -69,\n          -107,\n          -121,\n          66,\n          -119,\n          -31,\n          34,\n          66,\n          -126,\n          -98,\n          63,\n          66,\n          -106,\n          64,\n          -41,\n          66,\n          -95,\n          -60,\n          76,\n          66,\n          105,\n          -113,\n          -36,\n          66,\n          -97,\n          -104,\n          64,\n          66,\n          -80,\n          -50,\n          86,\n          66,\n          -111,\n          -92,\n          -107,\n          66,\n          -108,\n          -93,\n          114,\n          66,\n          -84,\n          80,\n          -105,\n          66,\n          125,\n          -41,\n          -105,\n          66,\n          90,\n          -76,\n          -60,\n          66,\n          -72,\n          40,\n          112,\n          66,\n          -101,\n          86,\n          44,\n          66,\n          -103,\n          37,\n          -7,\n          66,\n          122,\n          -118,\n          -45,\n          66,\n          -70,\n          117,\n          61,\n          66,\n          123,\n          -24,\n          20,\n          66,\n          100,\n          -16,\n          -104,\n          66,\n          -107,\n          35,\n          -96,\n          66,\n          -91,\n          95,\n          78,\n          66,\n          -122,\n          122,\n          64,\n          66,\n          -90,\n          -41,\n          103,\n          66,\n          -114,\n          -38,\n          90,\n          66,\n         
 -71,\n          58,\n          -43,\n          66,\n          -96,\n          17,\n          -48,\n          66,\n          -70,\n          103,\n          88,\n          66,\n          -116,\n          53,\n          -54,\n          66,\n          -91,\n          104,\n          -47,\n          66,\n          -121,\n          10,\n          -120,\n          66,\n          110,\n          -97,\n          -121,\n          66,\n          -85,\n          74,\n          46,\n          66,\n          88,\n          122,\n          19,\n          66,\n          -81,\n          -108,\n          -80,\n          66,\n          -108,\n          -77,\n          -44,\n          66,\n          96,\n          64,\n          82,\n          66,\n          -102,\n          -71,\n          -23,\n          66,\n          -103,\n          80,\n          89,\n          66,\n          -120,\n          86,\n          -104,\n          66,\n          -77,\n          101,\n          -80,\n          66,\n          -96,\n          -9,\n          109,\n          66,\n          -109,\n          -71,\n          -40,\n          66,\n          -95,\n          21,\n          10,\n          66,\n          -114,\n          13,\n          101,\n          66,\n          -101,\n          87,\n          2,\n          66,\n          122,\n          -27,\n          8,\n          66,\n          118,\n          50,\n          -116,\n          66,\n          -117,\n          83,\n          -108,\n          66,\n          -98,\n          42,\n          24,\n          66,\n          -119,\n          -19,\n          35,\n          66,\n          -94,\n          18,\n          25,\n          66,\n          -117,\n          79,\n          86,\n          66,\n          -103,\n          90,\n          -44,\n          66,\n          101,\n          108,\n          34,\n          66,\n          -91,\n          48,\n          78,\n          66,\n          -67,\n          -52,\n          -115,\n          66,\n          
-124,\n          -30,\n          -62,\n          66,\n          -74,\n          31,\n          43,\n          66,\n          -91,\n          100,\n          20,\n          66,\n          -115,\n          0,\n          86,\n          66,\n          -82,\n          -12,\n          -26,\n          66,\n          -114,\n          26,\n          8,\n          66,\n          91,\n          26,\n          105,\n          66,\n          124,\n          39,\n          56,\n          66,\n          77,\n          -61,\n          -60,\n          66,\n          -109,\n          123,\n          -95,\n          66,\n          -107,\n          64,\n          -95,\n          66,\n          -124,\n          -12,\n          -82,\n          66,\n          69,\n          70,\n          -69,\n          66,\n          -114,\n          10,\n          -29,\n          66,\n          -121,\n          89,\n          117,\n          66,\n          -124,\n          101,\n          2,\n          66,\n          -108,\n          -3,\n          91,\n          66,\n          123,\n          47,\n          -107,\n          66,\n          -97,\n          -84,\n          -31,\n          66,\n          -109,\n          98,\n          89,\n          66,\n          -112,\n          -51,\n          11,\n          66,\n          98,\n          -72,\n          -7,\n          66,\n          -92,\n          -80,\n          25,\n          66,\n          -94,\n          33,\n          95,\n          66,\n          -111,\n          78,\n          124,\n          66,\n          125,\n          -94,\n          -112,\n          66,\n          -82,\n          -81,\n          -13,\n          66,\n          102,\n          113,\n          -102,\n          66,\n          -112,\n          95,\n          -113,\n          66,\n          -79,\n          107,\n          -119,\n          66,\n          -100,\n          102,\n          124,\n          66,\n          -79,\n          -73,\n          -5,\n          66,\n         
 -113,\n          96,\n          115,\n          66,\n          98,\n          57,\n          -23,\n          66,\n          -114,\n          -56,\n          -79,\n          66,\n          -109,\n          55,\n          24,\n          66,\n          -84,\n          86,\n          -102,\n          66,\n          -112,\n          -13,\n          120,\n          66,\n          -83,\n          -29,\n          87,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          768463685,\n          1028309171,\n          631350899,\n          1119188465,\n          755098891,\n          597318908,\n          970174987,\n          600796250,\n          754973449,\n          1155678233,\n          754991735,\n          710529692,\n          970211435,\n          1094\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1157477768,\n          774832199,\n          631350935,\n          1032528356,\n          1143129229,\n          716893402,\n          970855216,\n          973511581,\n          1097759536,\n          968617804,\n          1097692441,\n          968551981,\n          1112217467,\n          1096\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5616425619850912141,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      
\"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          510656361,\n          600278606,\n          253326725,\n          595140644,\n          831584705,\n          585219198,\n          728696459,\n          41477397,\n          870711578,\n          44988593,\n          54787746,\n          14824058,\n          750086845,\n          380106249,\n          599335497,\n          180759439,\n          806212701,\n          310562823,\n          403506273,\n          421878371,\n          653367040,\n          357206430,\n          79900404,\n          380801954,\n          404657871,\n          601496835,\n          847113032,\n          878399556,\n          401445895,\n          154883848,\n          26336039,\n          614143973,\n          269541196,\n          369730541,\n          384029582,\n          488060360,\n          7562094,\n          36883374,\n          234619561,\n          832217179,\n          172982005,\n          531182728,\n          2082\n        ],\n        \"cutValueData\": [\n          66,\n          -116,\n          -46,\n          3,\n          66,\n          -120,\n          -122,\n          -64,\n          66,\n          -59,\n          109,\n          80,\n          66,\n          69,\n          4,\n          -20,\n          66,\n          -72,\n          29,\n          -42,\n          66,\n          125,\n          -26,\n          -90,\n          66,\n          -104,\n          -102,\n          56,\n          66,\n          110,\n          -19,\n          -125,\n          66,\n          -125,\n          28,\n          -21,\n          66,\n          -94,\n          -94,\n          -57,\n          66,\n          -91,\n          -110,\n          9,\n          66,\n          -104,\n          16,\n          53,\n          66,\n          -89,\n          -41,\n   
       61,\n          66,\n          108,\n          110,\n          29,\n          66,\n          -80,\n          -56,\n          -81,\n          66,\n          -104,\n          -104,\n          42,\n          66,\n          124,\n          -56,\n          101,\n          66,\n          85,\n          96,\n          -64,\n          66,\n          -70,\n          -36,\n          110,\n          66,\n          -67,\n          -36,\n          27,\n          66,\n          -64,\n          124,\n          75,\n          66,\n          -72,\n          -85,\n          -58,\n          66,\n          -69,\n          30,\n          77,\n          66,\n          -117,\n          -100,\n          -79,\n          66,\n          -98,\n          53,\n          -70,\n          66,\n          113,\n          96,\n          -64,\n          66,\n          88,\n          -7,\n          67,\n          66,\n          -71,\n          -119,\n          53,\n          66,\n          91,\n          -115,\n          -23,\n          66,\n          127,\n          -59,\n          86,\n          66,\n          -72,\n          -15,\n          -35,\n          66,\n          -105,\n          93,\n          -77,\n          66,\n          -71,\n          66,\n          99,\n          66,\n          -83,\n          93,\n          -28,\n          66,\n          100,\n          92,\n          8,\n          66,\n          -102,\n          15,\n          74,\n          66,\n          78,\n          90,\n          -98,\n          66,\n          -124,\n          65,\n          3,\n          66,\n          83,\n          3,\n          23,\n          66,\n          -124,\n          -53,\n          -26,\n          66,\n          -66,\n          -38,\n          -52,\n          66,\n          -109,\n          -120,\n          109,\n          66,\n          -87,\n          27,\n          37,\n          66,\n          -105,\n          -54,\n          -42,\n          66,\n          -81,\n          91,\n          
-112,\n          66,\n          -61,\n          48,\n          -121,\n          66,\n          98,\n          -85,\n          -64,\n          66,\n          -127,\n          15,\n          109,\n          66,\n          -92,\n          117,\n          -65,\n          66,\n          -94,\n          -47,\n          -19,\n          66,\n          -110,\n          -67,\n          -20,\n          66,\n          -111,\n          -12,\n          -13,\n          66,\n          107,\n          95,\n          -67,\n          66,\n          -113,\n          -108,\n          -86,\n          66,\n          78,\n          -127,\n          -89,\n          66,\n          -108,\n          25,\n          -26,\n          66,\n          116,\n          -42,\n          -25,\n          66,\n          92,\n          119,\n          45,\n          66,\n          -83,\n          -44,\n          -24,\n          66,\n          81,\n          -60,\n          -47,\n          66,\n          -74,\n          81,\n          -119,\n          66,\n          95,\n          37,\n          65,\n          66,\n          -73,\n          29,\n          95,\n          66,\n          86,\n          -30,\n          44,\n          66,\n          -116,\n          96,\n          -70,\n          66,\n          -110,\n          109,\n          109,\n          66,\n          93,\n          33,\n          -48,\n          66,\n          108,\n          1,\n          73,\n          66,\n          115,\n          98,\n          -107,\n          66,\n          104,\n          -31,\n          26,\n          66,\n          -121,\n          46,\n          -126,\n          66,\n          -118,\n          -50,\n          -6,\n          66,\n          94,\n          76,\n          62,\n          66,\n          -85,\n          -51,\n          -26,\n          66,\n          -79,\n          -64,\n          -51,\n          66,\n          -127,\n          51,\n          -93,\n          66,\n          -127,\n          57,\n        
  -35,\n          66,\n          91,\n          12,\n          -21,\n          66,\n          -118,\n          113,\n          75,\n          66,\n          99,\n          56,\n          106,\n          66,\n          -108,\n          -124,\n          77,\n          66,\n          126,\n          123,\n          -75,\n          66,\n          124,\n          -41,\n          17,\n          66,\n          -86,\n          -99,\n          -123,\n          66,\n          115,\n          76,\n          125,\n          66,\n          -77,\n          -31,\n          -7,\n          66,\n          -117,\n          -14,\n          -50,\n          66,\n          -114,\n          94,\n          -26,\n          66,\n          -101,\n          -109,\n          -12,\n          66,\n          -119,\n          36,\n          80,\n          66,\n          -99,\n          -34,\n          -65,\n          66,\n          -108,\n          -98,\n          -97,\n          66,\n          -76,\n          62,\n          31,\n          66,\n          -75,\n          -81,\n          -4,\n          66,\n          -61,\n          -81,\n          6,\n          66,\n          119,\n          122,\n          23,\n          66,\n          -123,\n          99,\n          -60,\n          66,\n          -107,\n          55,\n          23,\n          66,\n          -75,\n          28,\n          -26,\n          66,\n          110,\n          22,\n          5,\n          66,\n          87,\n          -120,\n          -24,\n          66,\n          -115,\n          83,\n          112,\n          66,\n          105,\n          -28,\n          -108,\n          66,\n          -123,\n          77,\n          -2,\n          66,\n          111,\n          -122,\n          66,\n          66,\n          -93,\n          -65,\n          116,\n          66,\n          88,\n          -57,\n          60,\n          66,\n          -86,\n          27,\n          64,\n          66,\n          -115,\n          93,\n         
 -100,\n          66,\n          -105,\n          21,\n          -51,\n          66,\n          104,\n          -68,\n          46,\n          66,\n          -64,\n          116,\n          3,\n          66,\n          -117,\n          74,\n          -58,\n          66,\n          -81,\n          8,\n          15,\n          66,\n          -94,\n          55,\n          -78,\n          66,\n          121,\n          -108,\n          -86,\n          66,\n          -88,\n          106,\n          -45,\n          66,\n          -81,\n          -2,\n          44,\n          66,\n          -113,\n          86,\n          -123,\n          66,\n          -91,\n          65,\n          40,\n          66,\n          -115,\n          13,\n          -3,\n          66,\n          -83,\n          54,\n          -42,\n          66,\n          -75,\n          -68,\n          40,\n          66,\n          -114,\n          69,\n          118,\n          66,\n          -77,\n          126,\n          109,\n          66,\n          -120,\n          -24,\n          -105,\n          66,\n          112,\n          64,\n          -73,\n          66,\n          -123,\n          78,\n          59,\n          66,\n          -126,\n          56,\n          -56,\n          66,\n          -121,\n          -120,\n          -61,\n          66,\n          -122,\n          -59,\n          -39,\n          66,\n          -86,\n          94,\n          -118,\n          66,\n          107,\n          -123,\n          -50,\n          66,\n          -119,\n          -29,\n          -122,\n          66,\n          -109,\n          -104,\n          -40,\n          66,\n          79,\n          127,\n          115,\n          66,\n          86,\n          -24,\n          3,\n          66,\n          -113,\n          -88,\n          -21,\n          66,\n          -74,\n          -117,\n          60,\n          66,\n          98,\n          109,\n          -22,\n          66,\n          -75,\n          22,\n 
         90,\n          66,\n          -77,\n          -18,\n          -68,\n          66,\n          91,\n          -115,\n          70,\n          66,\n          -98,\n          99,\n          -24,\n          66,\n          94,\n          45,\n          55,\n          66,\n          -117,\n          107,\n          -45,\n          66,\n          -84,\n          95,\n          82,\n          66,\n          -109,\n          112,\n          27,\n          66,\n          -93,\n          65,\n          -124,\n          66,\n          105,\n          -51,\n          112,\n          66,\n          -93,\n          -67,\n          -117,\n          66,\n          -127,\n          -31,\n          64,\n          66,\n          -66,\n          -128,\n          -70,\n          66,\n          -125,\n          -47,\n          -9,\n          66,\n          -121,\n          59,\n          -3,\n          66,\n          -110,\n          -70,\n          29,\n          66,\n          85,\n          58,\n          84,\n          66,\n          -127,\n          98,\n          57,\n          66,\n          106,\n          -120,\n          -47,\n          66,\n          125,\n          -104,\n          -63,\n          66,\n          -87,\n          51,\n          -34,\n          66,\n          -88,\n          5,\n          -121,\n          66,\n          -116,\n          -116,\n          108,\n          66,\n          96,\n          -109,\n          -102,\n          66,\n          -122,\n          -54,\n          29,\n          66,\n          -69,\n          90,\n          39,\n          66,\n          -102,\n          -71,\n          106,\n          66,\n          101,\n          -108,\n          -6,\n          66,\n          93,\n          -68,\n          -48,\n          66,\n          -72,\n          107,\n          52,\n          66,\n          100,\n          -81,\n          -42,\n          66,\n          -123,\n          -70,\n          -29,\n          66,\n          -110,\n         
 9,\n          -82,\n          66,\n          -65,\n          12,\n          -119,\n          66,\n          119,\n          55,\n          -21,\n          66,\n          115,\n          18,\n          24,\n          66,\n          -125,\n          34,\n          28,\n          66,\n          -101,\n          -96,\n          -108,\n          66,\n          -126,\n          -91,\n          -92,\n          66,\n          -91,\n          41,\n          97,\n          66,\n          -117,\n          93,\n          70,\n          66,\n          106,\n          -67,\n          53,\n          66,\n          -127,\n          77,\n          29,\n          66,\n          -128,\n          -10,\n          -55,\n          66,\n          85,\n          -6,\n          -14,\n          66,\n          115,\n          -88,\n          -116,\n          66,\n          89,\n          -110,\n          -95,\n          66,\n          -85,\n          -71,\n          -27,\n          66,\n          -122,\n          -85,\n          -92,\n          66,\n          -84,\n          -86,\n          -55,\n          66,\n          102,\n          126,\n          -46,\n          66,\n          -125,\n          -39,\n          17,\n          66,\n          -118,\n          63,\n          116,\n          66,\n          120,\n          -36,\n          -123,\n          66,\n          122,\n          -92,\n          96,\n          66,\n          -87,\n          -126,\n          -22,\n          66,\n          -98,\n          61,\n          68,\n          66,\n          -102,\n          115,\n          -15,\n          66,\n          -81,\n          53,\n          -70,\n          66,\n          -127,\n          93,\n          -42,\n          66,\n          -83,\n          55,\n          -42,\n          66,\n          -125,\n          -21,\n          47,\n          66,\n          -75,\n          110,\n          73,\n          66,\n          111,\n          33,\n          -66,\n          66,\n          -124,\n   
       -72,\n          -55,\n          66,\n          -101,\n          12,\n          72,\n          66,\n          76,\n          -24,\n          -89,\n          66,\n          116,\n          -81,\n          -60,\n          66,\n          104,\n          -115,\n          -102,\n          66,\n          89,\n          110,\n          -6,\n          66,\n          116,\n          88,\n          -28,\n          66,\n          -93,\n          44,\n          -64,\n          66,\n          -81,\n          -128,\n          -76,\n          66,\n          -68,\n          -75,\n          117,\n          66,\n          -97,\n          75,\n          -1,\n          66,\n          -79,\n          15,\n          29,\n          66,\n          -83,\n          51,\n          0,\n          66,\n          121,\n          7,\n          66,\n          66,\n          -80,\n          -49,\n          9,\n          66,\n          105,\n          -66,\n          -96,\n          66,\n          -102,\n          103,\n          -87,\n          66,\n          -95,\n          83,\n          83,\n          66,\n          -107,\n          -14,\n          123,\n          66,\n          95,\n          13,\n          -63,\n          66,\n          76,\n          -48,\n          74,\n          66,\n          -63,\n          -41,\n          -2,\n          66,\n          -65,\n          -67,\n          -112,\n          66,\n          -112,\n          52,\n          123,\n          66,\n          -96,\n          -70,\n          -16,\n          66,\n          -75,\n          -70,\n          -55,\n          66,\n          -71,\n          -126,\n          -122,\n          66,\n          -90,\n          26,\n          -49,\n          66,\n          -126,\n          38,\n          32,\n          66,\n          -102,\n          -29,\n          -77,\n          66,\n          112,\n          79,\n          29,\n          66,\n          120,\n          5,\n          14,\n          66,\n          -127,\n         
 -35,\n          -13,\n          66,\n          107,\n          -29,\n          -92,\n          66,\n          -127,\n          -94,\n          51,\n          66,\n          121,\n          -88,\n          -120,\n          66,\n          -103,\n          76,\n          -72,\n          66,\n          -126,\n          -44,\n          -62,\n          66,\n          -121,\n          -2,\n          118,\n          66,\n          -120,\n          65,\n          -41,\n          66,\n          -100,\n          -97,\n          33,\n          66,\n          -120,\n          -82,\n          36,\n          66,\n          -67,\n          32,\n          -50,\n          66,\n          -94,\n          122,\n          -48,\n          66,\n          -101,\n          -12,\n          50,\n          66,\n          -102,\n          -15,\n          38,\n          66,\n          -117,\n          -37,\n          14,\n          66,\n          -86,\n          -126,\n          71,\n          66,\n          -81,\n          7,\n          -126,\n          66,\n          -116,\n          69,\n          -63,\n          66,\n          -78,\n          -41,\n          -94\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          790626295,\n          679477087,\n          412506246,\n          950297478,\n          705039134,\n          736271007,\n          176986631,\n          138021382,\n          2418\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          1041231855,\n          961487323,\n          419388414,\n          203088578,\n          563888667,\n          712115865,\n          449557004,\n          685772805,\n          289\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        
\"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7146792820767120252,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          233499737,\n          158994418,\n          628395225,\n          752668636,\n          285752461,\n          857045498,\n          516575438,\n          605536352,\n          399910403,\n          134305809,\n          406637502,\n          120699630,\n          764732399,\n          9847357,\n          369440076,\n          274108631,\n          115797896,\n          417072363,\n          258526687,\n          67175843,\n          650792399,\n          234580157,\n          267144489,\n          392867647,\n          38134234,\n          523793265,\n          230238218,\n          859788879,\n          761518583,\n          34225241,\n          392417407,\n          377993091,\n          593385661,\n          314056312,\n          746276621,\n          498382796,\n          402814384,\n          857345902,\n          33761211,\n          13348931,\n          32970897,\n          65301105,\n          7747\n        ],\n        \"cutValueData\": [\n          66,\n          -74,\n          81,\n          69,\n          66,\n          -106,\n          91,\n          98,\n          66,\n          -119,\n          125,\n          113,\n          66,\n          -81,\n          -73,\n          116,\n          66,\n          -118,\n          54,\n          -23,\n          66,\n          
94,\n          -117,\n          25,\n          66,\n          -88,\n          4,\n          80,\n          66,\n          123,\n          112,\n          11,\n          66,\n          84,\n          -79,\n          -116,\n          66,\n          -123,\n          61,\n          -42,\n          66,\n          -99,\n          25,\n          -1,\n          66,\n          -97,\n          -41,\n          -98,\n          66,\n          -123,\n          -46,\n          56,\n          66,\n          -100,\n          -74,\n          -90,\n          66,\n          -75,\n          -93,\n          120,\n          66,\n          -77,\n          -83,\n          -70,\n          66,\n          100,\n          99,\n          90,\n          66,\n          -70,\n          95,\n          84,\n          66,\n          90,\n          106,\n          -94,\n          66,\n          81,\n          1,\n          26,\n          66,\n          -106,\n          -41,\n          65,\n          66,\n          126,\n          -124,\n          65,\n          66,\n          118,\n          -75,\n          -97,\n          66,\n          -92,\n          -54,\n          30,\n          66,\n          -92,\n          -106,\n          93,\n          66,\n          94,\n          -81,\n          71,\n          66,\n          108,\n          -66,\n          97,\n          66,\n          -104,\n          39,\n          -106,\n          66,\n          -60,\n          -62,\n          -112,\n          66,\n          -59,\n          -17,\n          -23,\n          66,\n          -119,\n          -74,\n          86,\n          66,\n          -93,\n          -61,\n          -43,\n          66,\n          127,\n          -106,\n          -108,\n          66,\n          -87,\n          124,\n          120,\n          66,\n          -123,\n          60,\n          -106,\n          66,\n          -63,\n          121,\n          94,\n          66,\n          -115,\n          98,\n          47,\n          66,\n          
-71,\n          112,\n          80,\n          66,\n          -71,\n          54,\n          -27,\n          66,\n          -91,\n          -127,\n          113,\n          66,\n          -116,\n          39,\n          -73,\n          66,\n          120,\n          87,\n          -96,\n          66,\n          -72,\n          -106,\n          -104,\n          66,\n          -123,\n          -51,\n          105,\n          66,\n          100,\n          -36,\n          110,\n          66,\n          84,\n          -83,\n          60,\n          66,\n          -91,\n          -128,\n          108,\n          66,\n          -106,\n          -16,\n          118,\n          66,\n          -107,\n          -28,\n          -6,\n          66,\n          -92,\n          23,\n          -124,\n          66,\n          -77,\n          114,\n          -46,\n          66,\n          -106,\n          58,\n          109,\n          66,\n          100,\n          -11,\n          -29,\n          66,\n          -86,\n          111,\n          84,\n          66,\n          -115,\n          -77,\n          -39,\n          66,\n          -123,\n          54,\n          70,\n          66,\n          -81,\n          113,\n          60,\n          66,\n          -105,\n          -96,\n          117,\n          66,\n          -80,\n          -3,\n          -105,\n          66,\n          -107,\n          64,\n          59,\n          66,\n          -127,\n          27,\n          -67,\n          66,\n          -87,\n          24,\n          -13,\n          66,\n          115,\n          -54,\n          -125,\n          66,\n          -81,\n          62,\n          21,\n          66,\n          -124,\n          -84,\n          94,\n          66,\n          -98,\n          -117,\n          -93,\n          66,\n          -113,\n          -34,\n          29,\n          66,\n          -100,\n          -48,\n          -6,\n          66,\n          -120,\n          -104,\n          20,\n          
66,\n          -118,\n          -90,\n          69,\n          66,\n          127,\n          119,\n          64,\n          66,\n          -112,\n          18,\n          42,\n          66,\n          -121,\n          44,\n          90,\n          66,\n          120,\n          34,\n          -15,\n          66,\n          -91,\n          51,\n          -92,\n          66,\n          -71,\n          -52,\n          18,\n          66,\n          -128,\n          -100,\n          22,\n          66,\n          -77,\n          -31,\n          65,\n          66,\n          -122,\n          -10,\n          16,\n          66,\n          -106,\n          62,\n          19,\n          66,\n          121,\n          104,\n          112,\n          66,\n          -118,\n          76,\n          -102,\n          66,\n          -84,\n          -42,\n          44,\n          66,\n          -103,\n          114,\n          37,\n          66,\n          -82,\n          -105,\n          -54,\n          66,\n          -90,\n          -24,\n          119,\n          66,\n          -127,\n          107,\n          -88,\n          66,\n          -116,\n          127,\n          -67,\n          66,\n          -95,\n          80,\n          112,\n          66,\n          -107,\n          70,\n          27,\n          66,\n          96,\n          25,\n          93,\n          66,\n          -98,\n          -100,\n          78,\n          66,\n          -108,\n          54,\n          -64,\n          66,\n          -108,\n          49,\n          121,\n          66,\n          -126,\n          -78,\n          -87,\n          66,\n          121,\n          -95,\n          92,\n          66,\n          -123,\n          -56,\n          -46,\n          66,\n          -77,\n          -123,\n          -22,\n          66,\n          -121,\n          -86,\n          31,\n          66,\n          96,\n          101,\n          48,\n          66,\n          -73,\n          122,\n          46,\n    
      66,\n          97,\n          1,\n          0,\n          66,\n          -109,\n          -87,\n          -19,\n          66,\n          -72,\n          3,\n          -86,\n          66,\n          -108,\n          -64,\n          85,\n          66,\n          107,\n          -84,\n          3,\n          66,\n          -126,\n          -13,\n          -22,\n          66,\n          -124,\n          85,\n          42,\n          66,\n          117,\n          -2,\n          90,\n          66,\n          -100,\n          -47,\n          -25,\n          66,\n          -94,\n          -21,\n          108,\n          66,\n          -100,\n          28,\n          8,\n          66,\n          -84,\n          -84,\n          -19,\n          66,\n          -101,\n          -55,\n          65,\n          66,\n          -89,\n          -117,\n          120,\n          66,\n          -72,\n          72,\n          -6,\n          66,\n          -94,\n          -120,\n          26,\n          66,\n          109,\n          24,\n          61,\n          66,\n          -114,\n          29,\n          10,\n          66,\n          121,\n          65,\n          -43,\n          66,\n          -109,\n          62,\n          27,\n          66,\n          -105,\n          -89,\n          -31,\n          66,\n          125,\n          114,\n          116,\n          66,\n          -107,\n          60,\n          -7,\n          66,\n          -81,\n          118,\n          -89,\n          66,\n          -68,\n          10,\n          -123,\n          66,\n          97,\n          -63,\n          104,\n          66,\n          -126,\n          1,\n          66,\n          66,\n          97,\n          103,\n          95,\n          66,\n          -63,\n          -119,\n          -4,\n          66,\n          -89,\n          -90,\n          -54,\n          66,\n          -108,\n          -112,\n          -41,\n          66,\n          -109,\n          72,\n          80,\n         
 66,\n          -91,\n          -6,\n          2,\n          66,\n          -74,\n          -103,\n          58,\n          66,\n          -92,\n          25,\n          19,\n          66,\n          87,\n          -78,\n          8,\n          66,\n          90,\n          -41,\n          -109,\n          66,\n          -114,\n          -67,\n          -98,\n          66,\n          -126,\n          116,\n          -71,\n          66,\n          -86,\n          16,\n          117,\n          66,\n          -80,\n          -12,\n          -67,\n          66,\n          -114,\n          -79,\n          -93,\n          66,\n          -111,\n          5,\n          -104,\n          66,\n          -96,\n          44,\n          24,\n          66,\n          -65,\n          -1,\n          -75,\n          66,\n          119,\n          81,\n          -57,\n          66,\n          -84,\n          53,\n          100,\n          66,\n          -70,\n          -84,\n          58,\n          66,\n          119,\n          106,\n          123,\n          66,\n          85,\n          -29,\n          -26,\n          66,\n          -94,\n          17,\n          68,\n          66,\n          -97,\n          -61,\n          -108,\n          66,\n          -72,\n          8,\n          7,\n          66,\n          -113,\n          -38,\n          78,\n          66,\n          -100,\n          -119,\n          40,\n          66,\n          -108,\n          -15,\n          -117,\n          66,\n          -95,\n          -14,\n          -48,\n          66,\n          83,\n          -32,\n          -64,\n          66,\n          120,\n          50,\n          -122,\n          66,\n          -115,\n          48,\n          -46,\n          66,\n          -127,\n          -69,\n          37,\n          66,\n          -83,\n          -72,\n          -56,\n          66,\n          -73,\n          99,\n          -58,\n          66,\n          -90,\n          -85,\n          105,\n          
66,\n          -116,\n          -90,\n          -12,\n          66,\n          87,\n          -123,\n          -81,\n          66,\n          -120,\n          -80,\n          35,\n          66,\n          -80,\n          -61,\n          28,\n          66,\n          123,\n          36,\n          -128,\n          66,\n          -97,\n          -47,\n          47,\n          66,\n          92,\n          -65,\n          105,\n          66,\n          -94,\n          -95,\n          -23,\n          66,\n          -102,\n          -56,\n          114,\n          66,\n          -108,\n          -21,\n          23,\n          66,\n          -128,\n          61,\n          127,\n          66,\n          -78,\n          20,\n          14,\n          66,\n          -102,\n          -68,\n          -27,\n          66,\n          112,\n          -97,\n          -3,\n          66,\n          -110,\n          5,\n          -99,\n          66,\n          -115,\n          78,\n          -74,\n          66,\n          -62,\n          -61,\n          95,\n          66,\n          -118,\n          113,\n          -95,\n          66,\n          -101,\n          -32,\n          105,\n          66,\n          86,\n          -115,\n          81,\n          66,\n          -126,\n          59,\n          101,\n          66,\n          81,\n          72,\n          -97,\n          66,\n          -110,\n          -79,\n          -88,\n          66,\n          -70,\n          -38,\n          123,\n          66,\n          -113,\n          37,\n          6,\n          66,\n          -105,\n          48,\n          -51,\n          66,\n          -106,\n          22,\n          102,\n          66,\n          -124,\n          -48,\n          -25,\n          66,\n          -109,\n          -100,\n          53,\n          66,\n          -100,\n          19,\n          53,\n          66,\n          -106,\n          97,\n          -107,\n          66,\n          -118,\n          126,\n          
-80,\n          66,\n          -110,\n          -127,\n          -23,\n          66,\n          -117,\n          107,\n          -34,\n          66,\n          -99,\n          96,\n          -106,\n          66,\n          126,\n          121,\n          -1,\n          66,\n          -91,\n          -38,\n          -1,\n          66,\n          -85,\n          86,\n          -39,\n          66,\n          -103,\n          10,\n          -115,\n          66,\n          100,\n          -53,\n          -2,\n          66,\n          -111,\n          -113,\n          -70,\n          66,\n          -112,\n          112,\n          -59,\n          66,\n          -118,\n          -25,\n          75,\n          66,\n          -105,\n          120,\n          -28,\n          66,\n          76,\n          -97,\n          107,\n          66,\n          -108,\n          -68,\n          -47,\n          66,\n          -113,\n          -88,\n          25,\n          66,\n          118,\n          46,\n          -3,\n          66,\n          -68,\n          97,\n          -94,\n          66,\n          118,\n          31,\n          77,\n          66,\n          115,\n          98,\n          35,\n          66,\n          -101,\n          -87,\n          81,\n          66,\n          -83,\n          -125,\n          95,\n          66,\n          -75,\n          17,\n          -42,\n          66,\n          112,\n          66,\n          -79,\n          66,\n          122,\n          -51,\n          -95,\n          66,\n          -71,\n          100,\n          17,\n          66,\n          120,\n          127,\n          -31,\n          66,\n          -106,\n          96,\n          100,\n          66,\n          -89,\n          54,\n          56,\n          66,\n          -119,\n          42,\n          -111,\n          66,\n          -107,\n          57,\n          79,\n          66,\n          108,\n          120,\n          -101,\n          66,\n          121,\n          -4,\n  
        56,\n          66,\n          -87,\n          -100,\n          -29,\n          66,\n          -127,\n          9,\n          -18,\n          66,\n          -106,\n          -61,\n          122,\n          66,\n          -109,\n          -33,\n          127,\n          66,\n          -99,\n          46,\n          -117,\n          66,\n          -125,\n          -76,\n          75,\n          66,\n          126,\n          73,\n          -26,\n          66,\n          -127,\n          -63,\n          87,\n          66,\n          -120,\n          -16,\n          -90,\n          66,\n          111,\n          82,\n          60,\n          66,\n          -116,\n          102,\n          -56,\n          66,\n          106,\n          75,\n          -80,\n          66,\n          -66,\n          15,\n          64,\n          66,\n          98,\n          38,\n          73,\n          66,\n          109,\n          37,\n          -108,\n          66,\n          -126,\n          74,\n          -40,\n          66,\n          98,\n          99,\n          74,\n          66,\n          -98,\n          -67,\n          -1,\n          66,\n          -81,\n          17,\n          -34,\n          66,\n          -128,\n          84,\n          -15,\n          66,\n          -92,\n          -33,\n          -119,\n          66,\n          118,\n          -26,\n          115,\n          66,\n          -116,\n          -115,\n          -5,\n          66,\n          -125,\n          -5,\n          -122,\n          66,\n          -120,\n          29,\n          99,\n          66,\n          -82,\n          64,\n          -30\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          1048307455,\n          905133887,\n          557349561,\n          356481149,\n          997778536,\n          38684738,\n          
359091074,\n          25198865,\n          4248\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          243250943,\n          98823801,\n          825786037,\n          3135615,\n          808137098,\n          312601984,\n          93592132,\n          545771575,\n          4124\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6408488327328865688,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1031133617,\n          338291887,\n          1066871715,\n          201039099,\n          45476909,\n          366836685,\n          896216501,\n          1029793774,\n          896185561,\n          175111619,\n          590053042,\n          251432098,\n          761771375,\n          111523962,\n          912841061,\n          393793337,\n          1068963422,\n          182285743,\n          303728743,\n          69990621,\n          919905963,\n          912324141,\n          1033631475,\n          314153517,\n          977255359,\n          127203582,\n          863567834,\n          747682889,\n          366464319,\n          652791394,\n          498297426,\n          576788985,\n          919709037,\n          256986938,\n          370448213,\n          783362081,\n          268396485,\n          
714987482,\n          789636803,\n          585188839,\n          207440957,\n          884976566,\n          1017\n        ],\n        \"cutValueData\": [\n          66,\n          -77,\n          15,\n          -76,\n          66,\n          125,\n          86,\n          92,\n          66,\n          -108,\n          -92,\n          -20,\n          66,\n          -72,\n          -78,\n          -29,\n          66,\n          108,\n          -83,\n          -35,\n          66,\n          -112,\n          -84,\n          -28,\n          66,\n          -80,\n          61,\n          -97,\n          66,\n          -115,\n          -38,\n          101,\n          66,\n          76,\n          -107,\n          -108,\n          66,\n          105,\n          108,\n          -4,\n          66,\n          -120,\n          -31,\n          -65,\n          66,\n          -68,\n          -59,\n          -115,\n          66,\n          -91,\n          44,\n          -66,\n          66,\n          90,\n          4,\n          -56,\n          66,\n          -97,\n          104,\n          122,\n          66,\n          -117,\n          19,\n          -91,\n          66,\n          101,\n          -5,\n          -102,\n          66,\n          -117,\n          5,\n          33,\n          66,\n          125,\n          76,\n          81,\n          66,\n          -89,\n          -128,\n          11,\n          66,\n          -106,\n          35,\n          -56,\n          66,\n          116,\n          123,\n          -10,\n          66,\n          115,\n          -79,\n          -79,\n          66,\n          78,\n          72,\n          -32,\n          66,\n          -116,\n          -49,\n          -37,\n          66,\n          -93,\n          -30,\n          -68,\n          66,\n          -118,\n          127,\n          -107,\n          66,\n          -127,\n          -121,\n          -15,\n          66,\n          -99,\n          87,\n          89,\n          66,\n       
   -118,\n          4,\n          -104,\n          66,\n          84,\n          71,\n          42,\n          66,\n          -90,\n          13,\n          3,\n          66,\n          96,\n          110,\n          93,\n          66,\n          111,\n          -117,\n          -120,\n          66,\n          -118,\n          4,\n          75,\n          66,\n          114,\n          127,\n          114,\n          66,\n          91,\n          81,\n          4,\n          66,\n          -119,\n          -20,\n          5,\n          66,\n          -109,\n          -102,\n          -6,\n          66,\n          -88,\n          -85,\n          19,\n          66,\n          -89,\n          112,\n          -27,\n          66,\n          -83,\n          75,\n          12,\n          66,\n          80,\n          -47,\n          16,\n          66,\n          -125,\n          5,\n          114,\n          66,\n          -114,\n          -96,\n          -35,\n          66,\n          -99,\n          -86,\n          7,\n          66,\n          -85,\n          99,\n          -67,\n          66,\n          -91,\n          126,\n          -21,\n          66,\n          -76,\n          -57,\n          -45,\n          66,\n          -121,\n          19,\n          -13,\n          66,\n          99,\n          21,\n          -61,\n          66,\n          94,\n          27,\n          -62,\n          66,\n          82,\n          48,\n          105,\n          66,\n          -121,\n          42,\n          120,\n          66,\n          -116,\n          -38,\n          48,\n          66,\n          90,\n          95,\n          -19,\n          66,\n          -116,\n          84,\n          -98,\n          66,\n          -71,\n          102,\n          -76,\n          66,\n          -118,\n          -127,\n          86,\n          66,\n          101,\n          89,\n          -48,\n          66,\n          -124,\n          19,\n          106,\n          66,\n          -128,\n  
        69,\n          -104,\n          66,\n          -74,\n          12,\n          7,\n          66,\n          101,\n          -95,\n          30,\n          66,\n          -121,\n          59,\n          11,\n          66,\n          124,\n          95,\n          91,\n          66,\n          -127,\n          -53,\n          -56,\n          66,\n          -67,\n          -58,\n          65,\n          66,\n          -93,\n          -1,\n          24,\n          66,\n          -78,\n          -128,\n          106,\n          66,\n          -78,\n          109,\n          -100,\n          66,\n          125,\n          11,\n          -68,\n          66,\n          -126,\n          -114,\n          63,\n          66,\n          -102,\n          -99,\n          -98,\n          66,\n          -112,\n          85,\n          72,\n          66,\n          -86,\n          -78,\n          -127,\n          66,\n          -70,\n          -18,\n          47,\n          66,\n          -105,\n          -91,\n          -31,\n          66,\n          -81,\n          97,\n          -37,\n          66,\n          -91,\n          -125,\n          -106,\n          66,\n          124,\n          -75,\n          -26,\n          66,\n          -107,\n          92,\n          -120,\n          66,\n          -102,\n          19,\n          127,\n          66,\n          -85,\n          112,\n          15,\n          66,\n          -111,\n          37,\n          45,\n          66,\n          -96,\n          -21,\n          77,\n          66,\n          -122,\n          -23,\n          90,\n          66,\n          -107,\n          -23,\n          28,\n          66,\n          -111,\n          65,\n          -49,\n          66,\n          -103,\n          21,\n          -9,\n          66,\n          -87,\n          -65,\n          -48,\n          66,\n          -101,\n          -76,\n          41,\n          66,\n          -80,\n          -54,\n          125,\n          66,\n          
-96,\n          -61,\n          -24,\n          66,\n          -102,\n          99,\n          45,\n          66,\n          -95,\n          -102,\n          29,\n          66,\n          -95,\n          -4,\n          -33,\n          66,\n          -123,\n          52,\n          123,\n          66,\n          -94,\n          -18,\n          52,\n          66,\n          -78,\n          59,\n          -37,\n          66,\n          -86,\n          9,\n          -125,\n          66,\n          -91,\n          41,\n          -77,\n          66,\n          -115,\n          -103,\n          -62,\n          66,\n          -84,\n          43,\n          -4,\n          66,\n          86,\n          -125,\n          -127,\n          66,\n          -105,\n          93,\n          -74,\n          66,\n          -112,\n          -30,\n          71,\n          66,\n          -122,\n          -24,\n          -95,\n          66,\n          -90,\n          -4,\n          -112,\n          66,\n          -120,\n          -77,\n          -104,\n          66,\n          -103,\n          89,\n          -39,\n          66,\n          -75,\n          -113,\n          113,\n          66,\n          98,\n          -50,\n          29,\n          66,\n          -123,\n          -117,\n          -103,\n          66,\n          -100,\n          96,\n          -5,\n          66,\n          -114,\n          99,\n          -25,\n          66,\n          -96,\n          76,\n          -34,\n          66,\n          -84,\n          19,\n          -11,\n          66,\n          102,\n          -102,\n          -1,\n          66,\n          -91,\n          78,\n          102,\n          66,\n          -113,\n          -89,\n          -87,\n          66,\n          98,\n          39,\n          114,\n          66,\n          -100,\n          36,\n          -86,\n          66,\n          97,\n          -53,\n          18,\n          66,\n          109,\n          18,\n          -75,\n          66,\n  
        -116,\n          -20,\n          45,\n          66,\n          -113,\n          -51,\n          52,\n          66,\n          113,\n          -81,\n          -105,\n          66,\n          127,\n          41,\n          -39,\n          66,\n          -87,\n          -124,\n          74,\n          66,\n          -117,\n          -68,\n          -18,\n          66,\n          -116,\n          36,\n          65,\n          66,\n          -116,\n          98,\n          -58,\n          66,\n          -83,\n          126,\n          29,\n          66,\n          101,\n          81,\n          -77,\n          66,\n          119,\n          69,\n          -7,\n          66,\n          100,\n          72,\n          -99,\n          66,\n          88,\n          44,\n          -4,\n          66,\n          -97,\n          -93,\n          -36,\n          66,\n          -117,\n          -111,\n          -71,\n          66,\n          111,\n          79,\n          100,\n          66,\n          96,\n          39,\n          1,\n          66,\n          -90,\n          -115,\n          23,\n          66,\n          121,\n          19,\n          -64,\n          66,\n          -114,\n          63,\n          53,\n          66,\n          -110,\n          52,\n          -78,\n          66,\n          -119,\n          -17,\n          109,\n          66,\n          -110,\n          -59,\n          88,\n          66,\n          120,\n          -42,\n          90,\n          66,\n          -110,\n          76,\n          -26,\n          66,\n          -65,\n          -97,\n          -50,\n          66,\n          88,\n          50,\n          -100,\n          66,\n          101,\n          18,\n          -107,\n          66,\n          -113,\n          -56,\n          125,\n          66,\n          -96,\n          -17,\n          -123,\n          66,\n          -88,\n          -109,\n          51,\n          66,\n          106,\n          38,\n          -101,\n          
66,\n          -101,\n          -72,\n          98,\n          66,\n          -83,\n          65,\n          16,\n          66,\n          -100,\n          63,\n          -40,\n          66,\n          90,\n          114,\n          122,\n          66,\n          -96,\n          -32,\n          67,\n          66,\n          -105,\n          -14,\n          11,\n          66,\n          116,\n          19,\n          8,\n          66,\n          -112,\n          -77,\n          36,\n          66,\n          127,\n          123,\n          31,\n          66,\n          96,\n          75,\n          106,\n          66,\n          -115,\n          71,\n          -15,\n          66,\n          -70,\n          47,\n          -38,\n          66,\n          114,\n          -125,\n          -1,\n          66,\n          -125,\n          -24,\n          -117,\n          66,\n          -117,\n          -53,\n          89,\n          66,\n          -97,\n          -39,\n          -97,\n          66,\n          -91,\n          125,\n          -9,\n          66,\n          -87,\n          -57,\n          -10,\n          66,\n          -112,\n          87,\n          -113,\n          66,\n          -113,\n          -41,\n          25,\n          66,\n          -96,\n          -69,\n          -1,\n          66,\n          -97,\n          31,\n          -118,\n          66,\n          -88,\n          -56,\n          54,\n          66,\n          -114,\n          -101,\n          112,\n          66,\n          -98,\n          72,\n          25,\n          66,\n          -98,\n          -18,\n          28,\n          66,\n          106,\n          39,\n          73,\n          66,\n          -113,\n          -76,\n          17,\n          66,\n          103,\n          -90,\n          -79,\n          66,\n          -124,\n          17,\n          34,\n          66,\n          68,\n          75,\n          -15,\n          66,\n          122,\n          -92,\n          -122,\n          
66,\n          -67,\n          98,\n          30,\n          66,\n          -91,\n          69,\n          -19,\n          66,\n          102,\n          -49,\n          42,\n          66,\n          -75,\n          -116,\n          -124,\n          66,\n          -125,\n          -90,\n          19,\n          66,\n          -100,\n          97,\n          79,\n          66,\n          109,\n          -67,\n          -59,\n          66,\n          -108,\n          -46,\n          9,\n          66,\n          -96,\n          -40,\n          83,\n          66,\n          -105,\n          81,\n          123,\n          66,\n          -110,\n          -94,\n          96,\n          66,\n          -127,\n          -109,\n          -94,\n          66,\n          -113,\n          -110,\n          -65,\n          66,\n          -121,\n          127,\n          49,\n          66,\n          111,\n          -24,\n          55,\n          66,\n          -76,\n          -98,\n          50,\n          66,\n          112,\n          -4,\n          29,\n          66,\n          -74,\n          110,\n          -127,\n          66,\n          -115,\n          59,\n          14,\n          66,\n          116,\n          56,\n          3,\n          66,\n          -95,\n          36,\n          -59,\n          66,\n          81,\n          -73,\n          48,\n          66,\n          90,\n          -12,\n          125,\n          66,\n          97,\n          -48,\n          -17,\n          66,\n          -62,\n          -111,\n          76,\n          66,\n          84,\n          -115,\n          91,\n          66,\n          -89,\n          112,\n          -19,\n          66,\n          -102,\n          87,\n          -114,\n          66,\n          -123,\n          -25,\n          121,\n          66,\n          89,\n          120,\n          86,\n          66,\n          125,\n          -35,\n          -113,\n          66,\n          110,\n          -123,\n          -105,\n     
     66,\n          118,\n          -38,\n          -18,\n          66,\n          -73,\n          61,\n          -63,\n          66,\n          -76,\n          -45,\n          39,\n          66,\n          125,\n          -103,\n          -114,\n          66,\n          -113,\n          -74,\n          124,\n          66,\n          -111,\n          -34,\n          58,\n          66,\n          -69,\n          -36,\n          -61,\n          66,\n          -83,\n          -89,\n          67,\n          66,\n          118,\n          -89,\n          -8,\n          66,\n          -91,\n          26,\n          -34,\n          66,\n          -89,\n          -13,\n          103,\n          66,\n          -120,\n          61,\n          -56,\n          66,\n          -65,\n          89,\n          -57,\n          66,\n          115,\n          88,\n          31,\n          66,\n          -112,\n          94,\n          -69,\n          66,\n          102,\n          -77,\n          -64,\n          66,\n          -114,\n          -91,\n          -106,\n          66,\n          -69,\n          -5,\n          21,\n          66,\n          -98,\n          -79,\n          -83,\n          66,\n          -66,\n          85,\n          -5,\n          66,\n          -107,\n          -90,\n          69,\n          66,\n          93,\n          -68,\n          58,\n          66,\n          -94,\n          -118,\n          -42,\n          66,\n          122,\n          -56,\n          -58,\n          66,\n          -71,\n          -100,\n          94,\n          66,\n          -70,\n          -117,\n          -34,\n          66,\n          -86,\n          -120,\n          -83,\n          66,\n          -114,\n          -115,\n          65,\n          66,\n          -122,\n          39,\n          -37,\n          66,\n          -118,\n          127,\n          -43,\n          66,\n          -104,\n          -36,\n          18,\n          66,\n          122,\n          62,\n          
88,\n          66,\n          -105,\n          -38,\n          -123,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162254905,\n          1160646650,\n          1026664226,\n          1013988659,\n          645103511,\n          975548491,\n          1026012500,\n          710883986,\n          767669444,\n          724649296,\n          581197193,\n          639058531,\n          731003657,\n          1174\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1018595249,\n          1162253933,\n          645619136,\n          760429858,\n          595567880,\n          1011678901,\n          1012188524,\n          582970849,\n          724797059,\n          630575401,\n          753554141,\n          1155154837,\n          625975168,\n          1120\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 8378244044999309591,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          628773447,\n          523117802,\n          370389849,\n          517791936,\n          26743225,\n          
857227540,\n          253775774,\n          411967183,\n          614660847,\n          543272654,\n          119639168,\n          600774267,\n          490466084,\n          257461899,\n          865254068,\n          266169102,\n          194334916,\n          758179408,\n          114518120,\n          628919913,\n          605158240,\n          36659517,\n          310705840,\n          846445514,\n          188402781,\n          816839279,\n          249904951,\n          356210919,\n          76512078,\n          427778890,\n          123316813,\n          266176825,\n          509054253,\n          491918261,\n          828330153,\n          276432827,\n          710019274,\n          127807641,\n          147226708,\n          13057635,\n          842276530,\n          295266369,\n          1939\n        ],\n        \"cutValueData\": [\n          66,\n          -77,\n          87,\n          -48,\n          66,\n          -109,\n          -66,\n          37,\n          66,\n          -117,\n          27,\n          72,\n          66,\n          122,\n          46,\n          -123,\n          66,\n          -59,\n          98,\n          -22,\n          66,\n          -93,\n          -64,\n          100,\n          66,\n          -111,\n          24,\n          -71,\n          66,\n          -121,\n          -64,\n          123,\n          66,\n          -116,\n          22,\n          34,\n          66,\n          -106,\n          -46,\n          2,\n          66,\n          -119,\n          -100,\n          64,\n          66,\n          -102,\n          -71,\n          -28,\n          66,\n          -63,\n          -40,\n          -23,\n          66,\n          -114,\n          42,\n          -83,\n          66,\n          -71,\n          -27,\n          97,\n          66,\n          122,\n          -78,\n          77,\n          66,\n          -105,\n          -31,\n          22,\n          66,\n          -124,\n          -116,\n          -29,\n          
66,\n          -101,\n          123,\n          -59,\n          66,\n          -119,\n          17,\n          82,\n          66,\n          -74,\n          -72,\n          -109,\n          66,\n          -110,\n          121,\n          -126,\n          66,\n          121,\n          56,\n          -115,\n          66,\n          -93,\n          -19,\n          -88,\n          66,\n          -78,\n          -95,\n          -108,\n          66,\n          -61,\n          -31,\n          14,\n          66,\n          -116,\n          -103,\n          -34,\n          66,\n          -68,\n          101,\n          90,\n          66,\n          -96,\n          61,\n          -32,\n          66,\n          -84,\n          -53,\n          59,\n          66,\n          81,\n          25,\n          -46,\n          66,\n          -115,\n          -87,\n          -123,\n          66,\n          -91,\n          21,\n          -105,\n          66,\n          -106,\n          -127,\n          51,\n          66,\n          -73,\n          -4,\n          -91,\n          66,\n          114,\n          -64,\n          28,\n          66,\n          109,\n          21,\n          -104,\n          66,\n          -121,\n          -36,\n          -74,\n          66,\n          -88,\n          -79,\n          106,\n          66,\n          -107,\n          -54,\n          110,\n          66,\n          86,\n          87,\n          -80,\n          66,\n          -81,\n          102,\n          -35,\n          66,\n          -83,\n          -123,\n          -39,\n          66,\n          -100,\n          8,\n          109,\n          66,\n          103,\n          -5,\n          -117,\n          66,\n          -95,\n          -68,\n          -61,\n          66,\n          -101,\n          5,\n          43,\n          66,\n          112,\n          -29,\n          54,\n          66,\n          100,\n          19,\n          -66,\n          66,\n          78,\n          71,\n          
-114,\n          66,\n          -93,\n          35,\n          -21,\n          66,\n          -80,\n          -21,\n          77,\n          66,\n          -114,\n          -96,\n          -7,\n          66,\n          -94,\n          -22,\n          -30,\n          66,\n          108,\n          41,\n          -85,\n          66,\n          116,\n          26,\n          -112,\n          66,\n          105,\n          -68,\n          -15,\n          66,\n          -81,\n          60,\n          -13,\n          66,\n          -103,\n          -112,\n          48,\n          66,\n          80,\n          -5,\n          -106,\n          66,\n          -118,\n          29,\n          -111,\n          66,\n          73,\n          56,\n          -65,\n          66,\n          -126,\n          56,\n          50,\n          66,\n          -103,\n          -96,\n          5,\n          66,\n          -104,\n          -15,\n          38,\n          66,\n          -122,\n          109,\n          13,\n          66,\n          96,\n          110,\n          -91,\n          66,\n          -71,\n          -88,\n          58,\n          66,\n          -107,\n          -46,\n          -88,\n          66,\n          -89,\n          -111,\n          88,\n          66,\n          -120,\n          -110,\n          -11,\n          66,\n          -106,\n          -79,\n          -27,\n          66,\n          -124,\n          -115,\n          -48,\n          66,\n          -96,\n          82,\n          -90,\n          66,\n          -107,\n          1,\n          -118,\n          66,\n          111,\n          108,\n          -107,\n          66,\n          -106,\n          47,\n          -94,\n          66,\n          -75,\n          -96,\n          -46,\n          66,\n          -109,\n          41,\n          84,\n          66,\n          -101,\n          29,\n          -46,\n          66,\n          -99,\n          82,\n          37,\n          66,\n          115,\n          
-9,\n          -29,\n          66,\n          -128,\n          63,\n          -72,\n          66,\n          -68,\n          -22,\n          -103,\n          66,\n          120,\n          -94,\n          -3,\n          66,\n          108,\n          -125,\n          74,\n          66,\n          -93,\n          79,\n          -107,\n          66,\n          -102,\n          -77,\n          -21,\n          66,\n          -120,\n          107,\n          13,\n          66,\n          -89,\n          -10,\n          83,\n          66,\n          98,\n          -26,\n          -86,\n          66,\n          -118,\n          -110,\n          113,\n          66,\n          -72,\n          62,\n          65,\n          66,\n          -100,\n          33,\n          -121,\n          66,\n          -127,\n          -119,\n          -57,\n          66,\n          -127,\n          12,\n          -103,\n          66,\n          -103,\n          -88,\n          -99,\n          66,\n          74,\n          -60,\n          79,\n          66,\n          -102,\n          -6,\n          -12,\n          66,\n          78,\n          -113,\n          68,\n          66,\n          -95,\n          69,\n          -128,\n          66,\n          -120,\n          15,\n          -2,\n          66,\n          125,\n          -125,\n          -18,\n          66,\n          -99,\n          -40,\n          -6,\n          66,\n          91,\n          105,\n          -85,\n          66,\n          -74,\n          41,\n          67,\n          66,\n          -122,\n          87,\n          107,\n          66,\n          109,\n          -79,\n          117,\n          66,\n          -99,\n          18,\n          -56,\n          66,\n          105,\n          126,\n          67,\n          66,\n          -86,\n          -109,\n          13,\n          66,\n          -75,\n          76,\n          94,\n          66,\n          -117,\n          47,\n          -59,\n          66,\n          121,\n  
        44,\n          113,\n          66,\n          -109,\n          21,\n          57,\n          66,\n          -103,\n          -73,\n          -128,\n          66,\n          107,\n          57,\n          81,\n          66,\n          -74,\n          -10,\n          59,\n          66,\n          -111,\n          56,\n          77,\n          66,\n          -99,\n          37,\n          -102,\n          66,\n          -85,\n          -43,\n          -101,\n          66,\n          110,\n          -49,\n          63,\n          66,\n          84,\n          41,\n          -48,\n          66,\n          -81,\n          -69,\n          -67,\n          66,\n          -88,\n          95,\n          -89,\n          66,\n          84,\n          -95,\n          100,\n          66,\n          -123,\n          92,\n          -99,\n          66,\n          -119,\n          -75,\n          5,\n          66,\n          -101,\n          86,\n          97,\n          66,\n          -70,\n          13,\n          -112,\n          66,\n          -94,\n          -10,\n          -6,\n          66,\n          -115,\n          60,\n          43,\n          66,\n          -114,\n          12,\n          87,\n          66,\n          84,\n          1,\n          75,\n          66,\n          -104,\n          -62,\n          -64,\n          66,\n          125,\n          -18,\n          46,\n          66,\n          -93,\n          65,\n          79,\n          66,\n          -100,\n          -8,\n          92,\n          66,\n          -90,\n          -65,\n          -66,\n          66,\n          -123,\n          91,\n          -87,\n          66,\n          -64,\n          -114,\n          51,\n          66,\n          110,\n          -18,\n          -48,\n          66,\n          115,\n          32,\n          122,\n          66,\n          -92,\n          92,\n          -58,\n          66,\n          -98,\n          88,\n          14,\n          66,\n          -86,\n          
113,\n          84,\n          66,\n          -97,\n          -75,\n          38,\n          66,\n          -94,\n          -48,\n          -44,\n          66,\n          -101,\n          24,\n          83,\n          66,\n          -94,\n          -114,\n          -57,\n          66,\n          -85,\n          126,\n          39,\n          66,\n          -126,\n          -97,\n          99,\n          66,\n          -95,\n          -28,\n          46,\n          66,\n          -58,\n          -35,\n          41,\n          66,\n          -118,\n          106,\n          25,\n          66,\n          -91,\n          5,\n          64,\n          66,\n          -94,\n          -3,\n          9,\n          66,\n          87,\n          -17,\n          -93,\n          66,\n          -109,\n          -56,\n          57,\n          66,\n          111,\n          37,\n          -34,\n          66,\n          -108,\n          24,\n          43,\n          66,\n          -67,\n          6,\n          80,\n          66,\n          96,\n          45,\n          -94,\n          66,\n          108,\n          51,\n          59,\n          66,\n          99,\n          -51,\n          15,\n          66,\n          -103,\n          -84,\n          -69,\n          66,\n          -127,\n          93,\n          35,\n          66,\n          -95,\n          112,\n          95,\n          66,\n          -88,\n          84,\n          124,\n          66,\n          -68,\n          -99,\n          100,\n          66,\n          -86,\n          110,\n          -38,\n          66,\n          -83,\n          0,\n          -17,\n          66,\n          -115,\n          -13,\n          38,\n          66,\n          102,\n          94,\n          2,\n          66,\n          99,\n          102,\n          7,\n          66,\n          -65,\n          74,\n          -5,\n          66,\n          88,\n          -22,\n          47,\n          66,\n          -74,\n          2,\n          75,\n  
        66,\n          -62,\n          95,\n          103,\n          66,\n          -113,\n          -90,\n          -5,\n          66,\n          101,\n          -85,\n          117,\n          66,\n          -104,\n          -107,\n          -26,\n          66,\n          -81,\n          13,\n          -99,\n          66,\n          -94,\n          -38,\n          75,\n          66,\n          93,\n          -126,\n          -33,\n          66,\n          -84,\n          -63,\n          -84,\n          66,\n          -81,\n          -8,\n          47,\n          66,\n          -93,\n          108,\n          -11,\n          66,\n          -94,\n          -113,\n          116,\n          66,\n          -75,\n          -121,\n          44,\n          66,\n          -125,\n          37,\n          104,\n          66,\n          122,\n          -97,\n          47,\n          66,\n          110,\n          94,\n          97,\n          66,\n          -124,\n          96,\n          97,\n          66,\n          93,\n          49,\n          87,\n          66,\n          -124,\n          -75,\n          27,\n          66,\n          -125,\n          -4,\n          -120,\n          66,\n          -122,\n          -102,\n          35,\n          66,\n          70,\n          44,\n          -19,\n          66,\n          -128,\n          47,\n          -89,\n          66,\n          -65,\n          7,\n          76,\n          66,\n          -122,\n          -48,\n          -55,\n          66,\n          -92,\n          -53,\n          96,\n          66,\n          -81,\n          -36,\n          -89,\n          66,\n          -122,\n          120,\n          -72,\n          66,\n          -113,\n          -120,\n          -102,\n          66,\n          -118,\n          -128,\n          -65,\n          66,\n          96,\n          104,\n          -92,\n          66,\n          -120,\n          -14,\n          127,\n          66,\n          -107,\n          -44,\n       
   -74,\n          66,\n          -107,\n          96,\n          9,\n          66,\n          -124,\n          -5,\n          104,\n          66,\n          -108,\n          81,\n          -86,\n          66,\n          -101,\n          -102,\n          0,\n          66,\n          -113,\n          -94,\n          -97,\n          66,\n          -94,\n          -25,\n          -113,\n          66,\n          -95,\n          -108,\n          120,\n          66,\n          -79,\n          -35,\n          35,\n          66,\n          -69,\n          43,\n          33,\n          66,\n          -89,\n          44,\n          -80,\n          66,\n          121,\n          -22,\n          -64,\n          66,\n          107,\n          107,\n          109,\n          66,\n          -127,\n          -32,\n          113,\n          66,\n          -96,\n          -61,\n          -92,\n          66,\n          -90,\n          34,\n          -105,\n          66,\n          -118,\n          33,\n          112,\n          66,\n          110,\n          48,\n          -42,\n          66,\n          -95,\n          57,\n          122,\n          66,\n          76,\n          90,\n          -48,\n          66,\n          -76,\n          -62,\n          -46,\n          66,\n          126,\n          -90,\n          -99,\n          66,\n          82,\n          98,\n          -79,\n          66,\n          96,\n          -24,\n          -52,\n          66,\n          82,\n          -2,\n          34,\n          66,\n          -106,\n          -86,\n          77,\n          66,\n          -128,\n          107,\n          54,\n          66,\n          -118,\n          111,\n          -94,\n          66,\n          -120,\n          34,\n          27,\n          66,\n          87,\n          -35,\n          -65,\n          66,\n          -91,\n          55,\n          120,\n          66,\n          -80,\n          124,\n          119,\n          66,\n          -101,\n          104,\n    
      55,\n          66,\n          -95,\n          -115,\n          -61,\n          66,\n          -98,\n          7,\n          -32,\n          66,\n          109,\n          -19,\n          -86,\n          66,\n          -95,\n          -16,\n          -85,\n          66,\n          -113,\n          -122,\n          28,\n          66,\n          93,\n          118,\n          -118,\n          66,\n          90,\n          -20,\n          78,\n          66,\n          90,\n          65,\n          84,\n          66,\n          -72,\n          -15,\n          -28,\n          66,\n          117,\n          -110,\n          -20,\n          66,\n          -106,\n          53,\n          -9,\n          66,\n          118,\n          -19,\n          -5,\n          66,\n          -108,\n          113,\n          -75\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          935327743,\n          199668476,\n          818660543,\n          976326301,\n          34955939,\n          316812057,\n          612405004,\n          738726841,\n          36\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          904916975,\n          1072630527,\n          309256703,\n          576437929,\n          538206497,\n          25306769,\n          296010768,\n          364118708,\n          6444\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -4725511223573268555,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n    
  \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          723406712,\n          463420568,\n          836352260,\n          142147135,\n          391259158,\n          742252631,\n          262160029,\n          528972865,\n          114759685,\n          174013496,\n          769258,\n          184775505,\n          47955563,\n          48818671,\n          803157143,\n          854078044,\n          169185932,\n          126833083,\n          169765394,\n          177023887,\n          382033702,\n          534393311,\n          426718093,\n          655907919,\n          603617051,\n          423890693,\n          187606437,\n          396843701,\n          154402857,\n          285894412,\n          853007638,\n          814257351,\n          164096517,\n          115263279,\n          652047690,\n          241228250,\n          634709117,\n          849127301,\n          291115295,\n          59755515,\n          506144251,\n          734488252,\n          13296\n        ],\n        \"cutValueData\": [\n          66,\n          -71,\n          -54,\n          -8,\n          66,\n          -83,\n          -29,\n          30,\n          66,\n          -75,\n          -45,\n          -2,\n          66,\n          -124,\n          -110,\n          -108,\n          66,\n          -66,\n          4,\n          6,\n          66,\n          -66,\n          -103,\n          7,\n          66,\n          -115,\n          -105,\n          -115,\n          66,\n          125,\n          117,\n          -9,\n          66,\n          -65,\n          -57,\n          123,\n          66,\n          -114,\n          113,\n          -18,\n          66,\n          -103,\n          
-11,\n          54,\n          66,\n          -65,\n          86,\n          -110,\n          66,\n          -115,\n          -50,\n          -22,\n          66,\n          97,\n          17,\n          26,\n          66,\n          -75,\n          53,\n          49,\n          66,\n          124,\n          -111,\n          -58,\n          66,\n          -107,\n          127,\n          -60,\n          66,\n          127,\n          48,\n          42,\n          66,\n          107,\n          101,\n          127,\n          66,\n          -106,\n          123,\n          -91,\n          66,\n          -90,\n          -70,\n          -109,\n          66,\n          75,\n          -62,\n          99,\n          66,\n          115,\n          87,\n          -100,\n          66,\n          -121,\n          115,\n          100,\n          66,\n          -92,\n          -128,\n          -56,\n          66,\n          -122,\n          -64,\n          38,\n          66,\n          -85,\n          121,\n          -28,\n          66,\n          -112,\n          -35,\n          117,\n          66,\n          114,\n          55,\n          -93,\n          66,\n          -106,\n          -97,\n          16,\n          66,\n          122,\n          81,\n          7,\n          66,\n          99,\n          -94,\n          5,\n          66,\n          91,\n          77,\n          -124,\n          66,\n          104,\n          -80,\n          -112,\n          66,\n          104,\n          -114,\n          84,\n          66,\n          -115,\n          74,\n          25,\n          66,\n          -61,\n          -57,\n          -118,\n          66,\n          -117,\n          -90,\n          89,\n          66,\n          113,\n          23,\n          -73,\n          66,\n          -88,\n          121,\n          -125,\n          66,\n          84,\n          54,\n          24,\n          66,\n          -110,\n          -36,\n          78,\n          66,\n          -94,\n      
    18,\n          -106,\n          66,\n          -96,\n          -99,\n          9,\n          66,\n          -108,\n          -41,\n          -126,\n          66,\n          114,\n          16,\n          89,\n          66,\n          103,\n          -5,\n          -128,\n          66,\n          -78,\n          38,\n          16,\n          66,\n          -128,\n          -119,\n          18,\n          66,\n          -101,\n          -61,\n          13,\n          66,\n          -87,\n          28,\n          89,\n          66,\n          88,\n          114,\n          -81,\n          66,\n          -120,\n          -60,\n          50,\n          66,\n          -94,\n          -66,\n          47,\n          66,\n          -94,\n          47,\n          -58,\n          66,\n          95,\n          -8,\n          -90,\n          66,\n          -104,\n          -11,\n          -55,\n          66,\n          121,\n          -11,\n          51,\n          66,\n          -109,\n          -92,\n          -126,\n          66,\n          -105,\n          79,\n          -27,\n          66,\n          -61,\n          -69,\n          115,\n          66,\n          71,\n          22,\n          56,\n          66,\n          -75,\n          48,\n          93,\n          66,\n          -99,\n          36,\n          30,\n          66,\n          -78,\n          -78,\n          114,\n          66,\n          117,\n          -26,\n          -78,\n          66,\n          -117,\n          -60,\n          -123,\n          66,\n          -98,\n          105,\n          7,\n          66,\n          -83,\n          -96,\n          33,\n          66,\n          108,\n          21,\n          23,\n          66,\n          95,\n          49,\n          104,\n          66,\n          109,\n          69,\n          -79,\n          66,\n          -108,\n          -40,\n          123,\n          66,\n          115,\n          51,\n          114,\n          66,\n          -128,\n          
8,\n          -101,\n          66,\n          -99,\n          -52,\n          -5,\n          66,\n          -121,\n          94,\n          -115,\n          66,\n          -91,\n          -53,\n          -71,\n          66,\n          -105,\n          47,\n          -25,\n          66,\n          -88,\n          46,\n          -116,\n          66,\n          -124,\n          67,\n          -30,\n          66,\n          106,\n          116,\n          -87,\n          66,\n          88,\n          96,\n          34,\n          66,\n          -121,\n          -117,\n          122,\n          66,\n          -102,\n          -101,\n          6,\n          66,\n          80,\n          -127,\n          -93,\n          66,\n          -82,\n          -94,\n          22,\n          66,\n          -61,\n          116,\n          0,\n          66,\n          -101,\n          88,\n          -56,\n          66,\n          116,\n          11,\n          -104,\n          66,\n          -87,\n          96,\n          -32,\n          66,\n          -124,\n          1,\n          47,\n          66,\n          107,\n          -11,\n          87,\n          66,\n          -97,\n          15,\n          20,\n          66,\n          -101,\n          -45,\n          -128,\n          66,\n          -96,\n          -11,\n          4,\n          66,\n          -122,\n          81,\n          99,\n          66,\n          -122,\n          -21,\n          -97,\n          66,\n          -71,\n          52,\n          -77,\n          66,\n          -88,\n          108,\n          -95,\n          66,\n          -109,\n          57,\n          -7,\n          66,\n          -110,\n          -16,\n          97,\n          66,\n          126,\n          -86,\n          70,\n          66,\n          -77,\n          -80,\n          -18,\n          66,\n          -114,\n          112,\n          -120,\n          66,\n          -92,\n          -120,\n          96,\n          66,\n          -122,\n     
     -122,\n          67,\n          66,\n          127,\n          64,\n          86,\n          66,\n          -90,\n          91,\n          76,\n          66,\n          -125,\n          7,\n          -109,\n          66,\n          -73,\n          30,\n          -110,\n          66,\n          -63,\n          2,\n          105,\n          66,\n          -86,\n          80,\n          71,\n          66,\n          -102,\n          -19,\n          -57,\n          66,\n          -98,\n          -39,\n          28,\n          66,\n          125,\n          -1,\n          34,\n          66,\n          104,\n          15,\n          -42,\n          66,\n          -103,\n          -76,\n          -117,\n          66,\n          -92,\n          -103,\n          -32,\n          66,\n          92,\n          -108,\n          81,\n          66,\n          -110,\n          17,\n          -88,\n          66,\n          104,\n          73,\n          5,\n          66,\n          -90,\n          13,\n          57,\n          66,\n          105,\n          3,\n          35,\n          66,\n          124,\n          49,\n          -13,\n          66,\n          -101,\n          -81,\n          106,\n          66,\n          -111,\n          99,\n          58,\n          66,\n          -120,\n          20,\n          -62,\n          66,\n          -107,\n          83,\n          -114,\n          66,\n          -88,\n          -30,\n          -98,\n          66,\n          -104,\n          120,\n          109,\n          66,\n          102,\n          9,\n          -9,\n          66,\n          -87,\n          -125,\n          80,\n          66,\n          -92,\n          -117,\n          20,\n          66,\n          108,\n          36,\n          123,\n          66,\n          -67,\n          -44,\n          -3,\n          66,\n          -95,\n          -30,\n          -106,\n          66,\n          -111,\n          -87,\n          -126,\n          66,\n          94,\n        
  -99,\n          45,\n          66,\n          -91,\n          -116,\n          127,\n          66,\n          -114,\n          -8,\n          127,\n          66,\n          -102,\n          -83,\n          -46,\n          66,\n          -101,\n          -93,\n          -57,\n          66,\n          -97,\n          -17,\n          -64,\n          66,\n          110,\n          112,\n          -44,\n          66,\n          -98,\n          113,\n          -103,\n          66,\n          -105,\n          -76,\n          18,\n          66,\n          -63,\n          108,\n          -97,\n          66,\n          -102,\n          38,\n          73,\n          66,\n          -87,\n          108,\n          -53,\n          66,\n          -103,\n          52,\n          -128,\n          66,\n          -92,\n          32,\n          111,\n          66,\n          -103,\n          -98,\n          -45,\n          66,\n          -115,\n          14,\n          -3,\n          66,\n          123,\n          -109,\n          -82,\n          66,\n          71,\n          104,\n          -36,\n          66,\n          110,\n          -57,\n          -59,\n          66,\n          -103,\n          -63,\n          -99,\n          66,\n          -83,\n          -60,\n          -82,\n          66,\n          -92,\n          -76,\n          88,\n          66,\n          -94,\n          57,\n          113,\n          66,\n          -122,\n          -85,\n          106,\n          66,\n          -68,\n          30,\n          95,\n          66,\n          126,\n          37,\n          88,\n          66,\n          125,\n          -47,\n          29,\n          66,\n          -116,\n          89,\n          96,\n          66,\n          -88,\n          -111,\n          68,\n          66,\n          121,\n          119,\n          124,\n          66,\n          -92,\n          85,\n          27,\n          66,\n          -97,\n          77,\n          -75,\n          66,\n          
-115,\n          -107,\n          -34,\n          66,\n          -106,\n          59,\n          74,\n          66,\n          -118,\n          -80,\n          80,\n          66,\n          -127,\n          -95,\n          70,\n          66,\n          -127,\n          -26,\n          86,\n          66,\n          -70,\n          -114,\n          -15,\n          66,\n          -93,\n          -11,\n          11,\n          66,\n          -99,\n          -30,\n          0,\n          66,\n          -101,\n          -80,\n          25,\n          66,\n          -96,\n          8,\n          57,\n          66,\n          86,\n          -100,\n          -126,\n          66,\n          116,\n          -76,\n          -48,\n          66,\n          -100,\n          35,\n          19,\n          66,\n          -124,\n          -23,\n          64,\n          66,\n          -98,\n          68,\n          46,\n          66,\n          115,\n          -126,\n          -92,\n          66,\n          -79,\n          -110,\n          44,\n          66,\n          -120,\n          102,\n          -26,\n          66,\n          -89,\n          69,\n          44,\n          66,\n          -65,\n          38,\n          72,\n          66,\n          -125,\n          75,\n          34,\n          66,\n          -104,\n          53,\n          36,\n          66,\n          -111,\n          -53,\n          -78,\n          66,\n          -95,\n          -11,\n          114,\n          66,\n          118,\n          20,\n          -102,\n          66,\n          -101,\n          3,\n          72,\n          66,\n          -97,\n          114,\n          -79,\n          66,\n          84,\n          -120,\n          125,\n          66,\n          80,\n          29,\n          -116,\n          66,\n          -125,\n          -68,\n          0,\n          66,\n          -125,\n          -3,\n          82,\n          66,\n          112,\n          -83,\n          -85,\n          66,\n        
  114,\n          53,\n          81,\n          66,\n          111,\n          -100,\n          -118,\n          66,\n          -99,\n          -61,\n          0,\n          66,\n          -127,\n          85,\n          50,\n          66,\n          -125,\n          65,\n          75,\n          66,\n          -106,\n          45,\n          8,\n          66,\n          -118,\n          120,\n          104,\n          66,\n          111,\n          -83,\n          -26,\n          66,\n          -120,\n          -79,\n          -11,\n          66,\n          -107,\n          -11,\n          29,\n          66,\n          -104,\n          78,\n          -43,\n          66,\n          -85,\n          -101,\n          92,\n          66,\n          -89,\n          93,\n          -19,\n          66,\n          -104,\n          -6,\n          -25,\n          66,\n          -111,\n          -113,\n          -67,\n          66,\n          -102,\n          120,\n          0,\n          66,\n          -116,\n          -2,\n          102,\n          66,\n          110,\n          39,\n          83,\n          66,\n          -121,\n          84,\n          28,\n          66,\n          -105,\n          52,\n          -72,\n          66,\n          -123,\n          50,\n          -10,\n          66,\n          -120,\n          83,\n          -67,\n          66,\n          -93,\n          3,\n          110,\n          66,\n          97,\n          -43,\n          62,\n          66,\n          118,\n          -74,\n          42,\n          66,\n          111,\n          -113,\n          63,\n          66,\n          -79,\n          -32,\n          -49,\n          66,\n          94,\n          1,\n          -47,\n          66,\n          88,\n          23,\n          -53,\n          66,\n          -112,\n          -127,\n          -13,\n          66,\n          -105,\n          -5,\n          -89,\n          66,\n          -83,\n          19,\n          -62,\n          66,\n        
  -64,\n          -74,\n          85,\n          66,\n          -118,\n          54,\n          45,\n          66,\n          -86,\n          -88,\n          -11,\n          66,\n          69,\n          -86,\n          104,\n          66,\n          114,\n          20,\n          52,\n          66,\n          -85,\n          15,\n          63,\n          66,\n          -71,\n          -83,\n          8,\n          66,\n          -84,\n          -110,\n          102,\n          66,\n          -116,\n          -125,\n          -77,\n          66,\n          -91,\n          -35,\n          -6,\n          66,\n          -110,\n          2,\n          -83,\n          66,\n          -116,\n          78,\n          39,\n          66,\n          -102,\n          -31,\n          -19,\n          66,\n          95,\n          99,\n          121,\n          66,\n          -88,\n          -104,\n          -67,\n          66,\n          -103,\n          -80,\n          90,\n          66,\n          -124,\n          23,\n          -71,\n          66,\n          -106,\n          102,\n          86,\n          66,\n          -99,\n          -84,\n          -102,\n          66,\n          -109,\n          93,\n          9,\n          66,\n          -83,\n          -47,\n          -94\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          461832191,\n          418676726,\n          162175476,\n          384891898,\n          675263187,\n          680187224,\n          123221008,\n          604112001,\n          2049\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          1000859551,\n          682065855,\n          552189678,\n          373257974,\n          548345042,\n          59130747,\n          350560777,\n          542249088,\n          1408\n        ],\n        
\"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -8285893777058293760,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          81583742,\n          385386586,\n          363936474,\n          878815858,\n          242833911,\n          80808377,\n          346646609,\n          116776262,\n          464958554,\n          106614201,\n          441779250,\n          201190265,\n          652076871,\n          338423675,\n          313904821,\n          391588830,\n          669161562,\n          330627649,\n          596832195,\n          372463395,\n          374411119,\n          452578515,\n          511408254,\n          523175133,\n          716953662,\n          448388817,\n          311499591,\n          1033932471,\n          65773007,\n          236796129,\n          35726673,\n          120187742,\n          1006204994,\n          500991723,\n          573783925,\n          712718303,\n          225549543,\n          590130523,\n          257993027,\n          65874550,\n          469166766,\n          246363898,\n          19\n        ],\n        \"cutValueData\": [\n          66,\n          -73,\n          80,\n          45,\n          66,\n          72,\n          77,\n          120,\n          66,\n          -127,\n          121,\n          127,\n     
     66,\n          -86,\n          43,\n          -31,\n          66,\n          84,\n          1,\n          117,\n          66,\n          91,\n          -15,\n          -116,\n          66,\n          -97,\n          -32,\n          124,\n          66,\n          103,\n          -122,\n          6,\n          66,\n          69,\n          -17,\n          84,\n          66,\n          -94,\n          -14,\n          -100,\n          66,\n          -123,\n          -99,\n          -41,\n          66,\n          103,\n          -33,\n          97,\n          66,\n          -119,\n          7,\n          60,\n          66,\n          -105,\n          -105,\n          22,\n          66,\n          107,\n          -41,\n          -70,\n          66,\n          -78,\n          -42,\n          -94,\n          66,\n          -88,\n          -126,\n          108,\n          66,\n          109,\n          65,\n          -128,\n          66,\n          -90,\n          61,\n          70,\n          66,\n          -63,\n          93,\n          -127,\n          66,\n          -96,\n          -33,\n          -128,\n          66,\n          -113,\n          97,\n          -17,\n          66,\n          -81,\n          107,\n          5,\n          66,\n          -69,\n          55,\n          75,\n          66,\n          -74,\n          -94,\n          6,\n          66,\n          -87,\n          32,\n          -106,\n          66,\n          99,\n          79,\n          -7,\n          66,\n          -96,\n          -7,\n          110,\n          66,\n          96,\n          0,\n          -85,\n          66,\n          81,\n          51,\n          14,\n          66,\n          100,\n          -59,\n          120,\n          66,\n          120,\n          -86,\n          93,\n          66,\n          -67,\n          -119,\n          -28,\n          66,\n          -87,\n          52,\n          -25,\n          66,\n          -94,\n          96,\n          41,\n          
66,\n          89,\n          -30,\n          -45,\n          66,\n          -82,\n          -76,\n          25,\n          66,\n          127,\n          -112,\n          -107,\n          66,\n          -110,\n          89,\n          15,\n          66,\n          -109,\n          -37,\n          109,\n          66,\n          -80,\n          -65,\n          100,\n          66,\n          -65,\n          -58,\n          -112,\n          66,\n          -103,\n          -65,\n          -30,\n          66,\n          -122,\n          46,\n          -108,\n          66,\n          -79,\n          32,\n          -107,\n          66,\n          -118,\n          -29,\n          -120,\n          66,\n          -97,\n          34,\n          -63,\n          66,\n          94,\n          61,\n          114,\n          66,\n          -104,\n          -12,\n          88,\n          66,\n          89,\n          -64,\n          57,\n          66,\n          -111,\n          -56,\n          110,\n          66,\n          126,\n          28,\n          6,\n          66,\n          -84,\n          -42,\n          106,\n          66,\n          -77,\n          -10,\n          54,\n          66,\n          -77,\n          -43,\n          -94,\n          66,\n          -97,\n          125,\n          69,\n          66,\n          -104,\n          -122,\n          -77,\n          66,\n          123,\n          56,\n          90,\n          66,\n          101,\n          9,\n          -7,\n          66,\n          -120,\n          5,\n          -16,\n          66,\n          99,\n          -65,\n          63,\n          66,\n          79,\n          -128,\n          78,\n          66,\n          -121,\n          124,\n          -14,\n          66,\n          -75,\n          49,\n          -109,\n          66,\n          -124,\n          -35,\n          -79,\n          66,\n          124,\n          -94,\n          -41,\n          66,\n          -120,\n          38,\n          -69,\n   
       66,\n          -108,\n          -98,\n          121,\n          66,\n          77,\n          -3,\n          65,\n          66,\n          -120,\n          -74,\n          49,\n          66,\n          93,\n          86,\n          -66,\n          66,\n          -116,\n          -63,\n          52,\n          66,\n          -118,\n          -107,\n          71,\n          66,\n          122,\n          -97,\n          114,\n          66,\n          -111,\n          -11,\n          -124,\n          66,\n          -114,\n          -10,\n          -87,\n          66,\n          -102,\n          28,\n          29,\n          66,\n          -108,\n          -105,\n          -111,\n          66,\n          113,\n          101,\n          -18,\n          66,\n          -107,\n          114,\n          103,\n          66,\n          -116,\n          -11,\n          101,\n          66,\n          -80,\n          -32,\n          -13,\n          66,\n          -85,\n          -78,\n          -87,\n          66,\n          -110,\n          -112,\n          13,\n          66,\n          122,\n          -16,\n          74,\n          66,\n          125,\n          -76,\n          16,\n          66,\n          -111,\n          72,\n          -118,\n          66,\n          -90,\n          -18,\n          -4,\n          66,\n          -126,\n          -77,\n          -119,\n          66,\n          126,\n          5,\n          -38,\n          66,\n          -95,\n          -63,\n          81,\n          66,\n          -115,\n          72,\n          -32,\n          66,\n          -111,\n          -3,\n          -92,\n          66,\n          -90,\n          100,\n          51,\n          66,\n          -107,\n          57,\n          59,\n          66,\n          98,\n          20,\n          -32,\n          66,\n          68,\n          -102,\n          -60,\n          66,\n          -102,\n          -38,\n          95,\n          66,\n          -83,\n          -97,\n     
     73,\n          66,\n          94,\n          -24,\n          53,\n          66,\n          -117,\n          -67,\n          47,\n          66,\n          -69,\n          114,\n          -17,\n          66,\n          -115,\n          117,\n          -92,\n          66,\n          -116,\n          3,\n          -35,\n          66,\n          109,\n          -97,\n          -43,\n          66,\n          -119,\n          -59,\n          -119,\n          66,\n          101,\n          -18,\n          28,\n          66,\n          -109,\n          -115,\n          61,\n          66,\n          -101,\n          -95,\n          -95,\n          66,\n          -112,\n          127,\n          104,\n          66,\n          -112,\n          -2,\n          -85,\n          66,\n          -78,\n          -54,\n          -78,\n          66,\n          -99,\n          -30,\n          -81,\n          66,\n          -106,\n          -85,\n          92,\n          66,\n          -68,\n          -109,\n          72,\n          66,\n          -107,\n          125,\n          68,\n          66,\n          -102,\n          -92,\n          -64,\n          66,\n          -109,\n          87,\n          -99,\n          66,\n          -94,\n          -7,\n          -10,\n          66,\n          -80,\n          110,\n          -109,\n          66,\n          122,\n          -33,\n          109,\n          66,\n          -77,\n          0,\n          -108,\n          66,\n          -114,\n          124,\n          68,\n          66,\n          -72,\n          -51,\n          46,\n          66,\n          81,\n          -63,\n          -50,\n          66,\n          127,\n          -68,\n          34,\n          66,\n          -109,\n          -100,\n          0,\n          66,\n          115,\n          66,\n          -77,\n          66,\n          -100,\n          87,\n          -113,\n          66,\n          -71,\n          -114,\n          -107,\n          66,\n          -88,\n     
     -24,\n          119,\n          66,\n          -111,\n          41,\n          94,\n          66,\n          -116,\n          48,\n          115,\n          66,\n          -106,\n          57,\n          -55,\n          66,\n          124,\n          122,\n          -31,\n          66,\n          -111,\n          -115,\n          70,\n          66,\n          76,\n          -76,\n          -42,\n          66,\n          -83,\n          85,\n          36,\n          66,\n          -98,\n          82,\n          117,\n          66,\n          -85,\n          69,\n          -25,\n          66,\n          103,\n          -70,\n          -17,\n          66,\n          124,\n          -51,\n          46,\n          66,\n          -125,\n          -46,\n          -117,\n          66,\n          126,\n          -25,\n          95,\n          66,\n          -102,\n          70,\n          9,\n          66,\n          -120,\n          -76,\n          118,\n          66,\n          -116,\n          -36,\n          0,\n          66,\n          -98,\n          55,\n          71,\n          66,\n          -109,\n          4,\n          -64,\n          66,\n          107,\n          8,\n          23,\n          66,\n          -117,\n          -85,\n          22,\n          66,\n          -85,\n          -38,\n          -91,\n          66,\n          -124,\n          -79,\n          62,\n          66,\n          -91,\n          -30,\n          -30,\n          66,\n          -93,\n          58,\n          6,\n          66,\n          -100,\n          9,\n          38,\n          66,\n          122,\n          94,\n          -86,\n          66,\n          -91,\n          47,\n          -15,\n          66,\n          -79,\n          -42,\n          78,\n          66,\n          -115,\n          -115,\n          78,\n          66,\n          110,\n          -40,\n          -62,\n          66,\n          -108,\n          -4,\n          87,\n          66,\n          -117,\n         
 -61,\n          14,\n          66,\n          96,\n          54,\n          87,\n          66,\n          -83,\n          -23,\n          -111,\n          66,\n          77,\n          35,\n          -121,\n          66,\n          -102,\n          -67,\n          21,\n          66,\n          -80,\n          -6,\n          -25,\n          66,\n          -118,\n          -22,\n          122,\n          66,\n          -64,\n          -74,\n          4,\n          66,\n          -97,\n          -33,\n          -107,\n          66,\n          -106,\n          -15,\n          -22,\n          66,\n          -81,\n          -29,\n          122,\n          66,\n          107,\n          -89,\n          -98,\n          66,\n          -121,\n          -85,\n          -58,\n          66,\n          -109,\n          3,\n          -44,\n          66,\n          -115,\n          -13,\n          -52,\n          66,\n          -73,\n          -91,\n          -76,\n          66,\n          -121,\n          -31,\n          10,\n          66,\n          71,\n          112,\n          66,\n          66,\n          -64,\n          51,\n          100,\n          66,\n          -101,\n          51,\n          -76,\n          66,\n          -72,\n          -85,\n          44,\n          66,\n          126,\n          33,\n          -127,\n          66,\n          82,\n          98,\n          -87,\n          66,\n          88,\n          -82,\n          107,\n          66,\n          -112,\n          -29,\n          -94,\n          66,\n          -92,\n          47,\n          -29,\n          66,\n          113,\n          75,\n          -86,\n          66,\n          -109,\n          119,\n          -90,\n          66,\n          118,\n          26,\n          -24,\n          66,\n          111,\n          81,\n          -37,\n          66,\n          -108,\n          -92,\n          18,\n          66,\n          -96,\n          75,\n          18,\n          66,\n          121,\n       
   48,\n          73,\n          66,\n          -98,\n          70,\n          -12,\n          66,\n          105,\n          -90,\n          73,\n          66,\n          -97,\n          -20,\n          -81,\n          66,\n          -116,\n          -100,\n          34,\n          66,\n          -123,\n          20,\n          32,\n          66,\n          -89,\n          12,\n          -36,\n          66,\n          -122,\n          -26,\n          48,\n          66,\n          -90,\n          118,\n          86,\n          66,\n          112,\n          122,\n          9,\n          66,\n          -127,\n          -5,\n          -3,\n          66,\n          95,\n          -128,\n          71,\n          66,\n          99,\n          0,\n          -88,\n          66,\n          -121,\n          99,\n          -91,\n          66,\n          123,\n          31,\n          11,\n          66,\n          -110,\n          92,\n          46,\n          66,\n          112,\n          -41,\n          -72,\n          66,\n          117,\n          43,\n          76,\n          66,\n          -66,\n          13,\n          -122,\n          66,\n          -126,\n          4,\n          72,\n          66,\n          -123,\n          101,\n          95,\n          66,\n          122,\n          -99,\n          -122,\n          66,\n          -128,\n          -59,\n          -67,\n          66,\n          99,\n          -83,\n          103,\n          66,\n          -120,\n          32,\n          -39,\n          66,\n          -89,\n          -103,\n          -58,\n          66,\n          -92,\n          22,\n          98,\n          66,\n          -86,\n          24,\n          -69,\n          66,\n          -79,\n          -94,\n          49,\n          66,\n          -100,\n          114,\n          -19,\n          66,\n          -117,\n          -44,\n          -92,\n          66,\n          -119,\n          15,\n          -63,\n          66,\n          -98,\n          
96,\n          -19,\n          66,\n          107,\n          -47,\n          35,\n          66,\n          127,\n          11,\n          20,\n          66,\n          -112,\n          103,\n          -20,\n          66,\n          -64,\n          -121,\n          78,\n          66,\n          111,\n          -110,\n          -118,\n          66,\n          -86,\n          -5,\n          -37,\n          66,\n          110,\n          -60,\n          1,\n          66,\n          -112,\n          -109,\n          -120,\n          66,\n          -121,\n          -7,\n          -10,\n          66,\n          -95,\n          -53,\n          -125,\n          66,\n          121,\n          44,\n          6,\n          66,\n          -104,\n          -70,\n          100,\n          66,\n          -123,\n          -82,\n          -22,\n          66,\n          121,\n          10,\n          -128,\n          66,\n          -102,\n          -123,\n          -96,\n          66,\n          93,\n          -9,\n          -13,\n          66,\n          -104,\n          25,\n          125,\n          66,\n          -122,\n          -46,\n          -128,\n          66,\n          -92,\n          32,\n          89,\n          66,\n          -99,\n          32,\n          -104,\n          66,\n          -74,\n          111,\n          56,\n          66,\n          -78,\n          -107,\n          25,\n          66,\n          126,\n          -88,\n          -76,\n          66,\n          -78,\n          69,\n          -91,\n          66,\n          -79,\n          34,\n          -58,\n          66,\n          105,\n          -61,\n          79,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 253,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          770049260,\n    
      731794148,\n          1033121272,\n          583495964,\n          985223492,\n          769498052,\n          767688749,\n          710470975,\n          768443963,\n          1142569354,\n          581750635,\n          1118424983,\n          759694936,\n          364\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          774781901,\n          725409556,\n          1026146200,\n          583515404,\n          1027804378,\n          585973570,\n          769322195,\n          710352553,\n          755176892,\n          712405651,\n          595558466,\n          982900942,\n          970683632,\n          365\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3042871460721369999,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          818563458,\n          267024833,\n          592483417,\n          601842370,\n          638545174,\n          358713604,\n          524252847,\n          83682238,\n          485162116,\n          232785329,\n          69940461,\n          649755178,\n          585897883,\n          531552002,\n          166221134,\n          527594082,\n          192270582,\n          306093898,\n          577226065,\n          540259506,\n          700193265,\n          356647355,\n   
       12031430,\n          728793122,\n          694852009,\n          262245903,\n          843596849,\n          129431883,\n          573061619,\n          423118739,\n          850836800,\n          514586036,\n          834951447,\n          477313838,\n          611354023,\n          586263186,\n          417820913,\n          148138274,\n          199159551,\n          242168422,\n          628870152,\n          257972368,\n          8336\n        ],\n        \"cutValueData\": [\n          66,\n          127,\n          -95,\n          61,\n          66,\n          -103,\n          42,\n          93,\n          66,\n          -83,\n          -123,\n          81,\n          66,\n          93,\n          76,\n          25,\n          66,\n          -122,\n          -4,\n          -51,\n          66,\n          92,\n          -89,\n          -23,\n          66,\n          81,\n          119,\n          81,\n          66,\n          -103,\n          5,\n          33,\n          66,\n          -65,\n          118,\n          108,\n          66,\n          -123,\n          3,\n          73,\n          66,\n          -106,\n          64,\n          -123,\n          66,\n          -100,\n          82,\n          -101,\n          66,\n          -83,\n          112,\n          -119,\n          66,\n          -107,\n          -92,\n          3,\n          66,\n          -110,\n          73,\n          36,\n          66,\n          -91,\n          -100,\n          22,\n          66,\n          122,\n          -66,\n          107,\n          66,\n          -99,\n          116,\n          80,\n          66,\n          121,\n          -92,\n          70,\n          66,\n          -111,\n          123,\n          -117,\n          66,\n          -97,\n          -51,\n          -91,\n          66,\n          -89,\n          30,\n          -8,\n          66,\n          120,\n          80,\n          7,\n          66,\n          -122,\n          -37,\n          -73,\n          
66,\n          -71,\n          77,\n          64,\n          66,\n          -106,\n          80,\n          24,\n          66,\n          102,\n          118,\n          103,\n          66,\n          -66,\n          -57,\n          -126,\n          66,\n          87,\n          -61,\n          19,\n          66,\n          95,\n          97,\n          -48,\n          66,\n          -86,\n          5,\n          -27,\n          66,\n          -118,\n          -114,\n          -17,\n          66,\n          80,\n          -57,\n          25,\n          66,\n          -60,\n          111,\n          102,\n          66,\n          -93,\n          -66,\n          98,\n          66,\n          -69,\n          -100,\n          108,\n          66,\n          -102,\n          50,\n          -126,\n          66,\n          -76,\n          23,\n          20,\n          66,\n          -80,\n          -49,\n          68,\n          66,\n          82,\n          93,\n          -72,\n          66,\n          -124,\n          -108,\n          99,\n          66,\n          -120,\n          56,\n          52,\n          66,\n          -90,\n          63,\n          -114,\n          66,\n          125,\n          75,\n          -48,\n          66,\n          112,\n          82,\n          23,\n          66,\n          -120,\n          -4,\n          107,\n          66,\n          -116,\n          14,\n          -17,\n          66,\n          127,\n          86,\n          18,\n          66,\n          -68,\n          -80,\n          -57,\n          66,\n          117,\n          67,\n          -41,\n          66,\n          -96,\n          27,\n          109,\n          66,\n          92,\n          16,\n          -81,\n          66,\n          -123,\n          47,\n          74,\n          66,\n          -86,\n          10,\n          -68,\n          66,\n          126,\n          71,\n          122,\n          66,\n          -112,\n          -81,\n          -96,\n          66,\n  
        -123,\n          -29,\n          6,\n          66,\n          101,\n          103,\n          -56,\n          66,\n          80,\n          -69,\n          -43,\n          66,\n          89,\n          23,\n          -89,\n          66,\n          -83,\n          36,\n          -69,\n          66,\n          96,\n          41,\n          84,\n          66,\n          -106,\n          -32,\n          17,\n          66,\n          -118,\n          34,\n          106,\n          66,\n          -113,\n          29,\n          22,\n          66,\n          -90,\n          -59,\n          -13,\n          66,\n          -115,\n          -84,\n          -35,\n          66,\n          127,\n          -15,\n          10,\n          66,\n          -81,\n          2,\n          20,\n          66,\n          -89,\n          22,\n          79,\n          66,\n          -83,\n          40,\n          -3,\n          66,\n          -127,\n          19,\n          84,\n          66,\n          -85,\n          47,\n          117,\n          66,\n          -90,\n          89,\n          -84,\n          66,\n          -112,\n          117,\n          -103,\n          66,\n          -114,\n          -115,\n          -11,\n          66,\n          -108,\n          -78,\n          44,\n          66,\n          -117,\n          -117,\n          6,\n          66,\n          -64,\n          18,\n          69,\n          66,\n          -68,\n          -86,\n          -13,\n          66,\n          -88,\n          84,\n          65,\n          66,\n          -126,\n          47,\n          125,\n          66,\n          -74,\n          -58,\n          -33,\n          66,\n          78,\n          -105,\n          44,\n          66,\n          112,\n          -109,\n          57,\n          66,\n          117,\n          1,\n          -71,\n          66,\n          85,\n          5,\n          107,\n          66,\n          74,\n          -43,\n          69,\n          66,\n          
117,\n          -58,\n          93,\n          66,\n          -96,\n          -128,\n          7,\n          66,\n          -65,\n          25,\n          -23,\n          66,\n          -126,\n          -81,\n          -51,\n          66,\n          82,\n          -59,\n          -32,\n          66,\n          122,\n          38,\n          78,\n          66,\n          -124,\n          -62,\n          -43,\n          66,\n          -98,\n          -77,\n          -119,\n          66,\n          100,\n          103,\n          24,\n          66,\n          -104,\n          -54,\n          96,\n          66,\n          -80,\n          74,\n          -63,\n          66,\n          -86,\n          33,\n          -76,\n          66,\n          118,\n          63,\n          101,\n          66,\n          -108,\n          -87,\n          -72,\n          66,\n          -111,\n          -24,\n          78,\n          66,\n          110,\n          74,\n          47,\n          66,\n          -99,\n          27,\n          33,\n          66,\n          -94,\n          37,\n          121,\n          66,\n          -111,\n          -4,\n          -46,\n          66,\n          -123,\n          73,\n          -107,\n          66,\n          87,\n          -127,\n          -63,\n          66,\n          -117,\n          47,\n          72,\n          66,\n          -95,\n          -84,\n          -8,\n          66,\n          124,\n          18,\n          -111,\n          66,\n          -120,\n          -71,\n          -32,\n          66,\n          94,\n          -1,\n          -2,\n          66,\n          124,\n          57,\n          31,\n          66,\n          -96,\n          -81,\n          93,\n          66,\n          110,\n          9,\n          -16,\n          66,\n          -101,\n          107,\n          -45,\n          66,\n          91,\n          -111,\n          15,\n          66,\n          -82,\n          90,\n          -80,\n          66,\n          
-127,\n          -98,\n          41,\n          66,\n          -111,\n          -80,\n          74,\n          66,\n          -93,\n          96,\n          115,\n          66,\n          -98,\n          -33,\n          64,\n          66,\n          -95,\n          -62,\n          -43,\n          66,\n          107,\n          -60,\n          30,\n          66,\n          -71,\n          -67,\n          -102,\n          66,\n          89,\n          -46,\n          -84,\n          66,\n          119,\n          -81,\n          3,\n          66,\n          85,\n          22,\n          -16,\n          66,\n          -96,\n          30,\n          -7,\n          66,\n          -81,\n          -66,\n          -67,\n          66,\n          113,\n          -39,\n          62,\n          66,\n          -79,\n          -74,\n          -96,\n          66,\n          -95,\n          51,\n          29,\n          66,\n          108,\n          -36,\n          -4,\n          66,\n          95,\n          -35,\n          46,\n          66,\n          -116,\n          -57,\n          106,\n          66,\n          108,\n          37,\n          39,\n          66,\n          -81,\n          -34,\n          -33,\n          66,\n          -110,\n          69,\n          117,\n          66,\n          -107,\n          -80,\n          82,\n          66,\n          -75,\n          -76,\n          -59,\n          66,\n          -107,\n          91,\n          52,\n          66,\n          -113,\n          49,\n          59,\n          66,\n          -81,\n          -86,\n          -79,\n          66,\n          -71,\n          -83,\n          31,\n          66,\n          100,\n          9,\n          9,\n          66,\n          120,\n          105,\n          13,\n          66,\n          -64,\n          47,\n          5,\n          66,\n          -84,\n          113,\n          107,\n          66,\n          -85,\n          15,\n          -22,\n          66,\n          -65,\n      
    48,\n          36,\n          66,\n          -93,\n          -62,\n          -77,\n          66,\n          92,\n          19,\n          -27,\n          66,\n          -128,\n          -117,\n          84,\n          66,\n          -93,\n          -48,\n          110,\n          66,\n          118,\n          30,\n          -67,\n          66,\n          -106,\n          -30,\n          -24,\n          66,\n          112,\n          -45,\n          -63,\n          66,\n          -100,\n          114,\n          -97,\n          66,\n          -114,\n          93,\n          -110,\n          66,\n          -96,\n          35,\n          -71,\n          66,\n          -92,\n          45,\n          -44,\n          66,\n          -85,\n          111,\n          -71,\n          66,\n          -63,\n          -75,\n          16,\n          66,\n          -124,\n          -88,\n          41,\n          66,\n          -119,\n          -47,\n          -31,\n          66,\n          -65,\n          -10,\n          7,\n          66,\n          98,\n          -33,\n          107,\n          66,\n          -116,\n          -10,\n          -81,\n          66,\n          -114,\n          -45,\n          76,\n          66,\n          -95,\n          -76,\n          59,\n          66,\n          -69,\n          -88,\n          -8,\n          66,\n          -79,\n          -79,\n          74,\n          66,\n          82,\n          47,\n          -121,\n          66,\n          -97,\n          71,\n          72,\n          66,\n          -84,\n          -14,\n          59,\n          66,\n          -112,\n          12,\n          69,\n          66,\n          -59,\n          -34,\n          -126,\n          66,\n          114,\n          104,\n          50,\n          66,\n          -100,\n          63,\n          120,\n          66,\n          -116,\n          -69,\n          119,\n          66,\n          -63,\n          -34,\n          62,\n          66,\n          110,\n   
       127,\n          111,\n          66,\n          -109,\n          -8,\n          -81,\n          66,\n          -114,\n          -108,\n          -116,\n          66,\n          122,\n          68,\n          -2,\n          66,\n          -128,\n          67,\n          86,\n          66,\n          -91,\n          -33,\n          -6,\n          66,\n          -128,\n          80,\n          80,\n          66,\n          123,\n          45,\n          19,\n          66,\n          -117,\n          115,\n          13,\n          66,\n          -81,\n          60,\n          -24,\n          66,\n          -123,\n          107,\n          19,\n          66,\n          -104,\n          26,\n          -73,\n          66,\n          109,\n          -83,\n          -61,\n          66,\n          -114,\n          -123,\n          -75,\n          66,\n          105,\n          37,\n          -69,\n          66,\n          -94,\n          -51,\n          112,\n          66,\n          -103,\n          -124,\n          -86,\n          66,\n          -103,\n          105,\n          95,\n          66,\n          121,\n          23,\n          98,\n          66,\n          -123,\n          -63,\n          -87,\n          66,\n          114,\n          -72,\n          64,\n          66,\n          -82,\n          -58,\n          71,\n          66,\n          -77,\n          -70,\n          74,\n          66,\n          -121,\n          3,\n          116,\n          66,\n          -123,\n          102,\n          -87,\n          66,\n          116,\n          -10,\n          18,\n          66,\n          -117,\n          -87,\n          19,\n          66,\n          -100,\n          20,\n          25,\n          66,\n          110,\n          -108,\n          -33,\n          66,\n          92,\n          -2,\n          91,\n          66,\n          107,\n          123,\n          114,\n          66,\n          -122,\n          -76,\n          88,\n          66,\n          
-71,\n          -92,\n          62,\n          66,\n          105,\n          75,\n          7,\n          66,\n          -95,\n          75,\n          -87,\n          66,\n          -116,\n          -75,\n          76,\n          66,\n          -109,\n          13,\n          -100,\n          66,\n          123,\n          113,\n          -58,\n          66,\n          124,\n          -29,\n          26,\n          66,\n          -112,\n          -87,\n          30,\n          66,\n          -78,\n          46,\n          67,\n          66,\n          -100,\n          -72,\n          21,\n          66,\n          124,\n          86,\n          69,\n          66,\n          -99,\n          -80,\n          -83,\n          66,\n          -104,\n          50,\n          17,\n          66,\n          -85,\n          115,\n          -27,\n          66,\n          -92,\n          -18,\n          -126,\n          66,\n          -84,\n          25,\n          66,\n          66,\n          -119,\n          48,\n          127,\n          66,\n          110,\n          51,\n          -20,\n          66,\n          -100,\n          42,\n          -119,\n          66,\n          -102,\n          -119,\n          -50,\n          66,\n          -113,\n          80,\n          110,\n          66,\n          -95,\n          95,\n          -48,\n          66,\n          -113,\n          -27,\n          -21,\n          66,\n          -111,\n          15,\n          -82,\n          66,\n          87,\n          -96,\n          123,\n          66,\n          -112,\n          18,\n          -99,\n          66,\n          125,\n          -126,\n          118,\n          66,\n          -97,\n          111,\n          12,\n          66,\n          -89,\n          103,\n          74,\n          66,\n          -71,\n          59,\n          51,\n          66,\n          -99,\n          44,\n          4,\n          66,\n          119,\n          110,\n          -4,\n          66,\n          
-119,\n          -38,\n          -29,\n          66,\n          -83,\n          -56,\n          -56,\n          66,\n          -83,\n          -26,\n          15,\n          66,\n          -77,\n          -124,\n          70,\n          66,\n          -101,\n          90,\n          -72,\n          66,\n          70,\n          12,\n          -55,\n          66,\n          -107,\n          111,\n          -118\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          263979007,\n          238943482,\n          861173295,\n          782356496,\n          69879311,\n          5211467,\n          664087149,\n          20185088,\n          2\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          534241023,\n          993588829,\n          868644511,\n          78510832,\n          310362411,\n          41550898,\n          800049877,\n          206995,\n          280\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3094212984032582080,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          305507652,\n          699820696,\n          805367403,\n      
    391631488,\n          572779008,\n          767723353,\n          144957048,\n          392687978,\n          230679448,\n          419884415,\n          861636011,\n          876161820,\n          291679656,\n          55783162,\n          171001627,\n          2687080,\n          460052471,\n          496706844,\n          592608466,\n          84448833,\n          401075265,\n          68391469,\n          527969991,\n          191298208,\n          691434969,\n          590001468,\n          754500652,\n          744342113,\n          839885172,\n          639197907,\n          485833829,\n          638708915,\n          710264857,\n          838079986,\n          412540093,\n          145864818,\n          402939544,\n          130866749,\n          500538829,\n          541498500,\n          66971836,\n          755679140,\n          5712\n        ],\n        \"cutValueData\": [\n          66,\n          111,\n          -72,\n          91,\n          66,\n          -94,\n          -29,\n          -117,\n          66,\n          74,\n          72,\n          -70,\n          66,\n          117,\n          -120,\n          -73,\n          66,\n          88,\n          12,\n          18,\n          66,\n          -110,\n          19,\n          -79,\n          66,\n          111,\n          124,\n          12,\n          66,\n          -86,\n          -81,\n          7,\n          66,\n          69,\n          32,\n          2,\n          66,\n          110,\n          126,\n          103,\n          66,\n          -118,\n          -2,\n          -4,\n          66,\n          69,\n          99,\n          -38,\n          66,\n          89,\n          -124,\n          41,\n          66,\n          -75,\n          66,\n          19,\n          66,\n          -123,\n          -30,\n          30,\n          66,\n          -91,\n          78,\n          104,\n          66,\n          -82,\n          -86,\n          -115,\n          66,\n          -90,\n          
92,\n          98,\n          66,\n          -79,\n          127,\n          9,\n          66,\n          -91,\n          -52,\n          -108,\n          66,\n          90,\n          123,\n          -111,\n          66,\n          83,\n          -18,\n          -40,\n          66,\n          -108,\n          -1,\n          -15,\n          66,\n          -113,\n          112,\n          -69,\n          66,\n          -79,\n          -109,\n          -32,\n          66,\n          -100,\n          -128,\n          -11,\n          66,\n          74,\n          -97,\n          72,\n          66,\n          -125,\n          -120,\n          -21,\n          66,\n          -78,\n          -33,\n          82,\n          66,\n          -67,\n          -26,\n          -30,\n          66,\n          -122,\n          80,\n          118,\n          66,\n          -113,\n          44,\n          109,\n          66,\n          -78,\n          -127,\n          -7,\n          66,\n          -62,\n          -70,\n          -81,\n          66,\n          111,\n          -69,\n          -99,\n          66,\n          -85,\n          47,\n          44,\n          66,\n          -86,\n          -118,\n          43,\n          66,\n          76,\n          -115,\n          -89,\n          66,\n          68,\n          48,\n          26,\n          66,\n          110,\n          7,\n          -31,\n          66,\n          -92,\n          15,\n          -37,\n          66,\n          -93,\n          -82,\n          78,\n          66,\n          119,\n          -69,\n          126,\n          66,\n          -99,\n          -96,\n          43,\n          66,\n          95,\n          -96,\n          -43,\n          66,\n          -64,\n          92,\n          62,\n          66,\n          -110,\n          -67,\n          83,\n          66,\n          109,\n          -122,\n          9,\n          66,\n          -124,\n          -91,\n          -96,\n          66,\n          -92,\n        
  5,\n          36,\n          66,\n          -91,\n          117,\n          -64,\n          66,\n          83,\n          110,\n          -19,\n          66,\n          -97,\n          108,\n          -23,\n          66,\n          -100,\n          -4,\n          -44,\n          66,\n          -74,\n          -104,\n          -46,\n          66,\n          127,\n          -25,\n          -47,\n          66,\n          -72,\n          -83,\n          -68,\n          66,\n          -86,\n          56,\n          59,\n          66,\n          -77,\n          21,\n          54,\n          66,\n          71,\n          106,\n          84,\n          66,\n          -116,\n          64,\n          -87,\n          66,\n          -108,\n          -116,\n          75,\n          66,\n          -103,\n          -71,\n          11,\n          66,\n          -118,\n          -61,\n          85,\n          66,\n          111,\n          -99,\n          22,\n          66,\n          87,\n          80,\n          -41,\n          66,\n          -117,\n          91,\n          19,\n          66,\n          -62,\n          -119,\n          68,\n          66,\n          -109,\n          -86,\n          -70,\n          66,\n          126,\n          21,\n          40,\n          66,\n          117,\n          124,\n          23,\n          66,\n          -72,\n          -67,\n          -15,\n          66,\n          -65,\n          -76,\n          92,\n          66,\n          -106,\n          -67,\n          -48,\n          66,\n          -99,\n          49,\n          23,\n          66,\n          101,\n          -39,\n          -11,\n          66,\n          69,\n          -86,\n          118,\n          66,\n          -108,\n          -15,\n          105,\n          66,\n          -128,\n          -20,\n          -107,\n          66,\n          103,\n          -56,\n          47,\n          66,\n          95,\n          86,\n          19,\n          66,\n          -70,\n          
63,\n          -11,\n          66,\n          -68,\n          60,\n          -50,\n          66,\n          -109,\n          -21,\n          127,\n          66,\n          -106,\n          50,\n          123,\n          66,\n          93,\n          23,\n          72,\n          66,\n          -95,\n          -31,\n          -113,\n          66,\n          -124,\n          62,\n          -51,\n          66,\n          -114,\n          36,\n          -38,\n          66,\n          -125,\n          29,\n          -14,\n          66,\n          93,\n          -7,\n          52,\n          66,\n          -101,\n          65,\n          -100,\n          66,\n          -87,\n          54,\n          108,\n          66,\n          -88,\n          -126,\n          78,\n          66,\n          -99,\n          -33,\n          49,\n          66,\n          -124,\n          -70,\n          37,\n          66,\n          -97,\n          108,\n          -95,\n          66,\n          -100,\n          -72,\n          -11,\n          66,\n          -122,\n          -21,\n          31,\n          66,\n          -120,\n          -115,\n          -71,\n          66,\n          -90,\n          103,\n          -36,\n          66,\n          -127,\n          116,\n          119,\n          66,\n          -96,\n          27,\n          -68,\n          66,\n          -117,\n          -105,\n          3,\n          66,\n          -125,\n          -15,\n          -32,\n          66,\n          -101,\n          -13,\n          108,\n          66,\n          80,\n          54,\n          -25,\n          66,\n          -70,\n          119,\n          59,\n          66,\n          -81,\n          -90,\n          -84,\n          66,\n          -94,\n          -49,\n          107,\n          66,\n          -95,\n          -89,\n          118,\n          66,\n          115,\n          -36,\n          -60,\n          66,\n          -69,\n          40,\n          -46,\n          66,\n          
-96,\n          -93,\n          -85,\n          66,\n          -111,\n          18,\n          103,\n          66,\n          -98,\n          79,\n          113,\n          66,\n          114,\n          79,\n          -66,\n          66,\n          -105,\n          73,\n          -116,\n          66,\n          -97,\n          -62,\n          11,\n          66,\n          -119,\n          15,\n          25,\n          66,\n          -128,\n          89,\n          76,\n          66,\n          86,\n          40,\n          -34,\n          66,\n          -84,\n          -40,\n          21,\n          66,\n          -97,\n          95,\n          125,\n          66,\n          -122,\n          -122,\n          -121,\n          66,\n          117,\n          -94,\n          -35,\n          66,\n          -124,\n          0,\n          34,\n          66,\n          -99,\n          -69,\n          86,\n          66,\n          -69,\n          35,\n          -7,\n          66,\n          111,\n          -17,\n          -43,\n          66,\n          -79,\n          -12,\n          -60,\n          66,\n          -75,\n          51,\n          86,\n          66,\n          93,\n          20,\n          117,\n          66,\n          -105,\n          -74,\n          -123,\n          66,\n          116,\n          -17,\n          122,\n          66,\n          -120,\n          4,\n          -60,\n          66,\n          87,\n          63,\n          -23,\n          66,\n          -122,\n          82,\n          -77,\n          66,\n          -122,\n          -102,\n          -14,\n          66,\n          -83,\n          -13,\n          -88,\n          66,\n          -122,\n          107,\n          53,\n          66,\n          -109,\n          -68,\n          23,\n          66,\n          -91,\n          -119,\n          62,\n          66,\n          -76,\n          -40,\n          124,\n          66,\n          -108,\n          -127,\n          -122,\n          66,\n    
      -108,\n          -33,\n          88,\n          66,\n          -88,\n          127,\n          20,\n          66,\n          -69,\n          -66,\n          -15,\n          66,\n          -114,\n          78,\n          5,\n          66,\n          -93,\n          7,\n          22,\n          66,\n          -71,\n          -77,\n          -68,\n          66,\n          -85,\n          124,\n          -121,\n          66,\n          -82,\n          77,\n          -5,\n          66,\n          -126,\n          -88,\n          -111,\n          66,\n          88,\n          -105,\n          22,\n          66,\n          -116,\n          -7,\n          21,\n          66,\n          -112,\n          -127,\n          -92,\n          66,\n          -125,\n          -77,\n          38,\n          66,\n          -110,\n          41,\n          -39,\n          66,\n          -86,\n          -88,\n          39,\n          66,\n          96,\n          90,\n          32,\n          66,\n          -90,\n          2,\n          -7,\n          66,\n          124,\n          20,\n          65,\n          66,\n          -118,\n          -17,\n          58,\n          66,\n          -110,\n          98,\n          -37,\n          66,\n          -118,\n          105,\n          -121,\n          66,\n          -106,\n          -68,\n          -49,\n          66,\n          82,\n          -75,\n          -54,\n          66,\n          -93,\n          -75,\n          106,\n          66,\n          -105,\n          -98,\n          33,\n          66,\n          87,\n          -47,\n          22,\n          66,\n          -96,\n          -127,\n          87,\n          66,\n          -117,\n          -120,\n          -52,\n          66,\n          124,\n          -61,\n          85,\n          66,\n          -105,\n          92,\n          -66,\n          66,\n          -118,\n          119,\n          11,\n          66,\n          -107,\n          78,\n          49,\n          66,\n  
        -103,\n          21,\n          -50,\n          66,\n          107,\n          -99,\n          -29,\n          66,\n          -113,\n          25,\n          -10,\n          66,\n          -99,\n          -117,\n          98,\n          66,\n          126,\n          -15,\n          117,\n          66,\n          -62,\n          70,\n          -13,\n          66,\n          -84,\n          70,\n          -55,\n          66,\n          -124,\n          20,\n          -25,\n          66,\n          -97,\n          42,\n          -81,\n          66,\n          -122,\n          -6,\n          25,\n          66,\n          125,\n          46,\n          -99,\n          66,\n          82,\n          41,\n          -33,\n          66,\n          101,\n          109,\n          103,\n          66,\n          76,\n          23,\n          -115,\n          66,\n          -99,\n          -4,\n          -26,\n          66,\n          105,\n          -121,\n          -55,\n          66,\n          -107,\n          83,\n          41,\n          66,\n          -125,\n          40,\n          -88,\n          66,\n          -107,\n          -101,\n          21,\n          66,\n          -119,\n          -55,\n          -47,\n          66,\n          -105,\n          46,\n          57,\n          66,\n          -80,\n          101,\n          -34,\n          66,\n          126,\n          -65,\n          -67,\n          66,\n          -67,\n          93,\n          33,\n          66,\n          104,\n          74,\n          94,\n          66,\n          -127,\n          100,\n          -17,\n          66,\n          -63,\n          -78,\n          6,\n          66,\n          -84,\n          124,\n          64,\n          66,\n          -103,\n          -118,\n          36,\n          66,\n          -118,\n          -72,\n          -57,\n          66,\n          -112,\n          34,\n          51,\n          66,\n          -126,\n          -20,\n          25,\n          
66,\n          102,\n          -123,\n          31,\n          66,\n          -82,\n          53,\n          5,\n          66,\n          -94,\n          51,\n          -79,\n          66,\n          126,\n          101,\n          -8,\n          66,\n          -83,\n          -117,\n          14,\n          66,\n          -104,\n          -61,\n          -72,\n          66,\n          -77,\n          -46,\n          -41,\n          66,\n          -123,\n          119,\n          -12,\n          66,\n          -69,\n          -27,\n          -91,\n          66,\n          -74,\n          117,\n          -51,\n          66,\n          -126,\n          -14,\n          27,\n          66,\n          115,\n          -82,\n          86,\n          66,\n          -121,\n          126,\n          41,\n          66,\n          -119,\n          64,\n          109,\n          66,\n          -126,\n          -42,\n          -20,\n          66,\n          -95,\n          -76,\n          85,\n          66,\n          -109,\n          91,\n          31,\n          66,\n          -109,\n          -45,\n          -107,\n          66,\n          -103,\n          -77,\n          -96,\n          66,\n          117,\n          -92,\n          -93,\n          66,\n          -97,\n          43,\n          -82,\n          66,\n          -79,\n          -62,\n          49,\n          66,\n          -95,\n          -126,\n          -28,\n          66,\n          -116,\n          83,\n          -18,\n          66,\n          -92,\n          -101,\n          -51,\n          66,\n          -110,\n          34,\n          85,\n          66,\n          -90,\n          -104,\n          0,\n          66,\n          -85,\n          -123,\n          -8,\n          66,\n          -115,\n          33,\n          -124,\n          66,\n          95,\n          74,\n          50,\n          66,\n          -82,\n          35,\n          27,\n          66,\n          -123,\n          -17,\n          18,\n  
        66,\n          -128,\n          31,\n          -25,\n          66,\n          -122,\n          -46,\n          5,\n          66,\n          -78,\n          52,\n          86,\n          66,\n          116,\n          52,\n          40,\n          66,\n          -67,\n          33,\n          31,\n          66,\n          -117,\n          17,\n          -13,\n          66,\n          -69,\n          -82,\n          -3,\n          66,\n          117,\n          -68,\n          42,\n          66,\n          -90,\n          127,\n          120,\n          66,\n          -77,\n          95,\n          92,\n          66,\n          99,\n          -121,\n          55,\n          66,\n          -82,\n          38,\n          36,\n          66,\n          105,\n          89,\n          5,\n          66,\n          -119,\n          22,\n          59\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          536700159,\n          253532396,\n          468991997,\n          584964882,\n          162396161,\n          597516692,\n          341248000,\n          575881263,\n          39\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          50298335,\n          25041377,\n          534523391,\n          917365552,\n          428847705,\n          748233233,\n          961896194,\n          805407828,\n          20\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 8379111172481454749,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    
{\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          389195638,\n          312558508,\n          73024403,\n          187600000,\n          191590157,\n          246298069,\n          715011560,\n          137214802,\n          266620403,\n          352753290,\n          628494987,\n          574451625,\n          731148569,\n          509361556,\n          77737387,\n          860768503,\n          597147906,\n          2497940,\n          413711482,\n          27677131,\n          28837294,\n          606762722,\n          424970127,\n          378199207,\n          52457400,\n          827644286,\n          79953675,\n          485158991,\n          55700526,\n          881841309,\n          349160806,\n          137759188,\n          69520163,\n          728916894,\n          538604482,\n          254972222,\n          365779964,\n          186739176,\n          628734895,\n          715013232,\n          367189606,\n          291204402,\n          29594\n        ],\n        \"cutValueData\": [\n          66,\n          -100,\n          -36,\n          -104,\n          66,\n          -99,\n          74,\n          -116,\n          66,\n          -113,\n          28,\n          111,\n          66,\n          94,\n          -22,\n          51,\n          66,\n          -77,\n          21,\n          -45,\n          66,\n          -83,\n          118,\n          -13,\n          66,\n          -59,\n          66,\n          -39,\n          66,\n          -115,\n          -5,\n          22,\n          66,\n          -74,\n          -70,\n          -91,\n          66,\n          -126,\n          66,\n          86,\n          66,\n   
       -128,\n          -113,\n          -60,\n          66,\n          76,\n          -115,\n          17,\n          66,\n          -109,\n          64,\n          -90,\n          66,\n          76,\n          10,\n          69,\n          66,\n          78,\n          23,\n          77,\n          66,\n          84,\n          66,\n          -108,\n          66,\n          -105,\n          86,\n          86,\n          66,\n          76,\n          -109,\n          -58,\n          66,\n          89,\n          57,\n          -43,\n          66,\n          -69,\n          73,\n          -2,\n          66,\n          -114,\n          -102,\n          66,\n          66,\n          -90,\n          -50,\n          41,\n          66,\n          99,\n          -40,\n          -18,\n          66,\n          -128,\n          76,\n          30,\n          66,\n          -87,\n          92,\n          75,\n          66,\n          -71,\n          118,\n          96,\n          66,\n          -79,\n          -70,\n          -29,\n          66,\n          76,\n          77,\n          -1,\n          66,\n          -120,\n          -104,\n          -77,\n          66,\n          -87,\n          -114,\n          120,\n          66,\n          97,\n          -1,\n          -59,\n          66,\n          -85,\n          59,\n          107,\n          66,\n          121,\n          59,\n          -8,\n          66,\n          -125,\n          -4,\n          117,\n          66,\n          -72,\n          -18,\n          72,\n          66,\n          -115,\n          -70,\n          -119,\n          66,\n          -127,\n          19,\n          -73,\n          66,\n          123,\n          -128,\n          124,\n          66,\n          -95,\n          -64,\n          -8,\n          66,\n          121,\n          48,\n          22,\n          66,\n          -108,\n          -11,\n          33,\n          66,\n          -100,\n          97,\n          -90,\n          66,\n         
 -109,\n          -99,\n          -65,\n          66,\n          113,\n          -27,\n          -54,\n          66,\n          104,\n          106,\n          10,\n          66,\n          -121,\n          92,\n          -128,\n          66,\n          96,\n          46,\n          6,\n          66,\n          -102,\n          35,\n          7,\n          66,\n          -112,\n          -110,\n          33,\n          66,\n          85,\n          -45,\n          -32,\n          66,\n          117,\n          -75,\n          -80,\n          66,\n          -112,\n          -18,\n          82,\n          66,\n          -128,\n          -60,\n          112,\n          66,\n          -106,\n          39,\n          47,\n          66,\n          -119,\n          -79,\n          -91,\n          66,\n          126,\n          -122,\n          106,\n          66,\n          -108,\n          29,\n          22,\n          66,\n          97,\n          6,\n          82,\n          66,\n          -87,\n          100,\n          75,\n          66,\n          97,\n          95,\n          57,\n          66,\n          -62,\n          -102,\n          78,\n          66,\n          -125,\n          -106,\n          -42,\n          66,\n          -124,\n          -41,\n          -10,\n          66,\n          -124,\n          80,\n          51,\n          66,\n          123,\n          74,\n          -28,\n          66,\n          -97,\n          42,\n          40,\n          66,\n          -64,\n          63,\n          0,\n          66,\n          -128,\n          -18,\n          -1,\n          66,\n          -64,\n          -33,\n          -66,\n          66,\n          -86,\n          -19,\n          71,\n          66,\n          -122,\n          -80,\n          -115,\n          66,\n          -127,\n          29,\n          -105,\n          66,\n          -117,\n          -36,\n          76,\n          66,\n          -103,\n          -29,\n          110,\n          66,\n      
    -102,\n          -96,\n          118,\n          66,\n          78,\n          86,\n          -96,\n          66,\n          -114,\n          65,\n          -16,\n          66,\n          73,\n          14,\n          -108,\n          66,\n          -111,\n          -46,\n          59,\n          66,\n          120,\n          67,\n          -47,\n          66,\n          -122,\n          123,\n          15,\n          66,\n          -112,\n          -67,\n          119,\n          66,\n          -100,\n          -127,\n          82,\n          66,\n          -88,\n          -94,\n          -68,\n          66,\n          111,\n          -127,\n          124,\n          66,\n          -103,\n          119,\n          101,\n          66,\n          -65,\n          -79,\n          -36,\n          66,\n          -118,\n          12,\n          70,\n          66,\n          -71,\n          -112,\n          -57,\n          66,\n          -119,\n          -116,\n          -86,\n          66,\n          -91,\n          -57,\n          -81,\n          66,\n          112,\n          -20,\n          58,\n          66,\n          108,\n          16,\n          -123,\n          66,\n          95,\n          -73,\n          8,\n          66,\n          -114,\n          -86,\n          -27,\n          66,\n          116,\n          -15,\n          -88,\n          66,\n          -93,\n          73,\n          -126,\n          66,\n          91,\n          -103,\n          77,\n          66,\n          -118,\n          -81,\n          -32,\n          66,\n          -118,\n          -9,\n          42,\n          66,\n          117,\n          -32,\n          27,\n          66,\n          120,\n          -123,\n          57,\n          66,\n          107,\n          -1,\n          77,\n          66,\n          -79,\n          -126,\n          -104,\n          66,\n          -101,\n          -85,\n          12,\n          66,\n          -104,\n          -101,\n          -23,\n     
     66,\n          -67,\n          -121,\n          -54,\n          66,\n          -75,\n          -24,\n          -50,\n          66,\n          86,\n          71,\n          -9,\n          66,\n          -115,\n          110,\n          -79,\n          66,\n          -101,\n          -42,\n          0,\n          66,\n          -94,\n          73,\n          -69,\n          66,\n          -121,\n          9,\n          -2,\n          66,\n          125,\n          8,\n          115,\n          66,\n          112,\n          23,\n          -92,\n          66,\n          -122,\n          65,\n          44,\n          66,\n          -122,\n          -16,\n          102,\n          66,\n          -111,\n          101,\n          -58,\n          66,\n          -89,\n          -62,\n          79,\n          66,\n          127,\n          110,\n          -63,\n          66,\n          -94,\n          39,\n          110,\n          66,\n          -101,\n          -101,\n          41,\n          66,\n          -125,\n          -120,\n          106,\n          66,\n          -112,\n          -23,\n          -29,\n          66,\n          -119,\n          68,\n          -86,\n          66,\n          118,\n          -104,\n          -121,\n          66,\n          118,\n          -53,\n          -34,\n          66,\n          102,\n          -28,\n          -120,\n          66,\n          -108,\n          65,\n          104,\n          66,\n          110,\n          36,\n          -27,\n          66,\n          -87,\n          102,\n          56,\n          66,\n          -92,\n          37,\n          43,\n          66,\n          -74,\n          93,\n          76,\n          66,\n          -100,\n          102,\n          35,\n          66,\n          110,\n          -68,\n          -117,\n          66,\n          -111,\n          -21,\n          -115,\n          66,\n          -111,\n          -119,\n          52,\n          66,\n          -128,\n          -62,\n        
  6,\n          66,\n          118,\n          116,\n          -121,\n          66,\n          -92,\n          107,\n          27,\n          66,\n          -72,\n          -59,\n          -86,\n          66,\n          -100,\n          -36,\n          127,\n          66,\n          -85,\n          -58,\n          44,\n          66,\n          110,\n          125,\n          -61,\n          66,\n          -83,\n          17,\n          91,\n          66,\n          117,\n          52,\n          -26,\n          66,\n          112,\n          -84,\n          87,\n          66,\n          -116,\n          -124,\n          18,\n          66,\n          117,\n          28,\n          -37,\n          66,\n          111,\n          118,\n          40,\n          66,\n          -110,\n          -120,\n          -120,\n          66,\n          -128,\n          82,\n          107,\n          66,\n          -123,\n          -43,\n          -30,\n          66,\n          -100,\n          22,\n          -72,\n          66,\n          -100,\n          -75,\n          -87,\n          66,\n          -106,\n          119,\n          84,\n          66,\n          -93,\n          -18,\n          114,\n          66,\n          -63,\n          -62,\n          56,\n          66,\n          -119,\n          101,\n          -40,\n          66,\n          -82,\n          -73,\n          113,\n          66,\n          -111,\n          91,\n          104,\n          66,\n          106,\n          -50,\n          56,\n          66,\n          -112,\n          12,\n          122,\n          66,\n          72,\n          78,\n          34,\n          66,\n          -103,\n          107,\n          109,\n          66,\n          102,\n          73,\n          118,\n          66,\n          -117,\n          5,\n          -9,\n          66,\n          -115,\n          93,\n          -72,\n          66,\n          -78,\n          92,\n          -102,\n          66,\n          112,\n          
-60,\n          -37,\n          66,\n          -119,\n          -108,\n          85,\n          66,\n          120,\n          -107,\n          33,\n          66,\n          71,\n          125,\n          -23,\n          66,\n          -74,\n          -92,\n          30,\n          66,\n          -79,\n          -4,\n          -89,\n          66,\n          -108,\n          121,\n          81,\n          66,\n          -103,\n          115,\n          104,\n          66,\n          -128,\n          -21,\n          114,\n          66,\n          108,\n          -42,\n          -73,\n          66,\n          -105,\n          29,\n          -58,\n          66,\n          106,\n          -75,\n          -106,\n          66,\n          -106,\n          -24,\n          49,\n          66,\n          114,\n          -122,\n          -15,\n          66,\n          -125,\n          -29,\n          -87,\n          66,\n          -105,\n          93,\n          39,\n          66,\n          -106,\n          -92,\n          78,\n          66,\n          91,\n          100,\n          61,\n          66,\n          -106,\n          -68,\n          -61,\n          66,\n          -118,\n          -14,\n          105,\n          66,\n          -92,\n          -28,\n          -38,\n          66,\n          102,\n          -106,\n          23,\n          66,\n          -110,\n          -35,\n          125,\n          66,\n          -107,\n          104,\n          -18,\n          66,\n          -103,\n          -55,\n          -46,\n          66,\n          -126,\n          118,\n          -20,\n          66,\n          -102,\n          -4,\n          38,\n          66,\n          102,\n          -60,\n          -73,\n          66,\n          -80,\n          -4,\n          -36,\n          66,\n          -95,\n          -37,\n          93,\n          66,\n          -117,\n          21,\n          -7,\n          66,\n          -86,\n          124,\n          102,\n          66,\n        
  -92,\n          116,\n          -102,\n          66,\n          -122,\n          30,\n          -53,\n          66,\n          -85,\n          102,\n          -100,\n          66,\n          -93,\n          -123,\n          76,\n          66,\n          -120,\n          0,\n          88,\n          66,\n          -121,\n          71,\n          -70,\n          66,\n          -66,\n          -68,\n          -26,\n          66,\n          -128,\n          121,\n          17,\n          66,\n          -123,\n          -42,\n          47,\n          66,\n          -83,\n          -121,\n          60,\n          66,\n          -120,\n          -66,\n          26,\n          66,\n          -95,\n          -51,\n          53,\n          66,\n          -83,\n          -43,\n          102,\n          66,\n          -87,\n          -56,\n          37,\n          66,\n          -84,\n          63,\n          -101,\n          66,\n          -104,\n          42,\n          -25,\n          66,\n          -73,\n          67,\n          52,\n          66,\n          -61,\n          -64,\n          79,\n          66,\n          85,\n          -104,\n          22,\n          66,\n          111,\n          -69,\n          45,\n          66,\n          -114,\n          -82,\n          -108,\n          66,\n          -109,\n          2,\n          -111,\n          66,\n          -108,\n          -52,\n          48,\n          66,\n          -84,\n          121,\n          -99,\n          66,\n          -89,\n          -76,\n          -35,\n          66,\n          123,\n          -76,\n          117,\n          66,\n          -64,\n          8,\n          36,\n          66,\n          -80,\n          -114,\n          -94,\n          66,\n          -125,\n          -29,\n          -64,\n          66,\n          -91,\n          -69,\n          87,\n          66,\n          -89,\n          -115,\n          76,\n          66,\n          -111,\n          26,\n          69,\n          
66,\n          -120,\n          110,\n          30,\n          66,\n          -108,\n          115,\n          126,\n          66,\n          -89,\n          -2,\n          87,\n          66,\n          -91,\n          -65,\n          -109,\n          66,\n          -71,\n          -22,\n          69,\n          66,\n          -121,\n          90,\n          -70,\n          66,\n          -107,\n          -82,\n          -22,\n          66,\n          -98,\n          -44,\n          -9,\n          66,\n          -92,\n          -72,\n          -105,\n          66,\n          -117,\n          4,\n          -122,\n          66,\n          102,\n          -114,\n          -78,\n          66,\n          -123,\n          -99,\n          -60,\n          66,\n          115,\n          -93,\n          -68,\n          66,\n          -125,\n          101,\n          -53,\n          66,\n          -116,\n          15,\n          -57,\n          66,\n          -98,\n          11,\n          -88,\n          66,\n          98,\n          -54,\n          -67,\n          66,\n          -85,\n          -127,\n          21,\n          66,\n          -74,\n          79,\n          -37,\n          66,\n          -92,\n          47,\n          -9,\n          66,\n          -124,\n          -96,\n          -60,\n          66,\n          127,\n          97,\n          -13\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          699400191,\n          266607598,\n          524071438,\n          517151816,\n          29220877,\n          271356121,\n          145756769,\n          230801804,\n          65\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          783286207,\n          1072594924,\n          532522526,\n          32905591,\n          27803799,\n          1042288659,\n          
568983905,\n          415616650,\n          1543\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5968712885689118287,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          486694461,\n          64721605,\n          185461313,\n          126593884,\n          174283160,\n          400548254,\n          647367783,\n          845295726,\n          419418155,\n          139086039,\n          748996396,\n          724910844,\n          710223360,\n          140803803,\n          488739332,\n          152216485,\n          586379063,\n          803528157,\n          474182790,\n          466008553,\n          815466476,\n          849697412,\n          688632335,\n          479168762,\n          407199663,\n          364230794,\n          772460320,\n          511074611,\n          736421702,\n          391288059,\n          882800883,\n          843102518,\n          34716885,\n          507559525,\n          595511958,\n          752623314,\n          536247220,\n          706345695,\n          118300011,\n          459173939,\n          577881444,\n          740575421,\n          29586\n        ],\n        \"cutValueData\": [\n          66,\n          -98,\n          -25,\n          -117,\n          66,\n          -69,\n          81,\n       
   120,\n          66,\n          103,\n          9,\n          1,\n          66,\n          79,\n          2,\n          21,\n          66,\n          -100,\n          -86,\n          -13,\n          66,\n          -82,\n          81,\n          -93,\n          66,\n          -86,\n          87,\n          111,\n          66,\n          -84,\n          67,\n          -40,\n          66,\n          -79,\n          53,\n          8,\n          66,\n          -123,\n          122,\n          -24,\n          66,\n          90,\n          -57,\n          97,\n          66,\n          -102,\n          105,\n          22,\n          66,\n          113,\n          112,\n          73,\n          66,\n          -114,\n          79,\n          49,\n          66,\n          126,\n          -84,\n          70,\n          66,\n          -114,\n          -27,\n          114,\n          66,\n          -100,\n          87,\n          37,\n          66,\n          -98,\n          -33,\n          25,\n          66,\n          -115,\n          64,\n          7,\n          66,\n          -123,\n          28,\n          102,\n          66,\n          125,\n          -8,\n          -64,\n          66,\n          75,\n          -80,\n          69,\n          66,\n          116,\n          -90,\n          79,\n          66,\n          80,\n          -91,\n          -51,\n          66,\n          -75,\n          -116,\n          89,\n          66,\n          120,\n          26,\n          67,\n          66,\n          -104,\n          49,\n          72,\n          66,\n          92,\n          49,\n          79,\n          66,\n          -97,\n          -63,\n          3,\n          66,\n          -112,\n          127,\n          25,\n          66,\n          -107,\n          -124,\n          -112,\n          66,\n          -92,\n          -6,\n          -87,\n          66,\n          -106,\n          -30,\n          -8,\n          66,\n          105,\n          109,\n          83,\n       
   66,\n          -72,\n          -47,\n          19,\n          66,\n          -74,\n          77,\n          34,\n          66,\n          -108,\n          58,\n          35,\n          66,\n          115,\n          43,\n          29,\n          66,\n          -83,\n          87,\n          82,\n          66,\n          -90,\n          115,\n          72,\n          66,\n          -95,\n          -88,\n          44,\n          66,\n          121,\n          127,\n          82,\n          66,\n          -95,\n          -44,\n          -64,\n          66,\n          -125,\n          96,\n          53,\n          66,\n          -77,\n          29,\n          27,\n          66,\n          -113,\n          96,\n          74,\n          66,\n          -98,\n          91,\n          105,\n          66,\n          117,\n          -85,\n          97,\n          66,\n          -112,\n          -19,\n          30,\n          66,\n          -126,\n          -34,\n          -28,\n          66,\n          101,\n          104,\n          -61,\n          66,\n          -126,\n          -61,\n          -82,\n          66,\n          -100,\n          44,\n          61,\n          66,\n          94,\n          98,\n          -61,\n          66,\n          -100,\n          -30,\n          67,\n          66,\n          -102,\n          63,\n          82,\n          66,\n          113,\n          -9,\n          -86,\n          66,\n          -114,\n          -108,\n          1,\n          66,\n          105,\n          46,\n          -46,\n          66,\n          83,\n          -14,\n          2,\n          66,\n          -95,\n          59,\n          35,\n          66,\n          -106,\n          -29,\n          -43,\n          66,\n          111,\n          -50,\n          79,\n          66,\n          93,\n          53,\n          94,\n          66,\n          -110,\n          -89,\n          95,\n          66,\n          -103,\n          -98,\n          117,\n          66,\n    
      111,\n          74,\n          -110,\n          66,\n          -71,\n          -38,\n          18,\n          66,\n          117,\n          -90,\n          49,\n          66,\n          -73,\n          125,\n          74,\n          66,\n          83,\n          -83,\n          104,\n          66,\n          83,\n          -22,\n          101,\n          66,\n          100,\n          -4,\n          93,\n          66,\n          81,\n          59,\n          28,\n          66,\n          110,\n          63,\n          -13,\n          66,\n          -85,\n          21,\n          83,\n          66,\n          -118,\n          6,\n          -120,\n          66,\n          -100,\n          24,\n          -18,\n          66,\n          -103,\n          65,\n          -19,\n          66,\n          -79,\n          110,\n          -35,\n          66,\n          -109,\n          106,\n          -77,\n          66,\n          -91,\n          0,\n          -119,\n          66,\n          -100,\n          -12,\n          -33,\n          66,\n          -89,\n          -9,\n          -11,\n          66,\n          116,\n          53,\n          54,\n          66,\n          105,\n          -126,\n          -117,\n          66,\n          -116,\n          -58,\n          -45,\n          66,\n          -101,\n          -49,\n          -76,\n          66,\n          -105,\n          79,\n          -13,\n          66,\n          -119,\n          -22,\n          97,\n          66,\n          -78,\n          -62,\n          -67,\n          66,\n          -69,\n          101,\n          24,\n          66,\n          111,\n          -105,\n          36,\n          66,\n          -102,\n          -30,\n          -127,\n          66,\n          93,\n          119,\n          -16,\n          66,\n          -75,\n          -83,\n          15,\n          66,\n          -94,\n          67,\n          -30,\n          66,\n          -86,\n          -79,\n          118,\n          66,\n 
         -95,\n          49,\n          -18,\n          66,\n          -117,\n          99,\n          11,\n          66,\n          -74,\n          -94,\n          114,\n          66,\n          -62,\n          28,\n          45,\n          66,\n          70,\n          64,\n          -42,\n          66,\n          123,\n          -126,\n          32,\n          66,\n          -119,\n          -99,\n          -60,\n          66,\n          116,\n          9,\n          -100,\n          66,\n          -114,\n          -81,\n          -28,\n          66,\n          -76,\n          82,\n          -13,\n          66,\n          -115,\n          84,\n          -103,\n          66,\n          -102,\n          119,\n          -31,\n          66,\n          -87,\n          -103,\n          -19,\n          66,\n          117,\n          116,\n          -73,\n          66,\n          106,\n          -47,\n          -7,\n          66,\n          -121,\n          -124,\n          26,\n          66,\n          117,\n          104,\n          -4,\n          66,\n          -121,\n          21,\n          6,\n          66,\n          -101,\n          -80,\n          23,\n          66,\n          106,\n          -20,\n          9,\n          66,\n          -64,\n          36,\n          -4,\n          66,\n          -117,\n          -28,\n          69,\n          66,\n          -91,\n          79,\n          32,\n          66,\n          79,\n          13,\n          85,\n          66,\n          -123,\n          41,\n          -22,\n          66,\n          108,\n          40,\n          15,\n          66,\n          98,\n          -33,\n          114,\n          66,\n          -76,\n          -99,\n          -123,\n          66,\n          93,\n          55,\n          71,\n          66,\n          98,\n          125,\n          -64,\n          66,\n          -107,\n          30,\n          61,\n          66,\n          121,\n          -111,\n          -58,\n          66,\n      
    -88,\n          -5,\n          79,\n          66,\n          -65,\n          -4,\n          40,\n          66,\n          101,\n          -39,\n          127,\n          66,\n          -119,\n          -77,\n          -124,\n          66,\n          -105,\n          -38,\n          106,\n          66,\n          -74,\n          18,\n          -48,\n          66,\n          -103,\n          99,\n          64,\n          66,\n          -62,\n          -71,\n          92,\n          66,\n          -99,\n          121,\n          17,\n          66,\n          68,\n          85,\n          -118,\n          66,\n          -63,\n          -102,\n          -59,\n          66,\n          -90,\n          -57,\n          50,\n          66,\n          -117,\n          110,\n          -54,\n          66,\n          -83,\n          60,\n          107,\n          66,\n          110,\n          -60,\n          1,\n          66,\n          -110,\n          72,\n          -32,\n          66,\n          119,\n          2,\n          -17,\n          66,\n          -89,\n          -73,\n          15,\n          66,\n          -106,\n          -80,\n          -44,\n          66,\n          80,\n          57,\n          68,\n          66,\n          115,\n          0,\n          122,\n          66,\n          -99,\n          19,\n          104,\n          66,\n          -102,\n          -28,\n          27,\n          66,\n          115,\n          2,\n          4,\n          66,\n          123,\n          16,\n          -84,\n          66,\n          -78,\n          38,\n          -32,\n          66,\n          -124,\n          -62,\n          -77,\n          66,\n          -119,\n          -111,\n          -127,\n          66,\n          88,\n          -115,\n          89,\n          66,\n          -82,\n          -78,\n          -2,\n          66,\n          114,\n          120,\n          73,\n          66,\n          -80,\n          -128,\n          -71,\n          66,\n          
-127,\n          73,\n          -68,\n          66,\n          -92,\n          113,\n          14,\n          66,\n          -62,\n          -73,\n          98,\n          66,\n          -113,\n          1,\n          38,\n          66,\n          -85,\n          67,\n          108,\n          66,\n          -90,\n          24,\n          -9,\n          66,\n          -118,\n          111,\n          71,\n          66,\n          122,\n          -64,\n          -10,\n          66,\n          -107,\n          5,\n          -76,\n          66,\n          90,\n          -81,\n          77,\n          66,\n          -90,\n          123,\n          119,\n          66,\n          -80,\n          -27,\n          -65,\n          66,\n          -118,\n          4,\n          99,\n          66,\n          -126,\n          -59,\n          -16,\n          66,\n          110,\n          -98,\n          -98,\n          66,\n          -105,\n          -41,\n          42,\n          66,\n          -112,\n          -58,\n          88,\n          66,\n          -90,\n          -71,\n          110,\n          66,\n          123,\n          -111,\n          -112,\n          66,\n          79,\n          -107,\n          -99,\n          66,\n          -102,\n          -36,\n          -43,\n          66,\n          -100,\n          -31,\n          44,\n          66,\n          108,\n          43,\n          -100,\n          66,\n          -94,\n          107,\n          114,\n          66,\n          -128,\n          -52,\n          -76,\n          66,\n          -126,\n          69,\n          122,\n          66,\n          -85,\n          -106,\n          -104,\n          66,\n          125,\n          -43,\n          57,\n          66,\n          -75,\n          120,\n          -102,\n          66,\n          -102,\n          -38,\n          -39,\n          66,\n          86,\n          -12,\n          -36,\n          66,\n          115,\n          59,\n          -114,\n          
66,\n          -125,\n          -30,\n          -39,\n          66,\n          -108,\n          32,\n          60,\n          66,\n          -121,\n          -108,\n          117,\n          66,\n          -119,\n          -43,\n          56,\n          66,\n          -122,\n          -19,\n          -44,\n          66,\n          -124,\n          -124,\n          -97,\n          66,\n          -127,\n          -125,\n          69,\n          66,\n          107,\n          -27,\n          -24,\n          66,\n          -104,\n          85,\n          63,\n          66,\n          -121,\n          -13,\n          28,\n          66,\n          -121,\n          -117,\n          88,\n          66,\n          -80,\n          102,\n          2,\n          66,\n          -81,\n          62,\n          35,\n          66,\n          -110,\n          -36,\n          59,\n          66,\n          -123,\n          -125,\n          54,\n          66,\n          69,\n          99,\n          55,\n          66,\n          104,\n          -10,\n          26,\n          66,\n          -105,\n          102,\n          10,\n          66,\n          -101,\n          7,\n          -61,\n          66,\n          -103,\n          67,\n          29,\n          66,\n          -77,\n          -48,\n          69,\n          66,\n          -117,\n          106,\n          -106,\n          66,\n          -112,\n          -99,\n          -83,\n          66,\n          -116,\n          32,\n          -121,\n          66,\n          -107,\n          127,\n          -93,\n          66,\n          -101,\n          116,\n          59,\n          66,\n          -71,\n          -40,\n          53,\n          66,\n          -101,\n          13,\n          44,\n          66,\n          -89,\n          12,\n          -83,\n          66,\n          -92,\n          -97,\n          10,\n          66,\n          108,\n          -61,\n          -69,\n          66,\n          -116,\n          -75,\n          
-1,\n          66,\n          -100,\n          2,\n          -103,\n          66,\n          -77,\n          -128,\n          -66,\n          66,\n          -104,\n          51,\n          -2,\n          66,\n          -124,\n          88,\n          21,\n          66,\n          -115,\n          117,\n          12,\n          66,\n          -123,\n          -35,\n          -74,\n          66,\n          112,\n          -121,\n          29,\n          66,\n          104,\n          103,\n          109,\n          66,\n          -91,\n          -81,\n          -86,\n          66,\n          -126,\n          8,\n          -70,\n          66,\n          -117,\n          107,\n          -94,\n          66,\n          79,\n          47,\n          -34,\n          66,\n          -98,\n          -67,\n          -25,\n          66,\n          -93,\n          125,\n          -112,\n          66,\n          -128,\n          17,\n          -120,\n          66,\n          -110,\n          125,\n          107,\n          66,\n          -102,\n          -72,\n          123,\n          66,\n          -101,\n          -108,\n          -15,\n          66,\n          -112,\n          -124,\n          90,\n          66,\n          -91,\n          52,\n          35,\n          66,\n          -85,\n          125,\n          113,\n          66,\n          110,\n          -79,\n          -55,\n          66,\n          -125,\n          -97,\n          3,\n          66,\n          -72,\n          -20,\n          96,\n          66,\n          125,\n          -3,\n          -34,\n          66,\n          -88,\n          8,\n          21,\n          66,\n          -113,\n          -96,\n          -33,\n          66,\n          114,\n          7,\n          -17,\n          66,\n          -96,\n          80,\n          108\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          
0,\n          1,\n          255,\n          1037786111,\n          940570355,\n          875282198,\n          629598939,\n          753208112,\n          166751039,\n          69043420,\n          857944212,\n          268\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          989558751,\n          1007615901,\n          268566028,\n          363217617,\n          544743806,\n          569421117,\n          36571843,\n          41980064,\n          2520\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6987922936086641544,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          393690042,\n          880352886,\n          359590950,\n          332912875,\n          609455066,\n          359897695,\n          649702387,\n          206670581,\n          1058225711,\n          106532297,\n          391857894,\n          779450049,\n          263108054,\n          317438891,\n          1066597970,\n          582657454,\n          882284227,\n          723598553,\n          523832643,\n          318551635,\n          488105939,\n          220825146,\n          208485375,\n          446417327,\n          536656307,\n          303147217,\n          770792693,\n          874300963,\n          1038980577,\n        
  208129733,\n          114362154,\n          848337383,\n          188442239,\n          391986131,\n          234810543,\n          1060019914,\n          199746991,\n          313499211,\n          221189595,\n          452579125,\n          395413097,\n          64720111,\n          18\n        ],\n        \"cutValueData\": [\n          66,\n          -127,\n          85,\n          -2,\n          66,\n          -81,\n          42,\n          25,\n          66,\n          73,\n          -117,\n          69,\n          66,\n          80,\n          85,\n          -76,\n          66,\n          -74,\n          -126,\n          118,\n          66,\n          -85,\n          120,\n          -93,\n          66,\n          -114,\n          59,\n          -43,\n          66,\n          -113,\n          121,\n          -45,\n          66,\n          -71,\n          -6,\n          27,\n          66,\n          -109,\n          63,\n          -90,\n          66,\n          -83,\n          62,\n          55,\n          66,\n          -85,\n          3,\n          4,\n          66,\n          -75,\n          100,\n          -24,\n          66,\n          -94,\n          39,\n          48,\n          66,\n          -109,\n          48,\n          -59,\n          66,\n          88,\n          27,\n          -26,\n          66,\n          -106,\n          -92,\n          -35,\n          66,\n          93,\n          -42,\n          26,\n          66,\n          -92,\n          22,\n          69,\n          66,\n          -78,\n          100,\n          60,\n          66,\n          -93,\n          -108,\n          -8,\n          66,\n          -100,\n          43,\n          -51,\n          66,\n          -75,\n          -49,\n          59,\n          66,\n          -110,\n          120,\n          -100,\n          66,\n          -64,\n          -7,\n          11,\n          66,\n          -120,\n          0,\n          -55,\n          66,\n          77,\n          -84,\n     
     -17,\n          66,\n          -112,\n          -45,\n          41,\n          66,\n          -110,\n          -28,\n          119,\n          66,\n          72,\n          -76,\n          90,\n          66,\n          -117,\n          14,\n          -111,\n          66,\n          -87,\n          92,\n          114,\n          66,\n          -82,\n          126,\n          -50,\n          66,\n          -108,\n          -85,\n          24,\n          66,\n          -86,\n          -25,\n          102,\n          66,\n          98,\n          19,\n          -62,\n          66,\n          107,\n          -1,\n          -4,\n          66,\n          -119,\n          -100,\n          95,\n          66,\n          94,\n          48,\n          119,\n          66,\n          85,\n          29,\n          -52,\n          66,\n          92,\n          -13,\n          -25,\n          66,\n          119,\n          -95,\n          110,\n          66,\n          -75,\n          -60,\n          11,\n          66,\n          92,\n          121,\n          -38,\n          66,\n          83,\n          84,\n          -85,\n          66,\n          -68,\n          -42,\n          36,\n          66,\n          -124,\n          9,\n          -115,\n          66,\n          -100,\n          34,\n          -18,\n          66,\n          -113,\n          -125,\n          72,\n          66,\n          -100,\n          -63,\n          -100,\n          66,\n          124,\n          23,\n          53,\n          66,\n          -96,\n          -61,\n          -120,\n          66,\n          -104,\n          92,\n          5,\n          66,\n          115,\n          33,\n          11,\n          66,\n          -68,\n          63,\n          121,\n          66,\n          -73,\n          2,\n          -88,\n          66,\n          -62,\n          21,\n          86,\n          66,\n          109,\n          -116,\n          116,\n          66,\n          -86,\n          9,\n          
112,\n          66,\n          -112,\n          106,\n          -90,\n          66,\n          -118,\n          -27,\n          -104,\n          66,\n          -88,\n          5,\n          106,\n          66,\n          -107,\n          32,\n          -114,\n          66,\n          -72,\n          21,\n          -37,\n          66,\n          -72,\n          -48,\n          123,\n          66,\n          116,\n          -128,\n          32,\n          66,\n          -97,\n          16,\n          58,\n          66,\n          -107,\n          -43,\n          -68,\n          66,\n          -74,\n          -95,\n          -59,\n          66,\n          -107,\n          -11,\n          -69,\n          66,\n          -83,\n          -14,\n          -93,\n          66,\n          95,\n          -2,\n          37,\n          66,\n          95,\n          -82,\n          123,\n          66,\n          -127,\n          22,\n          -68,\n          66,\n          -125,\n          97,\n          35,\n          66,\n          -83,\n          -67,\n          -4,\n          66,\n          -79,\n          -80,\n          83,\n          66,\n          72,\n          -1,\n          61,\n          66,\n          73,\n          52,\n          -52,\n          66,\n          -105,\n          -118,\n          79,\n          66,\n          -114,\n          -72,\n          -29,\n          66,\n          81,\n          -3,\n          58,\n          66,\n          -104,\n          -51,\n          10,\n          66,\n          -74,\n          -99,\n          59,\n          66,\n          -62,\n          -9,\n          -106,\n          66,\n          -70,\n          96,\n          25,\n          66,\n          88,\n          -82,\n          -117,\n          66,\n          -82,\n          123,\n          -53,\n          66,\n          72,\n          33,\n          -73,\n          66,\n          -123,\n          -98,\n          -120,\n          66,\n          -65,\n          64,\n          
16,\n          66,\n          -123,\n          60,\n          -8,\n          66,\n          -87,\n          -94,\n          63,\n          66,\n          -123,\n          24,\n          -81,\n          66,\n          -127,\n          -23,\n          101,\n          66,\n          106,\n          -78,\n          -23,\n          66,\n          -73,\n          124,\n          105,\n          66,\n          90,\n          48,\n          15,\n          66,\n          87,\n          -117,\n          -55,\n          66,\n          -101,\n          113,\n          -1,\n          66,\n          -127,\n          -128,\n          -73,\n          66,\n          -126,\n          -36,\n          -4,\n          66,\n          -116,\n          115,\n          -82,\n          66,\n          -128,\n          -110,\n          91,\n          66,\n          -94,\n          -83,\n          46,\n          66,\n          99,\n          -112,\n          83,\n          66,\n          -96,\n          50,\n          18,\n          66,\n          88,\n          54,\n          88,\n          66,\n          -71,\n          42,\n          -103,\n          66,\n          -85,\n          -48,\n          76,\n          66,\n          -67,\n          9,\n          104,\n          66,\n          -62,\n          -6,\n          89,\n          66,\n          -114,\n          121,\n          65,\n          66,\n          -82,\n          -105,\n          37,\n          66,\n          -82,\n          87,\n          92,\n          66,\n          126,\n          -81,\n          95,\n          66,\n          104,\n          -44,\n          -8,\n          66,\n          -69,\n          -110,\n          -97,\n          66,\n          -81,\n          -26,\n          -13,\n          66,\n          -99,\n          -24,\n          115,\n          66,\n          -83,\n          -127,\n          113,\n          66,\n          76,\n          26,\n          39,\n          66,\n          99,\n          -115,\n          
-105,\n          66,\n          -94,\n          42,\n          -26,\n          66,\n          -72,\n          -80,\n          -96,\n          66,\n          122,\n          -103,\n          71,\n          66,\n          78,\n          -82,\n          98,\n          66,\n          98,\n          31,\n          -82,\n          66,\n          -124,\n          -28,\n          -126,\n          66,\n          119,\n          -87,\n          114,\n          66,\n          125,\n          -76,\n          -43,\n          66,\n          -122,\n          -96,\n          37,\n          66,\n          -93,\n          -51,\n          64,\n          66,\n          -98,\n          -111,\n          95,\n          66,\n          95,\n          -81,\n          -125,\n          66,\n          -62,\n          -13,\n          -104,\n          66,\n          -110,\n          -23,\n          30,\n          66,\n          -100,\n          25,\n          -2,\n          66,\n          -99,\n          -1,\n          -13,\n          66,\n          -116,\n          61,\n          -49,\n          66,\n          -96,\n          126,\n          35,\n          66,\n          -103,\n          117,\n          124,\n          66,\n          115,\n          -88,\n          -14,\n          66,\n          -107,\n          67,\n          -1,\n          66,\n          -69,\n          42,\n          64,\n          66,\n          -127,\n          -106,\n          83,\n          66,\n          -83,\n          -81,\n          3,\n          66,\n          -93,\n          -105,\n          -27,\n          66,\n          -111,\n          108,\n          126,\n          66,\n          -127,\n          -100,\n          14,\n          66,\n          120,\n          110,\n          33,\n          66,\n          -115,\n          115,\n          -48,\n          66,\n          -83,\n          118,\n          -93,\n          66,\n          -97,\n          40,\n          -58,\n          66,\n          -112,\n          
-67,\n          -97,\n          66,\n          -66,\n          5,\n          -23,\n          66,\n          -127,\n          83,\n          30,\n          66,\n          90,\n          121,\n          -24,\n          66,\n          -105,\n          103,\n          55,\n          66,\n          -122,\n          4,\n          -57,\n          66,\n          -117,\n          125,\n          81,\n          66,\n          -103,\n          94,\n          -4,\n          66,\n          -125,\n          -28,\n          114,\n          66,\n          -83,\n          116,\n          -44,\n          66,\n          -108,\n          68,\n          -128,\n          66,\n          -85,\n          -57,\n          -84,\n          66,\n          -112,\n          99,\n          -62,\n          66,\n          -83,\n          -124,\n          -94,\n          66,\n          109,\n          17,\n          -57,\n          66,\n          124,\n          109,\n          -34,\n          66,\n          115,\n          -119,\n          -13,\n          66,\n          -91,\n          12,\n          34,\n          66,\n          -105,\n          -46,\n          92,\n          66,\n          -125,\n          119,\n          -9,\n          66,\n          -104,\n          -29,\n          68,\n          66,\n          -96,\n          103,\n          63,\n          66,\n          86,\n          95,\n          30,\n          66,\n          -67,\n          70,\n          36,\n          66,\n          112,\n          105,\n          14,\n          66,\n          98,\n          -50,\n          -8,\n          66,\n          -118,\n          67,\n          85,\n          66,\n          -102,\n          -126,\n          78,\n          66,\n          -102,\n          58,\n          92,\n          66,\n          106,\n          40,\n          -84,\n          66,\n          -112,\n          -27,\n          119,\n          66,\n          -95,\n          -16,\n          -106,\n          66,\n          105,\n        
  18,\n          -28,\n          66,\n          -117,\n          -43,\n          -7,\n          66,\n          117,\n          -58,\n          -31,\n          66,\n          -106,\n          -40,\n          -35,\n          66,\n          -117,\n          -69,\n          -84,\n          66,\n          118,\n          -99,\n          109,\n          66,\n          -75,\n          -26,\n          23,\n          66,\n          -120,\n          85,\n          -59,\n          66,\n          -82,\n          24,\n          -107,\n          66,\n          -128,\n          -127,\n          101,\n          66,\n          -112,\n          -109,\n          20,\n          66,\n          95,\n          83,\n          55,\n          66,\n          -105,\n          71,\n          108,\n          66,\n          78,\n          100,\n          30,\n          66,\n          -102,\n          -90,\n          13,\n          66,\n          -103,\n          -106,\n          -22,\n          66,\n          -100,\n          -5,\n          103,\n          66,\n          -115,\n          30,\n          109,\n          66,\n          -95,\n          122,\n          -98,\n          66,\n          100,\n          -15,\n          -33,\n          66,\n          -95,\n          -52,\n          50,\n          66,\n          -80,\n          -46,\n          -34,\n          66,\n          -104,\n          11,\n          64,\n          66,\n          97,\n          -90,\n          -17,\n          66,\n          -86,\n          -90,\n          48,\n          66,\n          -112,\n          -14,\n          27,\n          66,\n          81,\n          64,\n          -99,\n          66,\n          93,\n          26,\n          -55,\n          66,\n          -116,\n          -64,\n          -71,\n          66,\n          112,\n          102,\n          -127,\n          66,\n          90,\n          39,\n          25,\n          66,\n          -94,\n          -66,\n          -110,\n          66,\n          82,\n 
         14,\n          93,\n          66,\n          -120,\n          -91,\n          60,\n          66,\n          -62,\n          -115,\n          -107,\n          66,\n          -115,\n          -39,\n          61,\n          66,\n          -81,\n          -96,\n          121,\n          66,\n          -72,\n          59,\n          -5,\n          66,\n          -96,\n          119,\n          21,\n          66,\n          -79,\n          95,\n          75,\n          66,\n          -128,\n          85,\n          78,\n          66,\n          -105,\n          -68,\n          -27,\n          66,\n          -70,\n          63,\n          109,\n          66,\n          -87,\n          126,\n          114,\n          66,\n          -94,\n          71,\n          85,\n          66,\n          124,\n          19,\n          88,\n          66,\n          -100,\n          30,\n          122,\n          66,\n          127,\n          11,\n          -60,\n          66,\n          -114,\n          122,\n          17,\n          66,\n          86,\n          57,\n          68,\n          66,\n          118,\n          67,\n          -36,\n          66,\n          -103,\n          -19,\n          2,\n          66,\n          106,\n          -49,\n          15,\n          66,\n          -79,\n          117,\n          -127,\n          66,\n          125,\n          -94,\n          71,\n          66,\n          112,\n          -50,\n          53,\n          66,\n          -93,\n          101,\n          -81,\n          66,\n          -66,\n          5,\n          80,\n          66,\n          -89,\n          -19,\n          -73,\n          66,\n          -109,\n          68,\n          -38,\n          66,\n          -99,\n          -59,\n          -109,\n          66,\n          -122,\n          -94,\n          -52,\n          66,\n          90,\n          -52,\n          -15,\n          66,\n          -110,\n          35,\n          41,\n          66,\n          126,\n      
    -59,\n          99,\n          66,\n          -85,\n          -116,\n          106,\n          66,\n          -101,\n          44,\n          -66,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 253,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1118506157,\n          1156947047,\n          1160482432,\n          1028279276,\n          643515962,\n          629157604,\n          982986506,\n          1114225843,\n          982900966,\n          1114352689,\n          984730679,\n          601050685,\n          710291311,\n          364\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1104275348,\n          1140997225,\n          1155638959,\n          1031465003,\n          1155685120,\n          711039613,\n          1141532270,\n          1114234586,\n          1140739159,\n          1112659240,\n          1026143708,\n          596092738,\n          628960423,\n          391\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 7337502249195313879,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n        
  31,\n          255,\n          278250383,\n          164297688,\n          27510971,\n          832928501,\n          367753154,\n          413299321,\n          580523608,\n          124240170,\n          577698165,\n          722004443,\n          540095055,\n          591233598,\n          642793830,\n          459130683,\n          871739875,\n          720548358,\n          181778104,\n          196388197,\n          248232339,\n          405493197,\n          525456224,\n          721989530,\n          147492756,\n          723832976,\n          378567684,\n          689096449,\n          76587691,\n          247564083,\n          593655786,\n          860086032,\n          164090683,\n          348600156,\n          842797911,\n          582550765,\n          706011474,\n          37720289,\n          866771283,\n          373625313,\n          400683123,\n          516801133,\n          719502795,\n          310574043,\n          25157\n        ],\n        \"cutValueData\": [\n          66,\n          -65,\n          -64,\n          23,\n          66,\n          -110,\n          -76,\n          94,\n          66,\n          -64,\n          -52,\n          94,\n          66,\n          87,\n          -46,\n          104,\n          66,\n          -60,\n          35,\n          -122,\n          66,\n          74,\n          -113,\n          -117,\n          66,\n          123,\n          -72,\n          21,\n          66,\n          -127,\n          68,\n          -35,\n          66,\n          108,\n          19,\n          -26,\n          66,\n          -103,\n          -26,\n          74,\n          66,\n          115,\n          -71,\n          19,\n          66,\n          -126,\n          -35,\n          108,\n          66,\n          106,\n          71,\n          59,\n          66,\n          -96,\n          -20,\n          -73,\n          66,\n          -118,\n          23,\n          -71,\n          66,\n          -113,\n          1,\n          
76,\n          66,\n          114,\n          34,\n          40,\n          66,\n          108,\n          -109,\n          -40,\n          66,\n          -92,\n          -23,\n          -63,\n          66,\n          -117,\n          -105,\n          -68,\n          66,\n          -63,\n          106,\n          -55,\n          66,\n          -70,\n          -65,\n          -85,\n          66,\n          88,\n          55,\n          -17,\n          66,\n          -113,\n          -14,\n          97,\n          66,\n          -100,\n          -23,\n          -27,\n          66,\n          108,\n          -63,\n          85,\n          66,\n          -88,\n          112,\n          45,\n          66,\n          72,\n          -119,\n          100,\n          66,\n          -114,\n          -43,\n          -115,\n          66,\n          -65,\n          63,\n          111,\n          66,\n          100,\n          79,\n          -48,\n          66,\n          -119,\n          91,\n          117,\n          66,\n          -96,\n          83,\n          55,\n          66,\n          -96,\n          69,\n          -93,\n          66,\n          77,\n          56,\n          -6,\n          66,\n          -82,\n          117,\n          82,\n          66,\n          85,\n          -60,\n          33,\n          66,\n          80,\n          -105,\n          -39,\n          66,\n          96,\n          -1,\n          15,\n          66,\n          89,\n          -127,\n          29,\n          66,\n          -70,\n          64,\n          -6,\n          66,\n          -92,\n          11,\n          18,\n          66,\n          -102,\n          23,\n          92,\n          66,\n          84,\n          -88,\n          74,\n          66,\n          -119,\n          19,\n          -100,\n          66,\n          -94,\n          1,\n          -96,\n          66,\n          -102,\n          -123,\n          18,\n          66,\n          -90,\n          29,\n          19,\n   
       66,\n          -123,\n          45,\n          -42,\n          66,\n          -103,\n          56,\n          -2,\n          66,\n          88,\n          -84,\n          -88,\n          66,\n          112,\n          89,\n          81,\n          66,\n          -80,\n          -53,\n          -104,\n          66,\n          -104,\n          46,\n          -72,\n          66,\n          -94,\n          52,\n          58,\n          66,\n          -100,\n          104,\n          -86,\n          66,\n          -89,\n          -92,\n          95,\n          66,\n          -77,\n          -50,\n          -117,\n          66,\n          -87,\n          1,\n          -1,\n          66,\n          -106,\n          115,\n          58,\n          66,\n          127,\n          87,\n          -16,\n          66,\n          -128,\n          -22,\n          92,\n          66,\n          -92,\n          49,\n          -67,\n          66,\n          -98,\n          5,\n          27,\n          66,\n          101,\n          -64,\n          -46,\n          66,\n          103,\n          103,\n          -36,\n          66,\n          -102,\n          -112,\n          97,\n          66,\n          84,\n          10,\n          1,\n          66,\n          -94,\n          -80,\n          -70,\n          66,\n          114,\n          7,\n          -105,\n          66,\n          86,\n          -116,\n          -71,\n          66,\n          -122,\n          54,\n          -119,\n          66,\n          -110,\n          57,\n          -118,\n          66,\n          -98,\n          -60,\n          -122,\n          66,\n          -123,\n          14,\n          -14,\n          66,\n          102,\n          30,\n          124,\n          66,\n          -125,\n          -83,\n          125,\n          66,\n          105,\n          79,\n          2,\n          66,\n          -111,\n          74,\n          68,\n          66,\n          127,\n          -35,\n          -102,\n   
       66,\n          -74,\n          -79,\n          -22,\n          66,\n          -118,\n          38,\n          52,\n          66,\n          127,\n          -63,\n          53,\n          66,\n          -67,\n          13,\n          2,\n          66,\n          -88,\n          -66,\n          41,\n          66,\n          -81,\n          -73,\n          -31,\n          66,\n          -60,\n          117,\n          105,\n          66,\n          -113,\n          -44,\n          47,\n          66,\n          -101,\n          -116,\n          -116,\n          66,\n          -83,\n          -82,\n          35,\n          66,\n          -121,\n          10,\n          -53,\n          66,\n          -121,\n          -22,\n          -111,\n          66,\n          -99,\n          5,\n          96,\n          66,\n          -84,\n          -13,\n          -22,\n          66,\n          -102,\n          27,\n          66,\n          66,\n          -74,\n          77,\n          91,\n          66,\n          -103,\n          -116,\n          -92,\n          66,\n          125,\n          39,\n          -37,\n          66,\n          -86,\n          -74,\n          76,\n          66,\n          -92,\n          73,\n          -99,\n          66,\n          -100,\n          -46,\n          -23,\n          66,\n          -109,\n          -23,\n          -108,\n          66,\n          -95,\n          -124,\n          -43,\n          66,\n          74,\n          91,\n          -23,\n          66,\n          -105,\n          -34,\n          35,\n          66,\n          109,\n          56,\n          -109,\n          66,\n          110,\n          -121,\n          -38,\n          66,\n          -79,\n          91,\n          127,\n          66,\n          -84,\n          125,\n          80,\n          66,\n          -104,\n          -97,\n          -122,\n          66,\n          -109,\n          92,\n          -114,\n          66,\n          115,\n          -107,\n       
   91,\n          66,\n          -108,\n          -85,\n          69,\n          66,\n          -120,\n          0,\n          -84,\n          66,\n          80,\n          79,\n          58,\n          66,\n          98,\n          -58,\n          -115,\n          66,\n          -122,\n          50,\n          -63,\n          66,\n          -102,\n          27,\n          114,\n          66,\n          -117,\n          -10,\n          41,\n          66,\n          73,\n          37,\n          49,\n          66,\n          107,\n          -82,\n          40,\n          66,\n          -84,\n          -6,\n          74,\n          66,\n          96,\n          38,\n          -122,\n          66,\n          -98,\n          98,\n          41,\n          66,\n          87,\n          -1,\n          -37,\n          66,\n          -127,\n          93,\n          55,\n          66,\n          -103,\n          -49,\n          22,\n          66,\n          111,\n          47,\n          -9,\n          66,\n          117,\n          -84,\n          122,\n          66,\n          121,\n          -90,\n          23,\n          66,\n          -117,\n          -98,\n          57,\n          66,\n          73,\n          -73,\n          27,\n          66,\n          -83,\n          19,\n          64,\n          66,\n          -65,\n          74,\n          -40,\n          66,\n          -115,\n          -101,\n          -70,\n          66,\n          -60,\n          -49,\n          22,\n          66,\n          -103,\n          19,\n          -102,\n          66,\n          126,\n          83,\n          108,\n          66,\n          116,\n          1,\n          -128,\n          66,\n          -84,\n          118,\n          -37,\n          66,\n          -112,\n          52,\n          102,\n          66,\n          -72,\n          17,\n          19,\n          66,\n          -94,\n          -60,\n          -111,\n          66,\n          82,\n          -104,\n          -45,\n 
         66,\n          88,\n          84,\n          73,\n          66,\n          -103,\n          20,\n          113,\n          66,\n          -115,\n          -102,\n          -26,\n          66,\n          -62,\n          103,\n          69,\n          66,\n          -84,\n          109,\n          2,\n          66,\n          -118,\n          -125,\n          63,\n          66,\n          -103,\n          -84,\n          81,\n          66,\n          -77,\n          82,\n          24,\n          66,\n          -128,\n          2,\n          35,\n          66,\n          -102,\n          -106,\n          -12,\n          66,\n          -114,\n          110,\n          123,\n          66,\n          -90,\n          73,\n          59,\n          66,\n          119,\n          -25,\n          114,\n          66,\n          120,\n          90,\n          -47,\n          66,\n          -116,\n          21,\n          34,\n          66,\n          -92,\n          -108,\n          -91,\n          66,\n          -81,\n          -4,\n          -106,\n          66,\n          -91,\n          -34,\n          35,\n          66,\n          -88,\n          33,\n          -110,\n          66,\n          -72,\n          -45,\n          -21,\n          66,\n          -88,\n          104,\n          124,\n          66,\n          -117,\n          76,\n          -86,\n          66,\n          114,\n          -91,\n          -98,\n          66,\n          -103,\n          -119,\n          -39,\n          66,\n          -108,\n          8,\n          -75,\n          66,\n          -111,\n          121,\n          -53,\n          66,\n          -92,\n          -24,\n          3,\n          66,\n          103,\n          44,\n          -8,\n          66,\n          -93,\n          -8,\n          66,\n          66,\n          -108,\n          -106,\n          -49,\n          66,\n          -126,\n          -113,\n          -89,\n          66,\n          -98,\n          -21,\n         
 118,\n          66,\n          72,\n          87,\n          -89,\n          66,\n          -75,\n          -70,\n          -49,\n          66,\n          85,\n          -33,\n          77,\n          66,\n          -113,\n          83,\n          34,\n          66,\n          -102,\n          -94,\n          73,\n          66,\n          -98,\n          -30,\n          105,\n          66,\n          -119,\n          -95,\n          -11,\n          66,\n          -100,\n          -114,\n          -97,\n          66,\n          111,\n          -54,\n          64,\n          66,\n          -109,\n          -10,\n          81,\n          66,\n          95,\n          71,\n          78,\n          66,\n          -81,\n          53,\n          -120,\n          66,\n          -85,\n          -14,\n          -49,\n          66,\n          114,\n          15,\n          -107,\n          66,\n          -119,\n          -102,\n          41,\n          66,\n          -121,\n          6,\n          -73,\n          66,\n          -114,\n          73,\n          53,\n          66,\n          -105,\n          -80,\n          102,\n          66,\n          103,\n          28,\n          -5,\n          66,\n          -68,\n          85,\n          81,\n          66,\n          -94,\n          -120,\n          -118,\n          66,\n          -102,\n          -58,\n          -36,\n          66,\n          -68,\n          -107,\n          -121,\n          66,\n          -113,\n          53,\n          -24,\n          66,\n          113,\n          125,\n          72,\n          66,\n          -105,\n          -58,\n          -91,\n          66,\n          -112,\n          -59,\n          35,\n          66,\n          111,\n          2,\n          -125,\n          66,\n          -113,\n          121,\n          -29,\n          66,\n          -99,\n          50,\n          28,\n          66,\n          109,\n          -39,\n          90,\n          66,\n          -78,\n          
-108,\n          18,\n          66,\n          124,\n          27,\n          117,\n          66,\n          -113,\n          43,\n          35,\n          66,\n          -75,\n          -40,\n          127,\n          66,\n          -79,\n          -60,\n          -103,\n          66,\n          88,\n          -63,\n          115,\n          66,\n          -92,\n          103,\n          -62,\n          66,\n          -105,\n          -73,\n          -24,\n          66,\n          -114,\n          -85,\n          25,\n          66,\n          -103,\n          20,\n          80,\n          66,\n          109,\n          72,\n          -117,\n          66,\n          -76,\n          -76,\n          -63,\n          66,\n          -65,\n          71,\n          41,\n          66,\n          117,\n          62,\n          -31,\n          66,\n          -88,\n          -32,\n          64,\n          66,\n          -112,\n          -4,\n          -93,\n          66,\n          -125,\n          -32,\n          36,\n          66,\n          113,\n          115,\n          -54,\n          66,\n          110,\n          -125,\n          -86,\n          66,\n          -91,\n          -70,\n          -56,\n          66,\n          -68,\n          43,\n          32,\n          66,\n          -106,\n          -91,\n          -86,\n          66,\n          -85,\n          -73,\n          -57,\n          66,\n          -97,\n          104,\n          14,\n          66,\n          94,\n          3,\n          -36,\n          66,\n          -120,\n          82,\n          -108,\n          66,\n          101,\n          -65,\n          -91,\n          66,\n          -92,\n          -55,\n          115,\n          66,\n          -109,\n          -125,\n          57,\n          66,\n          -95,\n          118,\n          53,\n          66,\n          -76,\n          -44,\n          119,\n          66,\n          -122,\n          -33,\n          91,\n          66,\n          -89,\n   
       -62,\n          1,\n          66,\n          -108,\n          -35,\n          -92,\n          66,\n          -73,\n          -107,\n          25,\n          66,\n          -86,\n          48,\n          126,\n          66,\n          -96,\n          -46,\n          52,\n          66,\n          -63,\n          52,\n          -32,\n          66,\n          100,\n          23,\n          89,\n          66,\n          113,\n          60,\n          -2,\n          66,\n          -106,\n          -74,\n          52,\n          66,\n          -121,\n          102,\n          -26,\n          66,\n          102,\n          -86,\n          -104,\n          66,\n          -85,\n          61,\n          35,\n          66,\n          116,\n          -120,\n          107,\n          66,\n          -70,\n          3,\n          -48,\n          66,\n          -110,\n          -117,\n          -87,\n          66,\n          -101,\n          23,\n          -41\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          939386367,\n          654835694,\n          91089351,\n          498687794,\n          864854050,\n          729388756,\n          118344839,\n          407388214,\n          0\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          1003356139,\n          77053855,\n          7027618,\n          798016566,\n          922894498,\n          124295892,\n          128765699,\n          8439475,\n          4105\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6898202114516460044,\n      \"id\": 0,\n      \"dimensions\": 
32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          863063295,\n          589674055,\n          581363007,\n          173357345,\n          191095977,\n          329975383,\n          728939570,\n          212657826,\n          388742617,\n          248883253,\n          909700289,\n          668002619,\n          372165166,\n          757951917,\n          59849802,\n          708519730,\n          438754037,\n          44231546,\n          371001141,\n          1068796494,\n          532002809,\n          82254437,\n          1010488899,\n          894781143,\n          653732805,\n          598273714,\n          68590131,\n          1067171527,\n          396748469,\n          41674871,\n          328690762,\n          91043747,\n          174894957,\n          73348531,\n          115422305,\n          119781307,\n          732867394,\n          237194410,\n          329186301,\n          100118259,\n          506676843,\n          1063453746,\n          75\n        ],\n        \"cutValueData\": [\n          66,\n          -99,\n          99,\n          45,\n          66,\n          -123,\n          70,\n          69,\n          66,\n          -86,\n          107,\n          31,\n          66,\n          72,\n          99,\n          -112,\n          66,\n          -120,\n          92,\n          60,\n          66,\n          -83,\n          -97,\n          -16,\n          66,\n          -102,\n          -114,\n          -5,\n          66,\n          -101,\n          114,\n          122,\n          66,\n          -64,\n       
   -106,\n          1,\n          66,\n          104,\n          -25,\n          74,\n          66,\n          86,\n          122,\n          105,\n          66,\n          -100,\n          -124,\n          15,\n          66,\n          -79,\n          -123,\n          -53,\n          66,\n          -101,\n          78,\n          -70,\n          66,\n          116,\n          58,\n          79,\n          66,\n          -74,\n          113,\n          120,\n          66,\n          -90,\n          26,\n          25,\n          66,\n          -106,\n          127,\n          -125,\n          66,\n          -86,\n          -32,\n          60,\n          66,\n          95,\n          9,\n          44,\n          66,\n          -63,\n          -57,\n          106,\n          66,\n          -94,\n          -93,\n          -71,\n          66,\n          -126,\n          85,\n          23,\n          66,\n          121,\n          112,\n          -112,\n          66,\n          -111,\n          -31,\n          79,\n          66,\n          110,\n          -41,\n          -33,\n          66,\n          -103,\n          65,\n          127,\n          66,\n          -91,\n          93,\n          101,\n          66,\n          109,\n          -127,\n          51,\n          66,\n          -124,\n          -43,\n          -15,\n          66,\n          -84,\n          -49,\n          -88,\n          66,\n          110,\n          -113,\n          -95,\n          66,\n          -112,\n          -105,\n          76,\n          66,\n          -121,\n          -112,\n          -18,\n          66,\n          -105,\n          -128,\n          -90,\n          66,\n          -107,\n          -92,\n          110,\n          66,\n          -124,\n          -48,\n          -68,\n          66,\n          -91,\n          0,\n          -31,\n          66,\n          80,\n          73,\n          -54,\n          66,\n          -114,\n          96,\n          -7,\n          66,\n          
-120,\n          103,\n          85,\n          66,\n          -120,\n          -117,\n          -117,\n          66,\n          -85,\n          -57,\n          93,\n          66,\n          -113,\n          -106,\n          -96,\n          66,\n          -61,\n          65,\n          -15,\n          66,\n          -69,\n          -82,\n          -68,\n          66,\n          -108,\n          -77,\n          38,\n          66,\n          -113,\n          -127,\n          -22,\n          66,\n          108,\n          -67,\n          -105,\n          66,\n          -117,\n          15,\n          -19,\n          66,\n          117,\n          -15,\n          -30,\n          66,\n          -86,\n          61,\n          114,\n          66,\n          -76,\n          -39,\n          -71,\n          66,\n          -98,\n          41,\n          31,\n          66,\n          124,\n          -81,\n          87,\n          66,\n          120,\n          -56,\n          -8,\n          66,\n          -85,\n          7,\n          113,\n          66,\n          -79,\n          102,\n          -82,\n          66,\n          -96,\n          -26,\n          -73,\n          66,\n          -102,\n          69,\n          -118,\n          66,\n          -112,\n          -48,\n          -95,\n          66,\n          -94,\n          17,\n          84,\n          66,\n          -89,\n          -118,\n          -42,\n          66,\n          -107,\n          100,\n          97,\n          66,\n          -113,\n          -2,\n          62,\n          66,\n          -124,\n          -13,\n          -109,\n          66,\n          120,\n          -22,\n          114,\n          66,\n          -117,\n          56,\n          -72,\n          66,\n          79,\n          7,\n          -106,\n          66,\n          -79,\n          4,\n          -71,\n          66,\n          -95,\n          -102,\n          18,\n          66,\n          -94,\n          58,\n          -47,\n          
66,\n          -124,\n          83,\n          22,\n          66,\n          -106,\n          114,\n          -48,\n          66,\n          -99,\n          37,\n          22,\n          66,\n          -69,\n          127,\n          -85,\n          66,\n          -103,\n          -89,\n          -105,\n          66,\n          98,\n          77,\n          -56,\n          66,\n          -114,\n          85,\n          36,\n          66,\n          -80,\n          62,\n          14,\n          66,\n          -102,\n          39,\n          -18,\n          66,\n          -121,\n          76,\n          -34,\n          66,\n          -128,\n          -18,\n          85,\n          66,\n          -97,\n          69,\n          28,\n          66,\n          -125,\n          40,\n          -47,\n          66,\n          -122,\n          10,\n          -38,\n          66,\n          -124,\n          112,\n          -119,\n          66,\n          -99,\n          53,\n          3,\n          66,\n          -122,\n          75,\n          99,\n          66,\n          82,\n          126,\n          88,\n          66,\n          -96,\n          32,\n          65,\n          66,\n          -67,\n          -54,\n          -55,\n          66,\n          -101,\n          52,\n          -78,\n          66,\n          -97,\n          1,\n          -77,\n          66,\n          94,\n          54,\n          29,\n          66,\n          -105,\n          -36,\n          -34,\n          66,\n          -109,\n          111,\n          110,\n          66,\n          97,\n          3,\n          -17,\n          66,\n          -109,\n          -68,\n          -100,\n          66,\n          -108,\n          -93,\n          -80,\n          66,\n          -116,\n          22,\n          58,\n          66,\n          -67,\n          70,\n          -112,\n          66,\n          -124,\n          105,\n          60,\n          66,\n          -106,\n          99,\n          90,\n          
66,\n          -119,\n          -10,\n          -63,\n          66,\n          73,\n          91,\n          -114,\n          66,\n          85,\n          -67,\n          -116,\n          66,\n          -85,\n          118,\n          -23,\n          66,\n          -109,\n          114,\n          108,\n          66,\n          -91,\n          -66,\n          99,\n          66,\n          -112,\n          53,\n          -87,\n          66,\n          -93,\n          -100,\n          31,\n          66,\n          -122,\n          -31,\n          122,\n          66,\n          113,\n          84,\n          -117,\n          66,\n          -99,\n          -24,\n          41,\n          66,\n          -72,\n          84,\n          105,\n          66,\n          -96,\n          -128,\n          -28,\n          66,\n          -122,\n          81,\n          -69,\n          66,\n          -109,\n          58,\n          -14,\n          66,\n          -110,\n          -15,\n          -48,\n          66,\n          96,\n          -125,\n          40,\n          66,\n          -112,\n          73,\n          -72,\n          66,\n          -105,\n          97,\n          119,\n          66,\n          -123,\n          34,\n          48,\n          66,\n          -105,\n          32,\n          -118,\n          66,\n          121,\n          -16,\n          -73,\n          66,\n          -98,\n          21,\n          -22,\n          66,\n          116,\n          -96,\n          33,\n          66,\n          -102,\n          -118,\n          -4,\n          66,\n          -127,\n          -79,\n          48,\n          66,\n          122,\n          52,\n          -41,\n          66,\n          -83,\n          59,\n          122,\n          66,\n          109,\n          57,\n          -96,\n          66,\n          -104,\n          -31,\n          24,\n          66,\n          -87,\n          107,\n          -48,\n          66,\n          -76,\n          38,\n          
16,\n          66,\n          -119,\n          32,\n          94,\n          66,\n          -73,\n          -121,\n          66,\n          66,\n          125,\n          -46,\n          -62,\n          66,\n          -103,\n          -6,\n          -127,\n          66,\n          -69,\n          -33,\n          30,\n          66,\n          -112,\n          47,\n          69,\n          66,\n          -118,\n          -37,\n          12,\n          66,\n          94,\n          92,\n          -15,\n          66,\n          115,\n          59,\n          -126,\n          66,\n          -127,\n          87,\n          47,\n          66,\n          105,\n          50,\n          -50,\n          66,\n          -114,\n          51,\n          30,\n          66,\n          -100,\n          -46,\n          -87,\n          66,\n          -118,\n          -66,\n          121,\n          66,\n          110,\n          93,\n          27,\n          66,\n          -74,\n          -43,\n          52,\n          66,\n          -97,\n          125,\n          -67,\n          66,\n          -110,\n          87,\n          -75,\n          66,\n          103,\n          51,\n          57,\n          66,\n          -77,\n          84,\n          45,\n          66,\n          -110,\n          -106,\n          80,\n          66,\n          -106,\n          84,\n          30,\n          66,\n          -84,\n          -15,\n          26,\n          66,\n          -126,\n          105,\n          90,\n          66,\n          -89,\n          6,\n          -119,\n          66,\n          -63,\n          -128,\n          -95,\n          66,\n          -62,\n          -83,\n          115,\n          66,\n          -102,\n          -118,\n          23,\n          66,\n          -121,\n          -116,\n          29,\n          66,\n          127,\n          -32,\n          -112,\n          66,\n          115,\n          -92,\n          -115,\n          66,\n          -62,\n          65,\n     
     -83,\n          66,\n          -98,\n          -117,\n          30,\n          66,\n          -107,\n          -39,\n          78,\n          66,\n          107,\n          12,\n          64,\n          66,\n          -71,\n          44,\n          119,\n          66,\n          -93,\n          -122,\n          -123,\n          66,\n          -64,\n          68,\n          78,\n          66,\n          87,\n          23,\n          80,\n          66,\n          -115,\n          52,\n          24,\n          66,\n          93,\n          97,\n          -6,\n          66,\n          -117,\n          98,\n          109,\n          66,\n          121,\n          58,\n          88,\n          66,\n          -65,\n          -111,\n          110,\n          66,\n          -95,\n          -57,\n          20,\n          66,\n          -90,\n          119,\n          -30,\n          66,\n          76,\n          46,\n          64,\n          66,\n          -92,\n          11,\n          107,\n          66,\n          -69,\n          87,\n          -10,\n          66,\n          -72,\n          122,\n          -112,\n          66,\n          97,\n          40,\n          8,\n          66,\n          78,\n          -47,\n          -55,\n          66,\n          100,\n          -87,\n          13,\n          66,\n          -113,\n          69,\n          -109,\n          66,\n          -74,\n          -95,\n          21,\n          66,\n          120,\n          76,\n          60,\n          66,\n          -100,\n          -85,\n          18,\n          66,\n          -117,\n          67,\n          56,\n          66,\n          -113,\n          -51,\n          88,\n          66,\n          -76,\n          -76,\n          -42,\n          66,\n          -72,\n          16,\n          -51,\n          66,\n          -89,\n          -22,\n          -74,\n          66,\n          -74,\n          -45,\n          -11,\n          66,\n          -99,\n          79,\n          
115,\n          66,\n          -112,\n          118,\n          0,\n          66,\n          -97,\n          100,\n          7,\n          66,\n          112,\n          -39,\n          -107,\n          66,\n          -125,\n          -10,\n          92,\n          66,\n          -70,\n          -32,\n          -37,\n          66,\n          127,\n          93,\n          -10,\n          66,\n          109,\n          -38,\n          -110,\n          66,\n          -122,\n          -3,\n          -18,\n          66,\n          104,\n          50,\n          -22,\n          66,\n          -61,\n          -9,\n          118,\n          66,\n          -114,\n          -9,\n          32,\n          66,\n          -125,\n          -77,\n          48,\n          66,\n          123,\n          -26,\n          47,\n          66,\n          -101,\n          79,\n          -59,\n          66,\n          -73,\n          -84,\n          5,\n          66,\n          87,\n          -100,\n          -28,\n          66,\n          125,\n          116,\n          4,\n          66,\n          -103,\n          -5,\n          122,\n          66,\n          -87,\n          42,\n          74,\n          66,\n          93,\n          -43,\n          -73,\n          66,\n          92,\n          -29,\n          -101,\n          66,\n          97,\n          -20,\n          -100,\n          66,\n          109,\n          -35,\n          -102,\n          66,\n          -102,\n          -56,\n          -27,\n          66,\n          115,\n          -82,\n          119,\n          66,\n          85,\n          117,\n          62,\n          66,\n          -120,\n          -2,\n          36,\n          66,\n          86,\n          119,\n          12,\n          66,\n          -128,\n          67,\n          -66,\n          66,\n          -112,\n          -56,\n          19,\n          66,\n          -97,\n          39,\n          46,\n          66,\n          -116,\n          11,\n          
101,\n          66,\n          -102,\n          106,\n          64,\n          66,\n          -102,\n          41,\n          -11,\n          66,\n          -88,\n          15,\n          52,\n          66,\n          -90,\n          -20,\n          -57,\n          66,\n          -125,\n          -67,\n          -48,\n          66,\n          111,\n          -19,\n          97,\n          66,\n          102,\n          -91,\n          53,\n          66,\n          -93,\n          21,\n          -24,\n          66,\n          123,\n          -37,\n          -58,\n          66,\n          -77,\n          121,\n          -108,\n          66,\n          -104,\n          -109,\n          84,\n          66,\n          -113,\n          -120,\n          91,\n          66,\n          -96,\n          -61,\n          122,\n          66,\n          -85,\n          -5,\n          105,\n          66,\n          100,\n          -104,\n          5,\n          66,\n          85,\n          84,\n          -89,\n          66,\n          -80,\n          -125,\n          33,\n          66,\n          92,\n          55,\n          -120,\n          66,\n          -69,\n          -108,\n          -42,\n          66,\n          80,\n          17,\n          114,\n          66,\n          -95,\n          40,\n          -89,\n          66,\n          -72,\n          -21,\n          54,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1114429589,\n          1147905998,\n          1104097922,\n          643900796,\n          772984187,\n          982901212,\n          973951262,\n          1156682156,\n          597843904,\n          581218456,\n          600350090,\n          581159164,\n          596033707,\n          1093\n        ],\n        \"rightIndex\": [\n         
 -1,\n          1,\n          255,\n          1033119116,\n          1147674176,\n          1147826942,\n          629748716,\n          755505682,\n          1112040646,\n          1012208236,\n          1028073697,\n          588217679,\n          624204673,\n          1026507451,\n          753575767,\n          595718105,\n          1175\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -9202437649290432236,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          535719095,\n          867105787,\n          151614999,\n          707219049,\n          84183276,\n          234971147,\n          149604553,\n          642161656,\n          696657209,\n          852529586,\n          886947438,\n          656297559,\n          492732625,\n          62060633,\n          614388041,\n          56075691,\n          740370285,\n          885229957,\n          706316741,\n          760457038,\n          384840541,\n          849947140,\n          299208006,\n          537546950,\n          355483385,\n          830564046,\n          754257754,\n          133924414,\n          160215686,\n          253534916,\n          486409978,\n          277181321,\n          887324940,\n          396786524,\n          133542942,\n          296075823,\n          
377299001,\n          77452291,\n          642617502,\n          704455112,\n          767255632,\n          354700450,\n          13175\n        ],\n        \"cutValueData\": [\n          66,\n          70,\n          121,\n          -124,\n          66,\n          -98,\n          70,\n          107,\n          66,\n          -88,\n          117,\n          -54,\n          66,\n          -120,\n          -83,\n          12,\n          66,\n          -124,\n          -32,\n          87,\n          66,\n          -74,\n          -30,\n          -51,\n          66,\n          -78,\n          -9,\n          -19,\n          66,\n          -119,\n          47,\n          85,\n          66,\n          -114,\n          11,\n          24,\n          66,\n          -95,\n          -42,\n          30,\n          66,\n          -120,\n          -38,\n          -8,\n          66,\n          106,\n          -54,\n          -102,\n          66,\n          -107,\n          31,\n          -38,\n          66,\n          -115,\n          63,\n          -107,\n          66,\n          113,\n          7,\n          -63,\n          66,\n          -71,\n          -89,\n          -99,\n          66,\n          101,\n          -119,\n          48,\n          66,\n          -71,\n          -6,\n          -118,\n          66,\n          -119,\n          -83,\n          -42,\n          66,\n          -87,\n          -29,\n          107,\n          66,\n          -98,\n          -27,\n          0,\n          66,\n          91,\n          41,\n          -22,\n          66,\n          -93,\n          -4,\n          -99,\n          66,\n          -124,\n          70,\n          59,\n          66,\n          69,\n          95,\n          112,\n          66,\n          -114,\n          15,\n          -18,\n          66,\n          94,\n          -62,\n          -56,\n          66,\n          -113,\n          76,\n          -93,\n          66,\n          -124,\n          -95,\n          -114,\n     
     66,\n          127,\n          32,\n          4,\n          66,\n          -79,\n          -31,\n          11,\n          66,\n          -90,\n          -64,\n          78,\n          66,\n          -81,\n          17,\n          35,\n          66,\n          -71,\n          25,\n          15,\n          66,\n          -118,\n          -63,\n          -10,\n          66,\n          -105,\n          -100,\n          -96,\n          66,\n          -71,\n          124,\n          -62,\n          66,\n          86,\n          -111,\n          -3,\n          66,\n          -98,\n          -47,\n          -91,\n          66,\n          -89,\n          13,\n          8,\n          66,\n          -109,\n          121,\n          -28,\n          66,\n          109,\n          -105,\n          114,\n          66,\n          -67,\n          107,\n          93,\n          66,\n          -122,\n          13,\n          -39,\n          66,\n          -93,\n          -69,\n          27,\n          66,\n          -75,\n          17,\n          124,\n          66,\n          -93,\n          18,\n          58,\n          66,\n          101,\n          72,\n          -72,\n          66,\n          -71,\n          75,\n          53,\n          66,\n          124,\n          66,\n          77,\n          66,\n          -116,\n          -3,\n          114,\n          66,\n          -111,\n          -16,\n          -122,\n          66,\n          -125,\n          4,\n          -15,\n          66,\n          -92,\n          -83,\n          15,\n          66,\n          -69,\n          -95,\n          -110,\n          66,\n          -99,\n          -113,\n          -42,\n          66,\n          93,\n          1,\n          -125,\n          66,\n          118,\n          -59,\n          93,\n          66,\n          -102,\n          32,\n          -4,\n          66,\n          127,\n          -49,\n          -123,\n          66,\n          -81,\n          92,\n          -80,\n         
 66,\n          -74,\n          20,\n          12,\n          66,\n          -116,\n          -88,\n          -23,\n          66,\n          -93,\n          -61,\n          -92,\n          66,\n          -93,\n          -87,\n          -1,\n          66,\n          -91,\n          13,\n          -97,\n          66,\n          -85,\n          -51,\n          125,\n          66,\n          -72,\n          -4,\n          102,\n          66,\n          -90,\n          -128,\n          121,\n          66,\n          -128,\n          79,\n          -73,\n          66,\n          -126,\n          21,\n          109,\n          66,\n          97,\n          -45,\n          85,\n          66,\n          124,\n          -25,\n          -72,\n          66,\n          -127,\n          -23,\n          -118,\n          66,\n          72,\n          -87,\n          94,\n          66,\n          119,\n          53,\n          -73,\n          66,\n          -115,\n          -41,\n          -28,\n          66,\n          -87,\n          -76,\n          -93,\n          66,\n          76,\n          -50,\n          -86,\n          66,\n          87,\n          -81,\n          77,\n          66,\n          -94,\n          94,\n          105,\n          66,\n          -100,\n          -43,\n          61,\n          66,\n          -121,\n          -22,\n          -21,\n          66,\n          123,\n          -46,\n          -111,\n          66,\n          101,\n          -93,\n          -85,\n          66,\n          127,\n          -99,\n          -48,\n          66,\n          -128,\n          22,\n          101,\n          66,\n          113,\n          92,\n          -91,\n          66,\n          -126,\n          115,\n          58,\n          66,\n          -126,\n          24,\n          -82,\n          66,\n          -70,\n          -127,\n          -126,\n          66,\n          -72,\n          -59,\n          -96,\n          66,\n          -81,\n          29,\n          77,\n 
         66,\n          -67,\n          -128,\n          50,\n          66,\n          -123,\n          -99,\n          49,\n          66,\n          -96,\n          98,\n          115,\n          66,\n          94,\n          119,\n          117,\n          66,\n          -98,\n          40,\n          22,\n          66,\n          -99,\n          3,\n          -30,\n          66,\n          100,\n          13,\n          -20,\n          66,\n          -79,\n          -123,\n          -26,\n          66,\n          -119,\n          52,\n          112,\n          66,\n          -97,\n          -1,\n          -122,\n          66,\n          -93,\n          115,\n          25,\n          66,\n          102,\n          -88,\n          -68,\n          66,\n          81,\n          -65,\n          -10,\n          66,\n          -113,\n          73,\n          93,\n          66,\n          -125,\n          -29,\n          35,\n          66,\n          -74,\n          -12,\n          101,\n          66,\n          -123,\n          -9,\n          96,\n          66,\n          -123,\n          -1,\n          90,\n          66,\n          -84,\n          -116,\n          67,\n          66,\n          -91,\n          28,\n          -41,\n          66,\n          -77,\n          58,\n          -55,\n          66,\n          -116,\n          56,\n          -101,\n          66,\n          119,\n          -80,\n          -124,\n          66,\n          118,\n          15,\n          -43,\n          66,\n          -74,\n          -65,\n          45,\n          66,\n          -119,\n          -85,\n          13,\n          66,\n          -113,\n          30,\n          22,\n          66,\n          82,\n          -5,\n          110,\n          66,\n          -87,\n          -28,\n          0,\n          66,\n          -93,\n          119,\n          3,\n          66,\n          -86,\n          98,\n          127,\n          66,\n          -80,\n          35,\n          110,\n       
   66,\n          -128,\n          62,\n          111,\n          66,\n          -86,\n          47,\n          -102,\n          66,\n          -116,\n          -67,\n          49,\n          66,\n          -101,\n          -72,\n          97,\n          66,\n          -113,\n          29,\n          -103,\n          66,\n          83,\n          75,\n          -122,\n          66,\n          -115,\n          -67,\n          -102,\n          66,\n          -69,\n          -2,\n          124,\n          66,\n          100,\n          -40,\n          -98,\n          66,\n          116,\n          -13,\n          -113,\n          66,\n          -115,\n          -106,\n          86,\n          66,\n          -108,\n          23,\n          -14,\n          66,\n          -121,\n          -82,\n          -30,\n          66,\n          -127,\n          -64,\n          -2,\n          66,\n          -78,\n          8,\n          54,\n          66,\n          -106,\n          -65,\n          36,\n          66,\n          -72,\n          -101,\n          48,\n          66,\n          -113,\n          18,\n          -60,\n          66,\n          -116,\n          35,\n          -47,\n          66,\n          -106,\n          -98,\n          -92,\n          66,\n          99,\n          -57,\n          94,\n          66,\n          120,\n          -71,\n          -43,\n          66,\n          -84,\n          47,\n          -53,\n          66,\n          -98,\n          117,\n          25,\n          66,\n          114,\n          -7,\n          66,\n          66,\n          87,\n          92,\n          -8,\n          66,\n          -110,\n          110,\n          107,\n          66,\n          -82,\n          70,\n          -68,\n          66,\n          -75,\n          14,\n          77,\n          66,\n          -95,\n          14,\n          7,\n          66,\n          108,\n          66,\n          -40,\n          66,\n          -87,\n          120,\n          100,\n    
      66,\n          82,\n          91,\n          95,\n          66,\n          -121,\n          -101,\n          62,\n          66,\n          -109,\n          -65,\n          -127,\n          66,\n          -76,\n          -128,\n          122,\n          66,\n          -122,\n          42,\n          107,\n          66,\n          81,\n          -125,\n          -25,\n          66,\n          -65,\n          24,\n          114,\n          66,\n          94,\n          88,\n          85,\n          66,\n          -120,\n          77,\n          -118,\n          66,\n          -111,\n          -62,\n          -5,\n          66,\n          -79,\n          -98,\n          -98,\n          66,\n          -107,\n          11,\n          -47,\n          66,\n          126,\n          -4,\n          -94,\n          66,\n          110,\n          -46,\n          -80,\n          66,\n          104,\n          34,\n          -89,\n          66,\n          117,\n          -78,\n          45,\n          66,\n          -63,\n          -15,\n          -94,\n          66,\n          93,\n          -103,\n          35,\n          66,\n          -92,\n          98,\n          45,\n          66,\n          -83,\n          88,\n          52,\n          66,\n          -105,\n          -9,\n          -127,\n          66,\n          -121,\n          -111,\n          51,\n          66,\n          -104,\n          65,\n          80,\n          66,\n          112,\n          100,\n          5,\n          66,\n          102,\n          100,\n          107,\n          66,\n          -107,\n          -45,\n          112,\n          66,\n          -123,\n          -48,\n          -73,\n          66,\n          -102,\n          16,\n          -115,\n          66,\n          -106,\n          54,\n          84,\n          66,\n          -72,\n          0,\n          16,\n          66,\n          -101,\n          35,\n          -117,\n          66,\n          125,\n          59,\n          
-103,\n          66,\n          -65,\n          78,\n          -91,\n          66,\n          -63,\n          117,\n          123,\n          66,\n          -68,\n          -67,\n          -15,\n          66,\n          -113,\n          121,\n          -69,\n          66,\n          -70,\n          106,\n          -74,\n          66,\n          86,\n          89,\n          21,\n          66,\n          -102,\n          -64,\n          -45,\n          66,\n          -114,\n          -47,\n          102,\n          66,\n          126,\n          48,\n          -118,\n          66,\n          89,\n          -3,\n          -17,\n          66,\n          -89,\n          18,\n          -75,\n          66,\n          -103,\n          2,\n          65,\n          66,\n          -114,\n          -76,\n          -114,\n          66,\n          -92,\n          112,\n          -51,\n          66,\n          122,\n          25,\n          124,\n          66,\n          -94,\n          79,\n          91,\n          66,\n          -102,\n          125,\n          100,\n          66,\n          -64,\n          82,\n          98,\n          66,\n          -120,\n          50,\n          -43,\n          66,\n          123,\n          -121,\n          122,\n          66,\n          -113,\n          58,\n          -91,\n          66,\n          92,\n          57,\n          29,\n          66,\n          -100,\n          109,\n          120,\n          66,\n          120,\n          -127,\n          -13,\n          66,\n          -126,\n          35,\n          94,\n          66,\n          -89,\n          13,\n          85,\n          66,\n          -95,\n          47,\n          -96,\n          66,\n          -114,\n          89,\n          -12,\n          66,\n          76,\n          73,\n          28,\n          66,\n          100,\n          -118,\n          -119,\n          66,\n          -106,\n          -50,\n          112,\n          66,\n          -87,\n          -35,\n     
     -6,\n          66,\n          71,\n          35,\n          -125,\n          66,\n          -81,\n          -107,\n          -51,\n          66,\n          -123,\n          110,\n          -113,\n          66,\n          -115,\n          -12,\n          -18,\n          66,\n          -82,\n          -38,\n          -68,\n          66,\n          -109,\n          35,\n          -83,\n          66,\n          -106,\n          -70,\n          66,\n          66,\n          -122,\n          34,\n          105,\n          66,\n          -118,\n          -86,\n          -106,\n          66,\n          -126,\n          -124,\n          123,\n          66,\n          123,\n          63,\n          -39,\n          66,\n          -62,\n          -91,\n          -8,\n          66,\n          80,\n          -40,\n          -26,\n          66,\n          80,\n          -39,\n          78,\n          66,\n          -109,\n          121,\n          77,\n          66,\n          118,\n          123,\n          -20,\n          66,\n          120,\n          94,\n          -78,\n          66,\n          120,\n          75,\n          2,\n          66,\n          -86,\n          96,\n          -9,\n          66,\n          96,\n          -83,\n          16,\n          66,\n          -109,\n          -122,\n          37,\n          66,\n          72,\n          -112,\n          -110,\n          66,\n          111,\n          -51,\n          -39,\n          66,\n          -124,\n          -77,\n          -37,\n          66,\n          -81,\n          -27,\n          -29,\n          66,\n          -128,\n          32,\n          -68,\n          66,\n          -89,\n          84,\n          78,\n          66,\n          -90,\n          62,\n          48,\n          66,\n          -96,\n          25,\n          106,\n          66,\n          -122,\n          -67,\n          -26,\n          66,\n          -117,\n          -53,\n          -113,\n          66,\n          -84,\n          
42,\n          -103,\n          66,\n          -112,\n          -75,\n          -97,\n          66,\n          112,\n          11,\n          22\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          1044373375,\n          872148407,\n          22248433,\n          387200477,\n          762354131,\n          402689047,\n          6856100,\n          311476328,\n          5154\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          1044381565,\n          309447863,\n          326926004,\n          928185553,\n          772906643,\n          24468693,\n          553929732,\n          186810656,\n          112\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -3274031270900754220,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          578209893,\n          257924091,\n          585725913,\n          352298682,\n          1059957473,\n          1068420343,\n          227716159,\n          749119150,\n          514098745,\n          715069249,\n          87672902,\n          330159845,\n          871206710,\n          132696553,\n          
975129162,\n          254793854,\n          128544595,\n          909768015,\n          52383069,\n          774183261,\n          185947346,\n          257414195,\n          603028837,\n          1016255798,\n          911256998,\n          77956717,\n          86996186,\n          580475745,\n          336965233,\n          866872495,\n          731348601,\n          259210925,\n          463572070,\n          858477874,\n          78428495,\n          44030675,\n          733064953,\n          719022758,\n          712485181,\n          254453550,\n          846256505,\n          917936994,\n          581\n        ],\n        \"cutValueData\": [\n          66,\n          -69,\n          17,\n          18,\n          66,\n          -64,\n          -11,\n          112,\n          66,\n          -70,\n          -40,\n          -77,\n          66,\n          -68,\n          -23,\n          -115,\n          66,\n          -73,\n          22,\n          -21,\n          66,\n          -122,\n          -98,\n          -89,\n          66,\n          -114,\n          11,\n          -41,\n          66,\n          -110,\n          -68,\n          -31,\n          66,\n          101,\n          0,\n          84,\n          66,\n          -77,\n          34,\n          -118,\n          66,\n          114,\n          119,\n          -127,\n          66,\n          -65,\n          -50,\n          117,\n          66,\n          -88,\n          -39,\n          -128,\n          66,\n          -88,\n          97,\n          21,\n          66,\n          -122,\n          64,\n          105,\n          66,\n          100,\n          -24,\n          43,\n          66,\n          111,\n          -109,\n          -29,\n          66,\n          -85,\n          106,\n          118,\n          66,\n          -97,\n          19,\n          82,\n          66,\n          -75,\n          96,\n          -89,\n          66,\n          126,\n          92,\n          103,\n          66,\n          
69,\n          112,\n          -21,\n          66,\n          127,\n          -7,\n          95,\n          66,\n          -85,\n          -101,\n          -22,\n          66,\n          -85,\n          92,\n          -101,\n          66,\n          -79,\n          73,\n          -122,\n          66,\n          -93,\n          -120,\n          76,\n          66,\n          -98,\n          1,\n          -107,\n          66,\n          -123,\n          102,\n          54,\n          66,\n          -122,\n          -43,\n          -19,\n          66,\n          100,\n          -117,\n          55,\n          66,\n          109,\n          -37,\n          -28,\n          66,\n          126,\n          8,\n          -127,\n          66,\n          -96,\n          109,\n          -92,\n          66,\n          -103,\n          37,\n          -128,\n          66,\n          -125,\n          -13,\n          103,\n          66,\n          -112,\n          -75,\n          -70,\n          66,\n          108,\n          72,\n          68,\n          66,\n          -72,\n          9,\n          41,\n          66,\n          -122,\n          47,\n          41,\n          66,\n          -69,\n          111,\n          19,\n          66,\n          -89,\n          -1,\n          -88,\n          66,\n          -124,\n          -107,\n          -95,\n          66,\n          -119,\n          56,\n          27,\n          66,\n          -91,\n          -77,\n          119,\n          66,\n          -107,\n          75,\n          -11,\n          66,\n          111,\n          82,\n          93,\n          66,\n          -100,\n          69,\n          -1,\n          66,\n          72,\n          -53,\n          -8,\n          66,\n          -85,\n          113,\n          0,\n          66,\n          -94,\n          -71,\n          21,\n          66,\n          -71,\n          79,\n          30,\n          66,\n          -102,\n          -108,\n          -100,\n          66,\n        
  -95,\n          48,\n          -69,\n          66,\n          -93,\n          -31,\n          106,\n          66,\n          -69,\n          16,\n          -83,\n          66,\n          -96,\n          -89,\n          -64,\n          66,\n          -91,\n          30,\n          -25,\n          66,\n          -72,\n          -83,\n          -60,\n          66,\n          114,\n          67,\n          -43,\n          66,\n          -79,\n          -81,\n          121,\n          66,\n          123,\n          112,\n          80,\n          66,\n          -95,\n          -112,\n          66,\n          66,\n          73,\n          -18,\n          21,\n          66,\n          -112,\n          10,\n          -128,\n          66,\n          -65,\n          65,\n          3,\n          66,\n          -112,\n          -112,\n          103,\n          66,\n          68,\n          111,\n          -79,\n          66,\n          -109,\n          98,\n          -9,\n          66,\n          -91,\n          63,\n          75,\n          66,\n          -87,\n          -43,\n          -57,\n          66,\n          -97,\n          40,\n          38,\n          66,\n          -80,\n          -99,\n          -3,\n          66,\n          105,\n          -120,\n          123,\n          66,\n          97,\n          -114,\n          -58,\n          66,\n          -102,\n          17,\n          86,\n          66,\n          -115,\n          120,\n          94,\n          66,\n          105,\n          111,\n          -111,\n          66,\n          -110,\n          -55,\n          70,\n          66,\n          -94,\n          69,\n          -113,\n          66,\n          -92,\n          75,\n          -11,\n          66,\n          -101,\n          -31,\n          30,\n          66,\n          -73,\n          60,\n          90,\n          66,\n          -102,\n          58,\n          -61,\n          66,\n          -109,\n          -1,\n          27,\n          66,\n         
 -109,\n          43,\n          84,\n          66,\n          -104,\n          92,\n          99,\n          66,\n          102,\n          124,\n          -97,\n          66,\n          -114,\n          30,\n          -127,\n          66,\n          115,\n          30,\n          -35,\n          66,\n          -98,\n          -15,\n          -127,\n          66,\n          116,\n          -108,\n          -13,\n          66,\n          -109,\n          42,\n          -93,\n          66,\n          112,\n          49,\n          105,\n          66,\n          -82,\n          -5,\n          -127,\n          66,\n          -118,\n          6,\n          16,\n          66,\n          72,\n          -45,\n          -103,\n          66,\n          -78,\n          32,\n          -115,\n          66,\n          -66,\n          -72,\n          54,\n          66,\n          91,\n          -62,\n          -56,\n          66,\n          80,\n          18,\n          113,\n          66,\n          -124,\n          5,\n          82,\n          66,\n          -81,\n          8,\n          61,\n          66,\n          96,\n          -15,\n          -36,\n          66,\n          -81,\n          -82,\n          25,\n          66,\n          -72,\n          -119,\n          12,\n          66,\n          -110,\n          -44,\n          -5,\n          66,\n          120,\n          -84,\n          -110,\n          66,\n          99,\n          -67,\n          120,\n          66,\n          -70,\n          -117,\n          1,\n          66,\n          -128,\n          -99,\n          -106,\n          66,\n          -126,\n          -71,\n          55,\n          66,\n          90,\n          4,\n          123,\n          66,\n          -78,\n          102,\n          46,\n          66,\n          -113,\n          124,\n          55,\n          66,\n          -121,\n          -114,\n          0,\n          66,\n          -72,\n          -3,\n          -33,\n          66,\n          
122,\n          84,\n          -98,\n          66,\n          -116,\n          -36,\n          46,\n          66,\n          -113,\n          -40,\n          -83,\n          66,\n          -80,\n          -56,\n          -10,\n          66,\n          -111,\n          119,\n          -15,\n          66,\n          -109,\n          91,\n          -91,\n          66,\n          -104,\n          -118,\n          -28,\n          66,\n          122,\n          -120,\n          105,\n          66,\n          -109,\n          -27,\n          26,\n          66,\n          -120,\n          -77,\n          -6,\n          66,\n          -123,\n          36,\n          -26,\n          66,\n          93,\n          94,\n          24,\n          66,\n          -125,\n          -102,\n          -27,\n          66,\n          -103,\n          -68,\n          -96,\n          66,\n          93,\n          110,\n          95,\n          66,\n          -102,\n          -69,\n          79,\n          66,\n          -108,\n          51,\n          77,\n          66,\n          -116,\n          -77,\n          -90,\n          66,\n          -120,\n          109,\n          -9,\n          66,\n          91,\n          77,\n          45,\n          66,\n          -108,\n          -13,\n          6,\n          66,\n          -123,\n          58,\n          -97,\n          66,\n          -118,\n          94,\n          -99,\n          66,\n          -125,\n          117,\n          56,\n          66,\n          117,\n          16,\n          125,\n          66,\n          107,\n          -99,\n          -33,\n          66,\n          -99,\n          -54,\n          123,\n          66,\n          -94,\n          -47,\n          -50,\n          66,\n          -119,\n          -91,\n          -5,\n          66,\n          77,\n          39,\n          -17,\n          66,\n          -106,\n          -57,\n          -55,\n          66,\n          -110,\n          -82,\n          -90,\n          
66,\n          -113,\n          -13,\n          4,\n          66,\n          87,\n          121,\n          112,\n          66,\n          100,\n          29,\n          62,\n          66,\n          -123,\n          23,\n          87,\n          66,\n          -109,\n          1,\n          -90,\n          66,\n          123,\n          43,\n          52,\n          66,\n          -94,\n          -35,\n          -49,\n          66,\n          93,\n          91,\n          98,\n          66,\n          -106,\n          -44,\n          22,\n          66,\n          -110,\n          -89,\n          78,\n          66,\n          -104,\n          -105,\n          -89,\n          66,\n          109,\n          70,\n          -68,\n          66,\n          -122,\n          55,\n          -99,\n          66,\n          -123,\n          40,\n          -4,\n          66,\n          107,\n          -63,\n          123,\n          66,\n          -89,\n          -17,\n          -25,\n          66,\n          -113,\n          109,\n          44,\n          66,\n          90,\n          -35,\n          72,\n          66,\n          -98,\n          -84,\n          67,\n          66,\n          -102,\n          29,\n          97,\n          66,\n          -105,\n          17,\n          -118,\n          66,\n          -97,\n          -60,\n          55,\n          66,\n          -79,\n          -54,\n          -3,\n          66,\n          -90,\n          -61,\n          -124,\n          66,\n          -86,\n          63,\n          -78,\n          66,\n          -86,\n          -119,\n          -61,\n          66,\n          -116,\n          88,\n          -86,\n          66,\n          -117,\n          -72,\n          105,\n          66,\n          -76,\n          -60,\n          -3,\n          66,\n          -111,\n          -76,\n          96,\n          66,\n          -123,\n          87,\n          46,\n          66,\n          -112,\n          121,\n          -73,\n         
 66,\n          -91,\n          94,\n          -85,\n          66,\n          -73,\n          -61,\n          -18,\n          66,\n          -101,\n          17,\n          36,\n          66,\n          -111,\n          -114,\n          -119,\n          66,\n          -73,\n          100,\n          -98,\n          66,\n          83,\n          -84,\n          91,\n          66,\n          -118,\n          -66,\n          -114,\n          66,\n          94,\n          27,\n          -40,\n          66,\n          94,\n          11,\n          17,\n          66,\n          -71,\n          -63,\n          108,\n          66,\n          102,\n          20,\n          7,\n          66,\n          -127,\n          16,\n          -75,\n          66,\n          -83,\n          112,\n          14,\n          66,\n          -95,\n          -70,\n          104,\n          66,\n          -112,\n          -96,\n          80,\n          66,\n          -64,\n          -128,\n          -111,\n          66,\n          -121,\n          -26,\n          4,\n          66,\n          109,\n          -33,\n          107,\n          66,\n          -89,\n          120,\n          0,\n          66,\n          120,\n          -89,\n          20,\n          66,\n          -127,\n          49,\n          117,\n          66,\n          86,\n          15,\n          -21,\n          66,\n          99,\n          51,\n          -21,\n          66,\n          -110,\n          -33,\n          45,\n          66,\n          -103,\n          127,\n          -99,\n          66,\n          -91,\n          -114,\n          12,\n          66,\n          -87,\n          101,\n          71,\n          66,\n          74,\n          -126,\n          74,\n          66,\n          -104,\n          12,\n          -17,\n          66,\n          -120,\n          112,\n          102,\n          66,\n          124,\n          -77,\n          -84,\n          66,\n          -89,\n          71,\n          23,\n         
 66,\n          111,\n          -80,\n          116,\n          66,\n          -126,\n          62,\n          98,\n          66,\n          -120,\n          75,\n          -115,\n          66,\n          -128,\n          -38,\n          -115,\n          66,\n          -101,\n          96,\n          -123,\n          66,\n          -104,\n          -11,\n          119,\n          66,\n          -81,\n          23,\n          -111,\n          66,\n          111,\n          123,\n          85,\n          66,\n          -66,\n          74,\n          -62,\n          66,\n          -78,\n          -124,\n          -81,\n          66,\n          112,\n          124,\n          14,\n          66,\n          -87,\n          35,\n          72,\n          66,\n          102,\n          -49,\n          -25,\n          66,\n          83,\n          74,\n          -78,\n          66,\n          -103,\n          13,\n          24,\n          66,\n          -72,\n          -103,\n          -126,\n          66,\n          -123,\n          87,\n          15,\n          66,\n          -112,\n          -25,\n          112,\n          66,\n          76,\n          -66,\n          104,\n          66,\n          91,\n          86,\n          61,\n          66,\n          -91,\n          111,\n          25,\n          66,\n          -92,\n          -46,\n          49,\n          66,\n          -121,\n          37,\n          112,\n          66,\n          -101,\n          4,\n          5,\n          66,\n          -123,\n          24,\n          -40,\n          66,\n          -86,\n          -82,\n          16,\n          66,\n          -125,\n          -109,\n          76,\n          66,\n          -90,\n          38,\n          -23,\n          66,\n          -121,\n          -5,\n          -101,\n          66,\n          -103,\n          0,\n          -98,\n          66,\n          84,\n          115,\n          -46,\n          66,\n          88,\n          1,\n          -81,\n        
  66,\n          -86,\n          -13,\n          79,\n          66,\n          -112,\n          57,\n          -101,\n          66,\n          82,\n          -108,\n          -96,\n          66,\n          -114,\n          -110,\n          -23,\n          66,\n          -112,\n          -84,\n          -38,\n          66,\n          -125,\n          90,\n          88,\n          66,\n          -110,\n          -1,\n          124,\n          66,\n          113,\n          -60,\n          118,\n          66,\n          -76,\n          -110,\n          94,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1157477768,\n          587688443,\n          1147846949,\n          975665816,\n          602574268,\n          1026185486,\n          973596749,\n          758185996,\n          753337331,\n          715789547,\n          715232068,\n          969083423,\n          754977551,\n          1093\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          645700004,\n          1117021184,\n          1156704272,\n          600502082,\n          726393487,\n          768264632,\n          629039186,\n          717202579,\n          758888728,\n          717424600,\n          581316871,\n          970172036,\n          640356988,\n          1102\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -3778355347224434329,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      
\"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1020871983,\n          94566889,\n          760979307,\n          762517201,\n          74089533,\n          720059238,\n          1039236787,\n          882164983,\n          443010978,\n          1067685491,\n          744437879,\n          44227651,\n          712934875,\n          586599657,\n          478058170,\n          712497074,\n          584890827,\n          1067023138,\n          212507518,\n          644517879,\n          57637087,\n          922614059,\n          211124159,\n          630942554,\n          212127663,\n          980757562,\n          922680098,\n          251194977,\n          651341409,\n          182695597,\n          992923474,\n          907738585,\n          530524267,\n          524254673,\n          350455678,\n          636851930,\n          978783919,\n          460889075,\n          854407546,\n          1050527682,\n          441574977,\n          1063108273,\n          743\n        ],\n        \"cutValueData\": [\n          66,\n          -82,\n          59,\n          -41,\n          66,\n          -109,\n          15,\n          112,\n          66,\n          -94,\n          21,\n          -4,\n          66,\n          -74,\n          10,\n          -84,\n          66,\n          -124,\n          -19,\n          86,\n          66,\n          106,\n          118,\n          -85,\n          66,\n          116,\n          125,\n          -39,\n          66,\n          -83,\n          -4,\n          63,\n          66,\n          108,\n          68,\n          -113,\n          66,\n          -128,\n          -76,\n          -32,\n          66,\n   
       124,\n          119,\n          18,\n          66,\n          75,\n          -24,\n          -25,\n          66,\n          -115,\n          -4,\n          120,\n          66,\n          -107,\n          -35,\n          49,\n          66,\n          86,\n          65,\n          17,\n          66,\n          -122,\n          97,\n          67,\n          66,\n          100,\n          -62,\n          9,\n          66,\n          110,\n          17,\n          46,\n          66,\n          122,\n          -118,\n          91,\n          66,\n          -94,\n          52,\n          78,\n          66,\n          -89,\n          92,\n          -90,\n          66,\n          -109,\n          -85,\n          -95,\n          66,\n          -121,\n          -34,\n          -11,\n          66,\n          -59,\n          -80,\n          -45,\n          66,\n          -94,\n          -79,\n          -49,\n          66,\n          -127,\n          -100,\n          -4,\n          66,\n          -125,\n          32,\n          71,\n          66,\n          -74,\n          -73,\n          86,\n          66,\n          -125,\n          106,\n          -99,\n          66,\n          -121,\n          -105,\n          -112,\n          66,\n          -101,\n          0,\n          -118,\n          66,\n          127,\n          99,\n          -46,\n          66,\n          95,\n          -80,\n          -85,\n          66,\n          -99,\n          -40,\n          10,\n          66,\n          74,\n          76,\n          9,\n          66,\n          77,\n          -10,\n          -36,\n          66,\n          118,\n          -13,\n          -57,\n          66,\n          -98,\n          94,\n          -27,\n          66,\n          -121,\n          -30,\n          119,\n          66,\n          -112,\n          16,\n          -45,\n          66,\n          -99,\n          124,\n          -9,\n          66,\n          94,\n          -64,\n          77,\n          66,\n      
    -67,\n          -39,\n          32,\n          66,\n          -83,\n          -92,\n          -92,\n          66,\n          -117,\n          85,\n          59,\n          66,\n          -92,\n          22,\n          -24,\n          66,\n          -120,\n          77,\n          6,\n          66,\n          -62,\n          43,\n          35,\n          66,\n          117,\n          -77,\n          31,\n          66,\n          114,\n          38,\n          -5,\n          66,\n          109,\n          -108,\n          -103,\n          66,\n          -89,\n          -27,\n          2,\n          66,\n          -87,\n          116,\n          -109,\n          66,\n          71,\n          1,\n          107,\n          66,\n          -113,\n          0,\n          -93,\n          66,\n          112,\n          -124,\n          57,\n          66,\n          -96,\n          -39,\n          -105,\n          66,\n          -99,\n          66,\n          101,\n          66,\n          -100,\n          -109,\n          -59,\n          66,\n          -108,\n          103,\n          49,\n          66,\n          -81,\n          -114,\n          -36,\n          66,\n          -119,\n          113,\n          -75,\n          66,\n          -91,\n          -120,\n          -82,\n          66,\n          -101,\n          67,\n          -60,\n          66,\n          -123,\n          -7,\n          -103,\n          66,\n          -94,\n          -55,\n          -115,\n          66,\n          126,\n          -107,\n          -48,\n          66,\n          -109,\n          114,\n          34,\n          66,\n          119,\n          124,\n          -67,\n          66,\n          -112,\n          -36,\n          103,\n          66,\n          -63,\n          113,\n          -6,\n          66,\n          -98,\n          74,\n          62,\n          66,\n          -102,\n          -64,\n          35,\n          66,\n          111,\n          -37,\n          -125,\n          
66,\n          123,\n          65,\n          18,\n          66,\n          115,\n          34,\n          -3,\n          66,\n          -81,\n          -19,\n          16,\n          66,\n          -66,\n          29,\n          -84,\n          66,\n          -121,\n          -38,\n          -46,\n          66,\n          -111,\n          -10,\n          24,\n          66,\n          -125,\n          -53,\n          -119,\n          66,\n          -117,\n          32,\n          -44,\n          66,\n          -99,\n          106,\n          24,\n          66,\n          75,\n          -122,\n          21,\n          66,\n          -68,\n          -96,\n          113,\n          66,\n          102,\n          32,\n          -14,\n          66,\n          -84,\n          -77,\n          1,\n          66,\n          107,\n          18,\n          -95,\n          66,\n          -93,\n          -119,\n          -35,\n          66,\n          -105,\n          -102,\n          -117,\n          66,\n          -98,\n          87,\n          51,\n          66,\n          -99,\n          25,\n          14,\n          66,\n          -101,\n          94,\n          -5,\n          66,\n          -99,\n          75,\n          24,\n          66,\n          -61,\n          122,\n          -72,\n          66,\n          -103,\n          31,\n          45,\n          66,\n          102,\n          25,\n          -43,\n          66,\n          -70,\n          -15,\n          121,\n          66,\n          -79,\n          59,\n          90,\n          66,\n          -79,\n          11,\n          -24,\n          66,\n          -77,\n          51,\n          105,\n          66,\n          115,\n          -30,\n          11,\n          66,\n          -95,\n          23,\n          43,\n          66,\n          -126,\n          -68,\n          -41,\n          66,\n          126,\n          -10,\n          -4,\n          66,\n          -81,\n          -81,\n          105,\n          
66,\n          -114,\n          -112,\n          -114,\n          66,\n          81,\n          58,\n          -103,\n          66,\n          -118,\n          45,\n          -41,\n          66,\n          97,\n          -99,\n          91,\n          66,\n          -100,\n          -23,\n          -48,\n          66,\n          -95,\n          3,\n          -22,\n          66,\n          -117,\n          -70,\n          36,\n          66,\n          -98,\n          67,\n          63,\n          66,\n          -65,\n          127,\n          92,\n          66,\n          -69,\n          -117,\n          -109,\n          66,\n          -81,\n          120,\n          -54,\n          66,\n          121,\n          97,\n          -112,\n          66,\n          96,\n          -93,\n          23,\n          66,\n          -79,\n          -74,\n          21,\n          66,\n          -115,\n          -128,\n          -124,\n          66,\n          108,\n          96,\n          -32,\n          66,\n          118,\n          102,\n          53,\n          66,\n          124,\n          54,\n          -73,\n          66,\n          -127,\n          -49,\n          -78,\n          66,\n          -119,\n          87,\n          -30,\n          66,\n          -82,\n          17,\n          -63,\n          66,\n          -59,\n          -62,\n          101,\n          66,\n          -71,\n          -45,\n          21,\n          66,\n          -85,\n          -109,\n          -35,\n          66,\n          -93,\n          113,\n          -99,\n          66,\n          -90,\n          67,\n          -74,\n          66,\n          -121,\n          -27,\n          -83,\n          66,\n          -92,\n          109,\n          21,\n          66,\n          -111,\n          -64,\n          102,\n          66,\n          -63,\n          126,\n          -20,\n          66,\n          -75,\n          -80,\n          72,\n          66,\n          -126,\n          -46,\n          
-91,\n          66,\n          -100,\n          49,\n          -79,\n          66,\n          -88,\n          107,\n          39,\n          66,\n          -92,\n          -6,\n          -30,\n          66,\n          94,\n          -101,\n          -33,\n          66,\n          -111,\n          -40,\n          91,\n          66,\n          110,\n          -88,\n          32,\n          66,\n          -96,\n          35,\n          -78,\n          66,\n          -93,\n          107,\n          -15,\n          66,\n          -96,\n          52,\n          -122,\n          66,\n          104,\n          51,\n          -47,\n          66,\n          112,\n          101,\n          -81,\n          66,\n          -89,\n          32,\n          104,\n          66,\n          -97,\n          123,\n          -36,\n          66,\n          -76,\n          67,\n          -15,\n          66,\n          -114,\n          -22,\n          52,\n          66,\n          125,\n          24,\n          86,\n          66,\n          -76,\n          -23,\n          16,\n          66,\n          -103,\n          54,\n          -33,\n          66,\n          -98,\n          -45,\n          -58,\n          66,\n          -96,\n          106,\n          100,\n          66,\n          -105,\n          29,\n          14,\n          66,\n          -63,\n          -37,\n          1,\n          66,\n          85,\n          -121,\n          -36,\n          66,\n          -118,\n          21,\n          -46,\n          66,\n          -100,\n          -74,\n          5,\n          66,\n          -116,\n          35,\n          -126,\n          66,\n          93,\n          78,\n          -110,\n          66,\n          117,\n          69,\n          -61,\n          66,\n          82,\n          -90,\n          -30,\n          66,\n          -95,\n          27,\n          -71,\n          66,\n          -71,\n          -2,\n          -24,\n          66,\n          124,\n          -52,\n          
-24,\n          66,\n          -99,\n          86,\n          104,\n          66,\n          122,\n          110,\n          81,\n          66,\n          -73,\n          -25,\n          -100,\n          66,\n          -103,\n          43,\n          -92,\n          66,\n          -112,\n          -127,\n          89,\n          66,\n          77,\n          28,\n          -49,\n          66,\n          98,\n          -50,\n          82,\n          66,\n          -113,\n          -123,\n          -94,\n          66,\n          92,\n          -55,\n          -114,\n          66,\n          107,\n          -124,\n          63,\n          66,\n          -108,\n          61,\n          127,\n          66,\n          94,\n          12,\n          125,\n          66,\n          91,\n          -110,\n          -101,\n          66,\n          -101,\n          -83,\n          31,\n          66,\n          -94,\n          119,\n          47,\n          66,\n          -120,\n          13,\n          -72,\n          66,\n          -126,\n          -34,\n          -2,\n          66,\n          -97,\n          30,\n          14,\n          66,\n          127,\n          -53,\n          -84,\n          66,\n          117,\n          -30,\n          -49,\n          66,\n          -109,\n          -59,\n          -23,\n          66,\n          106,\n          -111,\n          -83,\n          66,\n          -67,\n          66,\n          -93,\n          66,\n          -95,\n          -99,\n          -64,\n          66,\n          -85,\n          -105,\n          -11,\n          66,\n          117,\n          28,\n          49,\n          66,\n          -95,\n          68,\n          116,\n          66,\n          83,\n          88,\n          -94,\n          66,\n          -125,\n          115,\n          89,\n          66,\n          -84,\n          123,\n          106,\n          66,\n          86,\n          -105,\n          35,\n          66,\n          -86,\n          90,\n     
     -89,\n          66,\n          104,\n          -121,\n          118,\n          66,\n          -105,\n          122,\n          59,\n          66,\n          -101,\n          114,\n          18,\n          66,\n          -94,\n          -32,\n          55,\n          66,\n          -79,\n          104,\n          58,\n          66,\n          106,\n          103,\n          -120,\n          66,\n          108,\n          35,\n          -40,\n          66,\n          118,\n          116,\n          11,\n          66,\n          114,\n          -105,\n          105,\n          66,\n          -81,\n          96,\n          19,\n          66,\n          -96,\n          7,\n          -107,\n          66,\n          91,\n          -33,\n          -2,\n          66,\n          123,\n          125,\n          125,\n          66,\n          -122,\n          47,\n          125,\n          66,\n          68,\n          -56,\n          65,\n          66,\n          -101,\n          50,\n          -35,\n          66,\n          -73,\n          -67,\n          -4,\n          66,\n          -105,\n          -120,\n          21,\n          66,\n          -88,\n          100,\n          70,\n          66,\n          -104,\n          -8,\n          -98,\n          66,\n          123,\n          120,\n          -3,\n          66,\n          -106,\n          4,\n          74,\n          66,\n          -102,\n          -11,\n          -14,\n          66,\n          115,\n          2,\n          -32,\n          66,\n          -69,\n          89,\n          -102,\n          66,\n          -123,\n          -69,\n          22,\n          66,\n          124,\n          28,\n          37,\n          66,\n          91,\n          46,\n          2,\n          66,\n          74,\n          94,\n          27,\n          66,\n          -90,\n          75,\n          -64,\n          66,\n          87,\n          -97,\n          -53,\n          66,\n          -76,\n          -108,\n          
105,\n          66,\n          -110,\n          -68,\n          -72,\n          66,\n          -107,\n          -118,\n          67,\n          66,\n          -103,\n          -66,\n          28,\n          66,\n          -79,\n          11,\n          38,\n          66,\n          -120,\n          -52,\n          33,\n          66,\n          88,\n          48,\n          -103,\n          66,\n          -99,\n          67,\n          117,\n          66,\n          -109,\n          -110,\n          -55,\n          66,\n          -63,\n          93,\n          -65,\n          66,\n          -115,\n          19,\n          -79,\n          66,\n          84,\n          -29,\n          100,\n          66,\n          -121,\n          -9,\n          41,\n          66,\n          -69,\n          28,\n          -80,\n          66,\n          -101,\n          -20,\n          -72,\n          66,\n          121,\n          -20,\n          -51,\n          66,\n          -95,\n          -107,\n          -54,\n          66,\n          -65,\n          32,\n          28,\n          66,\n          -100,\n          -97,\n          96,\n          66,\n          114,\n          -31,\n          -119,\n          66,\n          113,\n          35,\n          83,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1112837453,\n          1117086767,\n          975718871,\n          1013967445,\n          1117440845,\n          1032503558,\n          629218502,\n          1104077698,\n          724699304,\n          640674031,\n          753495493,\n          588216641,\n          581721979,\n          1093\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162259279,\n          1156767371,\n          1112830567,\n          
1031461285,\n          1117082416,\n          631266250,\n          581330083,\n          1102502788,\n          725178301,\n          768279937,\n          753851246,\n          645613321,\n          768198073,\n          1096\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -6113428335752356252,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          754238655,\n          303134165,\n          935192163,\n          354379938,\n          171411282,\n          919938218,\n          481872430,\n          195147813,\n          187248085,\n          473528881,\n          262749606,\n          483639285,\n          318319270,\n          498904418,\n          523308769,\n          731084623,\n          521846193,\n          362265005,\n          1017487010,\n          258991987,\n          103656913,\n          643804798,\n          901504089,\n          383989611,\n          883230441,\n          847214373,\n          707074923,\n          115531125,\n          584362039,\n          742046455,\n          107588339,\n          391829959,\n          1067542898,\n          329321707,\n          1026113227,\n          634501623,\n          35719918,\n          37406818,\n          937236853,\n          91827443,\n          895862653,\n          
90536699,\n          5\n        ],\n        \"cutValueData\": [\n          66,\n          -92,\n          -3,\n          98,\n          66,\n          -80,\n          90,\n          -29,\n          66,\n          74,\n          -38,\n          51,\n          66,\n          94,\n          4,\n          -48,\n          66,\n          -120,\n          80,\n          11,\n          66,\n          -91,\n          -118,\n          -70,\n          66,\n          92,\n          -127,\n          43,\n          66,\n          -66,\n          -74,\n          9,\n          66,\n          122,\n          -21,\n          93,\n          66,\n          -91,\n          103,\n          64,\n          66,\n          -91,\n          -113,\n          -117,\n          66,\n          124,\n          -18,\n          3,\n          66,\n          82,\n          -96,\n          -11,\n          66,\n          105,\n          126,\n          -18,\n          66,\n          93,\n          117,\n          -117,\n          66,\n          -98,\n          25,\n          -11,\n          66,\n          -94,\n          99,\n          -104,\n          66,\n          -111,\n          -117,\n          37,\n          66,\n          -110,\n          -102,\n          74,\n          66,\n          -73,\n          -8,\n          35,\n          66,\n          -99,\n          74,\n          8,\n          66,\n          95,\n          109,\n          -56,\n          66,\n          108,\n          -23,\n          -9,\n          66,\n          102,\n          107,\n          2,\n          66,\n          -63,\n          116,\n          124,\n          66,\n          84,\n          114,\n          -86,\n          66,\n          -124,\n          -88,\n          -117,\n          66,\n          -92,\n          -39,\n          -48,\n          66,\n          -121,\n          15,\n          -70,\n          66,\n          -101,\n          -113,\n          92,\n          66,\n          -100,\n          -121,\n          
-17,\n          66,\n          -112,\n          -75,\n          78,\n          66,\n          127,\n          33,\n          61,\n          66,\n          117,\n          -70,\n          57,\n          66,\n          82,\n          -1,\n          119,\n          66,\n          -76,\n          -35,\n          43,\n          66,\n          85,\n          78,\n          29,\n          66,\n          -123,\n          116,\n          -123,\n          66,\n          -93,\n          96,\n          7,\n          66,\n          -118,\n          -110,\n          111,\n          66,\n          78,\n          -64,\n          67,\n          66,\n          -121,\n          -24,\n          -40,\n          66,\n          -97,\n          70,\n          40,\n          66,\n          -99,\n          71,\n          64,\n          66,\n          -94,\n          124,\n          -55,\n          66,\n          82,\n          -2,\n          12,\n          66,\n          -113,\n          -22,\n          111,\n          66,\n          -118,\n          121,\n          -13,\n          66,\n          125,\n          111,\n          41,\n          66,\n          101,\n          4,\n          110,\n          66,\n          -120,\n          -75,\n          20,\n          66,\n          -97,\n          86,\n          66,\n          66,\n          106,\n          67,\n          -41,\n          66,\n          -106,\n          -68,\n          -17,\n          66,\n          -80,\n          115,\n          -105,\n          66,\n          -81,\n          52,\n          44,\n          66,\n          -99,\n          -67,\n          -30,\n          66,\n          -70,\n          -128,\n          -37,\n          66,\n          126,\n          -37,\n          80,\n          66,\n          -105,\n          85,\n          118,\n          66,\n          -60,\n          -17,\n          116,\n          66,\n          -85,\n          -71,\n          -77,\n          66,\n          -83,\n          -12,\n          
26,\n          66,\n          -106,\n          19,\n          47,\n          66,\n          120,\n          -85,\n          24,\n          66,\n          -107,\n          44,\n          19,\n          66,\n          -111,\n          -116,\n          45,\n          66,\n          -82,\n          -90,\n          -93,\n          66,\n          -103,\n          118,\n          43,\n          66,\n          -124,\n          -47,\n          -16,\n          66,\n          97,\n          -125,\n          -31,\n          66,\n          -83,\n          51,\n          -104,\n          66,\n          -88,\n          70,\n          15,\n          66,\n          105,\n          88,\n          -87,\n          66,\n          -92,\n          86,\n          -72,\n          66,\n          119,\n          120,\n          -121,\n          66,\n          -128,\n          -128,\n          82,\n          66,\n          -79,\n          35,\n          -105,\n          66,\n          -74,\n          42,\n          -112,\n          66,\n          -76,\n          -53,\n          85,\n          66,\n          127,\n          -114,\n          -89,\n          66,\n          127,\n          110,\n          24,\n          66,\n          -99,\n          78,\n          18,\n          66,\n          -89,\n          -123,\n          -30,\n          66,\n          -99,\n          123,\n          -66,\n          66,\n          -128,\n          20,\n          79,\n          66,\n          93,\n          112,\n          16,\n          66,\n          -127,\n          74,\n          70,\n          66,\n          -73,\n          46,\n          12,\n          66,\n          -101,\n          29,\n          -120,\n          66,\n          115,\n          104,\n          105,\n          66,\n          115,\n          75,\n          -122,\n          66,\n          -88,\n          17,\n          -20,\n          66,\n          -104,\n          -54,\n          68,\n          66,\n          -91,\n          54,\n       
   101,\n          66,\n          -126,\n          81,\n          -23,\n          66,\n          126,\n          -1,\n          96,\n          66,\n          103,\n          -16,\n          121,\n          66,\n          -86,\n          83,\n          -36,\n          66,\n          122,\n          9,\n          57,\n          66,\n          -104,\n          20,\n          1,\n          66,\n          -114,\n          -20,\n          14,\n          66,\n          118,\n          -4,\n          -108,\n          66,\n          -107,\n          35,\n          -59,\n          66,\n          107,\n          116,\n          -108,\n          66,\n          87,\n          -62,\n          29,\n          66,\n          78,\n          -36,\n          125,\n          66,\n          104,\n          -4,\n          -65,\n          66,\n          -76,\n          119,\n          -8,\n          66,\n          -103,\n          54,\n          -35,\n          66,\n          -124,\n          109,\n          -103,\n          66,\n          -95,\n          -14,\n          22,\n          66,\n          -110,\n          44,\n          68,\n          66,\n          -79,\n          -102,\n          29,\n          66,\n          -118,\n          -126,\n          83,\n          66,\n          -82,\n          20,\n          45,\n          66,\n          -102,\n          -50,\n          -52,\n          66,\n          -123,\n          -41,\n          -113,\n          66,\n          -125,\n          75,\n          -105,\n          66,\n          -110,\n          -34,\n          -120,\n          66,\n          -103,\n          90,\n          -122,\n          66,\n          -60,\n          24,\n          -98,\n          66,\n          -97,\n          -85,\n          -62,\n          66,\n          82,\n          -30,\n          -78,\n          66,\n          -79,\n          93,\n          -64,\n          66,\n          109,\n          14,\n          97,\n          66,\n          -71,\n          83,\n   
       85,\n          66,\n          111,\n          53,\n          22,\n          66,\n          -106,\n          -23,\n          79,\n          66,\n          -79,\n          -88,\n          -27,\n          66,\n          -59,\n          4,\n          36,\n          66,\n          -121,\n          41,\n          -39,\n          66,\n          125,\n          71,\n          -45,\n          66,\n          96,\n          72,\n          -92,\n          66,\n          81,\n          -83,\n          91,\n          66,\n          -90,\n          -79,\n          15,\n          66,\n          -111,\n          8,\n          -2,\n          66,\n          -90,\n          -90,\n          -51,\n          66,\n          -85,\n          -36,\n          -106,\n          66,\n          122,\n          76,\n          35,\n          66,\n          -99,\n          -104,\n          -23,\n          66,\n          117,\n          10,\n          22,\n          66,\n          104,\n          -117,\n          28,\n          66,\n          106,\n          86,\n          -50,\n          66,\n          -86,\n          -109,\n          -31,\n          66,\n          -97,\n          71,\n          -106,\n          66,\n          -125,\n          125,\n          -109,\n          66,\n          -103,\n          -81,\n          -61,\n          66,\n          -88,\n          51,\n          69,\n          66,\n          -80,\n          51,\n          -108,\n          66,\n          127,\n          102,\n          55,\n          66,\n          -83,\n          -121,\n          80,\n          66,\n          -99,\n          7,\n          -47,\n          66,\n          -116,\n          30,\n          104,\n          66,\n          108,\n          87,\n          96,\n          66,\n          -126,\n          -120,\n          -49,\n          66,\n          -88,\n          122,\n          58,\n          66,\n          -71,\n          -65,\n          59,\n          66,\n          -75,\n          -31,\n       
   -109,\n          66,\n          -91,\n          -9,\n          117,\n          66,\n          -106,\n          20,\n          -70,\n          66,\n          126,\n          -76,\n          63,\n          66,\n          -90,\n          -113,\n          -49,\n          66,\n          -69,\n          -101,\n          102,\n          66,\n          84,\n          1,\n          -80,\n          66,\n          -118,\n          58,\n          118,\n          66,\n          -106,\n          -8,\n          -90,\n          66,\n          -108,\n          122,\n          125,\n          66,\n          -79,\n          69,\n          -124,\n          66,\n          -69,\n          39,\n          2,\n          66,\n          -111,\n          -92,\n          110,\n          66,\n          -77,\n          75,\n          61,\n          66,\n          72,\n          -99,\n          -27,\n          66,\n          -90,\n          -49,\n          -90,\n          66,\n          92,\n          -52,\n          -71,\n          66,\n          -112,\n          32,\n          -90,\n          66,\n          -106,\n          -108,\n          -15,\n          66,\n          -70,\n          -50,\n          -65,\n          66,\n          99,\n          -120,\n          78,\n          66,\n          -118,\n          -105,\n          -6,\n          66,\n          -127,\n          -50,\n          108,\n          66,\n          110,\n          -27,\n          -122,\n          66,\n          -112,\n          -108,\n          -43,\n          66,\n          102,\n          -55,\n          1,\n          66,\n          -78,\n          83,\n          -109,\n          66,\n          114,\n          50,\n          -24,\n          66,\n          -125,\n          99,\n          87,\n          66,\n          -121,\n          -13,\n          -64,\n          66,\n          -107,\n          79,\n          70,\n          66,\n          -94,\n          110,\n          -85,\n          66,\n          -98,\n          
-78,\n          -60,\n          66,\n          122,\n          37,\n          98,\n          66,\n          -94,\n          57,\n          -63,\n          66,\n          104,\n          -66,\n          -7,\n          66,\n          -108,\n          22,\n          -122,\n          66,\n          115,\n          -20,\n          -122,\n          66,\n          103,\n          -117,\n          -128,\n          66,\n          102,\n          -128,\n          62,\n          66,\n          -125,\n          -3,\n          96,\n          66,\n          -120,\n          -62,\n          82,\n          66,\n          118,\n          67,\n          -114,\n          66,\n          -127,\n          46,\n          92,\n          66,\n          -126,\n          59,\n          70,\n          66,\n          -70,\n          -33,\n          -6,\n          66,\n          -102,\n          -8,\n          -24,\n          66,\n          -125,\n          25,\n          12,\n          66,\n          -109,\n          14,\n          125,\n          66,\n          -119,\n          -110,\n          -60,\n          66,\n          -82,\n          35,\n          -34,\n          66,\n          -123,\n          24,\n          -97,\n          66,\n          -112,\n          28,\n          -20,\n          66,\n          -105,\n          -54,\n          -56,\n          66,\n          -83,\n          -15,\n          -39,\n          66,\n          121,\n          -67,\n          -44,\n          66,\n          -105,\n          34,\n          88,\n          66,\n          -110,\n          -9,\n          114,\n          66,\n          -111,\n          -93,\n          -94,\n          66,\n          122,\n          72,\n          67,\n          66,\n          -102,\n          65,\n          -45,\n          66,\n          -98,\n          -85,\n          -72,\n          66,\n          -86,\n          -91,\n          -77,\n          66,\n          -127,\n          17,\n          -75,\n          66,\n          
96,\n          -59,\n          36,\n          66,\n          -120,\n          62,\n          13,\n          66,\n          -82,\n          16,\n          -60,\n          66,\n          -115,\n          -102,\n          4,\n          66,\n          -95,\n          -63,\n          -27,\n          66,\n          -102,\n          55,\n          -54,\n          66,\n          -102,\n          31,\n          -49,\n          66,\n          -112,\n          -126,\n          32,\n          66,\n          -85,\n          55,\n          60,\n          66,\n          88,\n          84,\n          95,\n          66,\n          -111,\n          -111,\n          14,\n          66,\n          -102,\n          -37,\n          34,\n          66,\n          -123,\n          30,\n          68,\n          66,\n          -121,\n          84,\n          42,\n          66,\n          -72,\n          -109,\n          -53,\n          66,\n          -105,\n          51,\n          89,\n          66,\n          -92,\n          107,\n          -83,\n          66,\n          81,\n          116,\n          62,\n          66,\n          125,\n          -7,\n          31,\n          66,\n          110,\n          94,\n          114,\n          66,\n          -121,\n          17,\n          -12,\n          66,\n          -103,\n          115,\n          -37,\n          66,\n          -113,\n          64,\n          -92,\n          66,\n          -124,\n          -12,\n          -76,\n          66,\n          -71,\n          12,\n          7,\n          66,\n          -114,\n          113,\n          -92,\n          66,\n          -107,\n          -58,\n          8,\n          66,\n          -77,\n          -128,\n          103,\n          66,\n          122,\n          -50,\n          34,\n          66,\n          -123,\n          42,\n          4,\n          66,\n          -95,\n          18,\n          -7,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n       
   0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 253,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1161552878,\n          970915724,\n          1162082041,\n          1147201774,\n          711874240,\n          1025966572,\n          1016440232,\n          1156918381,\n          774287572,\n          1098407515,\n          758651710,\n          582803878,\n          712042646,\n          391\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162084076,\n          1104838865,\n          772712942,\n          1155331957,\n          1018061585,\n          773239120,\n          1159870334,\n          769517485,\n          1018586147,\n          969102601,\n          715054135,\n          582786302,\n          753320561,\n          364\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -1388210874864416000,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          1,\n          31,\n          255,\n          425622219,\n          144909231,\n          844096932,\n          408239312,\n          581172034,\n          16895927,\n          857323289,\n          614083712,\n          768110542,\n          599362554,\n   
       494134646,\n          246430980,\n          722192539,\n          574816296,\n          387821868,\n          69295080,\n          870210871,\n          503884653,\n          816670625,\n          853067783,\n          810444633,\n          713880196,\n          755881543,\n          768770204,\n          639365931,\n          612809248,\n          625484104,\n          257958187,\n          520591636,\n          141167312,\n          374792846,\n          755614941,\n          527588315,\n          169125597,\n          491698020,\n          619845819,\n          405795430,\n          851208309,\n          817350660,\n          314382308,\n          383822855,\n          744296626,\n          7780\n        ],\n        \"cutValueData\": [\n          66,\n          -60,\n          -48,\n          100,\n          66,\n          -73,\n          116,\n          -105,\n          66,\n          -101,\n          -6,\n          -47,\n          66,\n          -88,\n          -52,\n          126,\n          66,\n          90,\n          118,\n          -34,\n          66,\n          102,\n          -91,\n          -120,\n          66,\n          112,\n          79,\n          82,\n          66,\n          -85,\n          -40,\n          -4,\n          66,\n          86,\n          49,\n          -38,\n          66,\n          94,\n          76,\n          32,\n          66,\n          -105,\n          -17,\n          109,\n          66,\n          -61,\n          19,\n          121,\n          66,\n          109,\n          -52,\n          91,\n          66,\n          -72,\n          -36,\n          67,\n          66,\n          -73,\n          -118,\n          -119,\n          66,\n          81,\n          64,\n          -77,\n          66,\n          -105,\n          125,\n          14,\n          66,\n          -103,\n          -23,\n          -104,\n          66,\n          -126,\n          59,\n          -120,\n          66,\n          -90,\n          -105,\n    
      -94,\n          66,\n          85,\n          67,\n          -23,\n          66,\n          69,\n          -31,\n          -106,\n          66,\n          -75,\n          102,\n          105,\n          66,\n          -88,\n          94,\n          -105,\n          66,\n          -124,\n          49,\n          17,\n          66,\n          -85,\n          11,\n          -16,\n          66,\n          115,\n          -82,\n          36,\n          66,\n          -88,\n          87,\n          25,\n          66,\n          126,\n          44,\n          -25,\n          66,\n          -85,\n          21,\n          -16,\n          66,\n          -89,\n          -76,\n          -91,\n          66,\n          -96,\n          -6,\n          -25,\n          66,\n          -117,\n          47,\n          33,\n          66,\n          107,\n          4,\n          -78,\n          66,\n          -106,\n          -105,\n          -59,\n          66,\n          -66,\n          103,\n          17,\n          66,\n          -115,\n          8,\n          -60,\n          66,\n          -117,\n          91,\n          -71,\n          66,\n          99,\n          44,\n          -35,\n          66,\n          -90,\n          -102,\n          31,\n          66,\n          -88,\n          16,\n          42,\n          66,\n          -120,\n          -62,\n          -68,\n          66,\n          -93,\n          29,\n          -13,\n          66,\n          -92,\n          -38,\n          110,\n          66,\n          78,\n          75,\n          -119,\n          66,\n          -65,\n          -1,\n          35,\n          66,\n          -83,\n          3,\n          121,\n          66,\n          -70,\n          31,\n          25,\n          66,\n          -116,\n          42,\n          8,\n          66,\n          -120,\n          107,\n          -85,\n          66,\n          -67,\n          -4,\n          -77,\n          66,\n          -122,\n          32,\n          
94,\n          66,\n          -97,\n          -89,\n          2,\n          66,\n          107,\n          109,\n          66,\n          66,\n          -107,\n          99,\n          76,\n          66,\n          117,\n          78,\n          -119,\n          66,\n          74,\n          35,\n          93,\n          66,\n          -91,\n          -66,\n          16,\n          66,\n          -70,\n          -70,\n          113,\n          66,\n          111,\n          -60,\n          109,\n          66,\n          -62,\n          47,\n          50,\n          66,\n          97,\n          126,\n          -30,\n          66,\n          -66,\n          67,\n          61,\n          66,\n          -118,\n          -9,\n          -110,\n          66,\n          -72,\n          -71,\n          38,\n          66,\n          127,\n          87,\n          19,\n          66,\n          -110,\n          -14,\n          47,\n          66,\n          -67,\n          -118,\n          -69,\n          66,\n          -103,\n          20,\n          -121,\n          66,\n          -83,\n          -32,\n          25,\n          66,\n          -76,\n          78,\n          68,\n          66,\n          -127,\n          -64,\n          2,\n          66,\n          -74,\n          -58,\n          19,\n          66,\n          -76,\n          -6,\n          99,\n          66,\n          -119,\n          -103,\n          73,\n          66,\n          -123,\n          -11,\n          -52,\n          66,\n          -109,\n          -61,\n          84,\n          66,\n          -128,\n          79,\n          -118,\n          66,\n          -109,\n          -22,\n          39,\n          66,\n          68,\n          12,\n          -90,\n          66,\n          124,\n          58,\n          -27,\n          66,\n          -96,\n          29,\n          63,\n          66,\n          -117,\n          21,\n          -23,\n          66,\n          -121,\n          -125,\n          
-58,\n          66,\n          -103,\n          121,\n          -54,\n          66,\n          -105,\n          69,\n          15,\n          66,\n          -103,\n          -16,\n          113,\n          66,\n          -100,\n          -111,\n          -24,\n          66,\n          125,\n          24,\n          6,\n          66,\n          -90,\n          111,\n          8,\n          66,\n          -84,\n          115,\n          -110,\n          66,\n          109,\n          -72,\n          -111,\n          66,\n          -119,\n          -19,\n          -67,\n          66,\n          -95,\n          74,\n          -21,\n          66,\n          -117,\n          107,\n          -126,\n          66,\n          103,\n          54,\n          -68,\n          66,\n          111,\n          -44,\n          -105,\n          66,\n          -125,\n          25,\n          -12,\n          66,\n          -111,\n          62,\n          -103,\n          66,\n          -95,\n          90,\n          20,\n          66,\n          -66,\n          -87,\n          28,\n          66,\n          -115,\n          26,\n          -9,\n          66,\n          -77,\n          -17,\n          12,\n          66,\n          -112,\n          -44,\n          -35,\n          66,\n          -68,\n          38,\n          -35,\n          66,\n          -125,\n          47,\n          -30,\n          66,\n          -71,\n          -7,\n          -62,\n          66,\n          -94,\n          48,\n          7,\n          66,\n          -120,\n          -43,\n          1,\n          66,\n          -86,\n          -11,\n          62,\n          66,\n          -91,\n          -21,\n          40,\n          66,\n          123,\n          120,\n          45,\n          66,\n          -80,\n          108,\n          62,\n          66,\n          101,\n          56,\n          7,\n          66,\n          -75,\n          -13,\n          124,\n          66,\n          -113,\n          2,\n         
 78,\n          66,\n          -82,\n          52,\n          -11,\n          66,\n          -96,\n          95,\n          -58,\n          66,\n          -83,\n          -38,\n          114,\n          66,\n          -98,\n          52,\n          32,\n          66,\n          -96,\n          19,\n          -78,\n          66,\n          -76,\n          56,\n          -2,\n          66,\n          -110,\n          43,\n          4,\n          66,\n          80,\n          125,\n          -61,\n          66,\n          124,\n          -45,\n          -38,\n          66,\n          -112,\n          122,\n          -49,\n          66,\n          -111,\n          43,\n          -18,\n          66,\n          -112,\n          -8,\n          -126,\n          66,\n          90,\n          -97,\n          104,\n          66,\n          -106,\n          78,\n          11,\n          66,\n          -70,\n          86,\n          -55,\n          66,\n          -109,\n          60,\n          -44,\n          66,\n          -117,\n          -92,\n          100,\n          66,\n          -117,\n          70,\n          -47,\n          66,\n          -118,\n          -109,\n          80,\n          66,\n          98,\n          -91,\n          -56,\n          66,\n          -64,\n          44,\n          -57,\n          66,\n          -110,\n          8,\n          -67,\n          66,\n          -118,\n          117,\n          -126,\n          66,\n          -110,\n          -128,\n          110,\n          66,\n          -85,\n          48,\n          31,\n          66,\n          -115,\n          -125,\n          -29,\n          66,\n          85,\n          -109,\n          -55,\n          66,\n          -115,\n          -96,\n          -63,\n          66,\n          -70,\n          97,\n          -114,\n          66,\n          -96,\n          112,\n          -88,\n          66,\n          -105,\n          0,\n          27,\n          66,\n          -98,\n          114,\n   
       84,\n          66,\n          68,\n          70,\n          11,\n          66,\n          -106,\n          -54,\n          -123,\n          66,\n          -128,\n          27,\n          -62,\n          66,\n          -86,\n          31,\n          -11,\n          66,\n          -116,\n          21,\n          0,\n          66,\n          -108,\n          -90,\n          -12,\n          66,\n          -78,\n          -59,\n          89,\n          66,\n          -121,\n          27,\n          8,\n          66,\n          -104,\n          -92,\n          85,\n          66,\n          -104,\n          -68,\n          116,\n          66,\n          -91,\n          125,\n          -78,\n          66,\n          -89,\n          95,\n          -106,\n          66,\n          86,\n          33,\n          105,\n          66,\n          82,\n          -28,\n          4,\n          66,\n          -103,\n          -61,\n          -121,\n          66,\n          121,\n          25,\n          38,\n          66,\n          103,\n          10,\n          -63,\n          66,\n          -82,\n          21,\n          60,\n          66,\n          -109,\n          -67,\n          -81,\n          66,\n          -104,\n          24,\n          46,\n          66,\n          -128,\n          -36,\n          -15,\n          66,\n          -118,\n          -59,\n          -1,\n          66,\n          -121,\n          -38,\n          69,\n          66,\n          113,\n          -58,\n          -65,\n          66,\n          -121,\n          -71,\n          52,\n          66,\n          -97,\n          -44,\n          22,\n          66,\n          -60,\n          -96,\n          7,\n          66,\n          -99,\n          45,\n          34,\n          66,\n          -106,\n          -14,\n          86,\n          66,\n          -97,\n          29,\n          -106,\n          66,\n          -81,\n          84,\n          127,\n          66,\n          -111,\n          -89,\n     
     -81,\n          66,\n          -71,\n          -90,\n          124,\n          66,\n          -117,\n          -1,\n          -4,\n          66,\n          -120,\n          -73,\n          126,\n          66,\n          -119,\n          14,\n          -30,\n          66,\n          92,\n          100,\n          -10,\n          66,\n          -77,\n          -127,\n          -9,\n          66,\n          -124,\n          118,\n          -118,\n          66,\n          93,\n          -49,\n          -98,\n          66,\n          -119,\n          -86,\n          -1,\n          66,\n          106,\n          -119,\n          -124,\n          66,\n          -126,\n          99,\n          75,\n          66,\n          -103,\n          -11,\n          30,\n          66,\n          -117,\n          46,\n          -22,\n          66,\n          -115,\n          89,\n          -33,\n          66,\n          -89,\n          -11,\n          29,\n          66,\n          -78,\n          59,\n          14,\n          66,\n          -112,\n          -17,\n          65,\n          66,\n          -69,\n          -79,\n          -21,\n          66,\n          -78,\n          -55,\n          23,\n          66,\n          -89,\n          102,\n          -43,\n          66,\n          -100,\n          -95,\n          -76,\n          66,\n          -100,\n          124,\n          -110,\n          66,\n          -103,\n          57,\n          -39,\n          66,\n          -109,\n          -26,\n          -34,\n          66,\n          -66,\n          102,\n          -21,\n          66,\n          92,\n          105,\n          6,\n          66,\n          -94,\n          -110,\n          0,\n          66,\n          -84,\n          -106,\n          -57,\n          66,\n          -121,\n          36,\n          -67,\n          66,\n          -125,\n          -123,\n          6,\n          66,\n          -101,\n          -95,\n          -91,\n          66,\n          -111,\n     
     59,\n          104,\n          66,\n          -115,\n          123,\n          -48,\n          66,\n          -92,\n          -102,\n          11,\n          66,\n          88,\n          -95,\n          48,\n          66,\n          92,\n          27,\n          41,\n          66,\n          -119,\n          -35,\n          -76,\n          66,\n          -122,\n          -64,\n          53,\n          66,\n          -122,\n          -77,\n          -127,\n          66,\n          -123,\n          99,\n          65,\n          66,\n          -116,\n          95,\n          -121,\n          66,\n          121,\n          -67,\n          108,\n          66,\n          -65,\n          92,\n          86,\n          66,\n          -126,\n          -11,\n          15,\n          66,\n          -105,\n          -16,\n          96,\n          66,\n          94,\n          83,\n          -63,\n          66,\n          -101,\n          31,\n          46,\n          66,\n          -95,\n          42,\n          -103,\n          66,\n          -82,\n          -100,\n          -36,\n          66,\n          92,\n          19,\n          -23,\n          66,\n          -106,\n          -75,\n          -54,\n          66,\n          -124,\n          -104,\n          89,\n          66,\n          117,\n          71,\n          2,\n          66,\n          -73,\n          11,\n          111,\n          66,\n          -127,\n          -14,\n          50,\n          66,\n          110,\n          -13,\n          112,\n          66,\n          -105,\n          -118,\n          -45,\n          66,\n          -81,\n          0,\n          -95,\n          66,\n          -84,\n          -47,\n          -42,\n          66,\n          -94,\n          98,\n          114,\n          66,\n          -123,\n          63,\n          -28,\n          66,\n          123,\n          -2,\n          4,\n          66,\n          -107,\n          72,\n          51,\n          66,\n          72,\n     
     67,\n          -102,\n          66,\n          -98,\n          -46,\n          -29,\n          66,\n          -99,\n          78,\n          90,\n          66,\n          -120,\n          87,\n          28,\n          66,\n          106,\n          -15,\n          29,\n          66,\n          -116,\n          -108,\n          -121,\n          66,\n          103,\n          -61,\n          -121,\n          66,\n          -77,\n          -45,\n          -73,\n          66,\n          100,\n          -116,\n          -127,\n          66,\n          -113,\n          101,\n          81,\n          66,\n          125,\n          15,\n          54,\n          66,\n          -120,\n          -71,\n          11\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 255,\n        \"leftIndex\": [\n          0,\n          1,\n          255,\n          1071445919,\n          530489087,\n          567670760,\n          668846143,\n          18740279,\n          266131074,\n          557802017,\n          840968560,\n          0\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          255,\n          360626143,\n          797186431,\n          32615112,\n          384598228,\n          281864855,\n          242933764,\n          619652,\n          709388776,\n          6272\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": -1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": -1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5115030242711725490,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": 
false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          108505767,\n          479371230,\n          476801865,\n          129435123,\n          357886958,\n          879169134,\n          440351138,\n          456493873,\n          398023646,\n          771081298,\n          249470198,\n          645773885,\n          1039967609,\n          938438618,\n          1038667563,\n          78167277,\n          230547394,\n          1016853958,\n          978511219,\n          370245062,\n          395871426,\n          387233217,\n          802873034,\n          1016568690,\n          1056001443,\n          590855073,\n          460443587,\n          97898103,\n          513088234,\n          1069505259,\n          624010986,\n          1052219045,\n          133424678,\n          663848258,\n          656512707,\n          926770483,\n          212651459,\n          922220895,\n          861469929,\n          320138303,\n          459621995,\n          482134238,\n          23\n        ],\n        \"cutValueData\": [\n          66,\n          -71,\n          50,\n          -39,\n          66,\n          -104,\n          37,\n          46,\n          66,\n          120,\n          14,\n          77,\n          66,\n          -63,\n          81,\n          -111,\n          66,\n          -123,\n          61,\n          34,\n          66,\n          75,\n          -113,\n          110,\n          66,\n          -95,\n          115,\n          -122,\n          66,\n          -79,\n          108,\n          -117,\n          66,\n          -114,\n          74,\n          -46,\n          66,\n          -112,\n          88,\n          -9,\n          66,\n          -124,\n          -61,\n          -58,\n          66,\n          104,\n          119,\n          112,\n          66,\n        
  -67,\n          -27,\n          -48,\n          66,\n          -78,\n          106,\n          -107,\n          66,\n          -92,\n          -55,\n          -90,\n          66,\n          -95,\n          -127,\n          69,\n          66,\n          -81,\n          -27,\n          117,\n          66,\n          -124,\n          4,\n          -116,\n          66,\n          -122,\n          -28,\n          -70,\n          66,\n          -108,\n          -3,\n          -18,\n          66,\n          -92,\n          -83,\n          71,\n          66,\n          -100,\n          -63,\n          -121,\n          66,\n          -101,\n          8,\n          94,\n          66,\n          117,\n          114,\n          -5,\n          66,\n          -77,\n          -47,\n          -51,\n          66,\n          -70,\n          52,\n          48,\n          66,\n          125,\n          77,\n          14,\n          66,\n          -84,\n          -2,\n          -79,\n          66,\n          68,\n          3,\n          -28,\n          66,\n          -81,\n          100,\n          37,\n          66,\n          75,\n          -68,\n          -47,\n          66,\n          -120,\n          32,\n          -44,\n          66,\n          -114,\n          -10,\n          11,\n          66,\n          72,\n          -82,\n          -64,\n          66,\n          -103,\n          96,\n          -79,\n          66,\n          -110,\n          -17,\n          -90,\n          66,\n          -103,\n          119,\n          38,\n          66,\n          76,\n          -11,\n          -32,\n          66,\n          -113,\n          -5,\n          94,\n          66,\n          -86,\n          119,\n          -75,\n          66,\n          -109,\n          113,\n          -40,\n          66,\n          -113,\n          51,\n          -57,\n          66,\n          -79,\n          29,\n          -53,\n          66,\n          117,\n          -43,\n          19,\n          66,\n     
     -77,\n          -111,\n          0,\n          66,\n          -102,\n          46,\n          85,\n          66,\n          -104,\n          -127,\n          84,\n          66,\n          -82,\n          79,\n          118,\n          66,\n          -104,\n          -49,\n          -33,\n          66,\n          -71,\n          -55,\n          124,\n          66,\n          -99,\n          -14,\n          -75,\n          66,\n          -112,\n          113,\n          -44,\n          66,\n          -112,\n          -92,\n          53,\n          66,\n          115,\n          -113,\n          -109,\n          66,\n          -120,\n          -103,\n          -63,\n          66,\n          -90,\n          37,\n          86,\n          66,\n          -117,\n          33,\n          -20,\n          66,\n          -105,\n          67,\n          -94,\n          66,\n          97,\n          1,\n          -109,\n          66,\n          -109,\n          16,\n          28,\n          66,\n          114,\n          -125,\n          83,\n          66,\n          -108,\n          113,\n          -70,\n          66,\n          -114,\n          -11,\n          -8,\n          66,\n          -122,\n          89,\n          50,\n          66,\n          -122,\n          -102,\n          -2,\n          66,\n          -102,\n          72,\n          9,\n          66,\n          -99,\n          44,\n          -117,\n          66,\n          121,\n          119,\n          105,\n          66,\n          -105,\n          14,\n          -16,\n          66,\n          -64,\n          91,\n          -58,\n          66,\n          -117,\n          68,\n          -40,\n          66,\n          -80,\n          13,\n          7,\n          66,\n          -118,\n          81,\n          -107,\n          66,\n          123,\n          31,\n          17,\n          66,\n          95,\n          111,\n          -82,\n          66,\n          -105,\n          -127,\n          -119,\n         
 66,\n          -81,\n          114,\n          -99,\n          66,\n          -96,\n          71,\n          -66,\n          66,\n          -93,\n          -103,\n          35,\n          66,\n          -107,\n          43,\n          89,\n          66,\n          -94,\n          -89,\n          -8,\n          66,\n          -120,\n          -66,\n          121,\n          66,\n          -75,\n          97,\n          2,\n          66,\n          -90,\n          -8,\n          54,\n          66,\n          -117,\n          30,\n          69,\n          66,\n          -116,\n          -70,\n          -102,\n          66,\n          -70,\n          -1,\n          -94,\n          66,\n          -71,\n          14,\n          -91,\n          66,\n          -122,\n          -46,\n          49,\n          66,\n          110,\n          -81,\n          15,\n          66,\n          124,\n          73,\n          28,\n          66,\n          92,\n          57,\n          67,\n          66,\n          113,\n          98,\n          58,\n          66,\n          111,\n          3,\n          -111,\n          66,\n          -124,\n          -6,\n          49,\n          66,\n          103,\n          35,\n          122,\n          66,\n          119,\n          8,\n          -3,\n          66,\n          -99,\n          -25,\n          125,\n          66,\n          108,\n          -17,\n          -35,\n          66,\n          -117,\n          -53,\n          4,\n          66,\n          -71,\n          121,\n          -49,\n          66,\n          110,\n          -86,\n          117,\n          66,\n          -104,\n          -21,\n          48,\n          66,\n          -111,\n          -67,\n          125,\n          66,\n          -101,\n          75,\n          79,\n          66,\n          127,\n          -66,\n          -45,\n          66,\n          -96,\n          23,\n          -114,\n          66,\n          -90,\n          -89,\n          -114,\n          
66,\n          -84,\n          -94,\n          116,\n          66,\n          -100,\n          -123,\n          -50,\n          66,\n          -85,\n          84,\n          6,\n          66,\n          88,\n          -124,\n          116,\n          66,\n          126,\n          38,\n          -23,\n          66,\n          -89,\n          -100,\n          -93,\n          66,\n          -95,\n          -102,\n          -114,\n          66,\n          -127,\n          -110,\n          111,\n          66,\n          111,\n          -28,\n          -119,\n          66,\n          -111,\n          -35,\n          -12,\n          66,\n          -81,\n          -31,\n          104,\n          66,\n          -77,\n          112,\n          37,\n          66,\n          -101,\n          -107,\n          16,\n          66,\n          -80,\n          -31,\n          47,\n          66,\n          -103,\n          -16,\n          -118,\n          66,\n          -98,\n          -58,\n          -90,\n          66,\n          -82,\n          27,\n          -62,\n          66,\n          -94,\n          23,\n          67,\n          66,\n          -106,\n          75,\n          103,\n          66,\n          -113,\n          -102,\n          6,\n          66,\n          -84,\n          119,\n          16,\n          66,\n          85,\n          -128,\n          -82,\n          66,\n          -86,\n          -65,\n          19,\n          66,\n          106,\n          -121,\n          112,\n          66,\n          108,\n          -61,\n          -41,\n          66,\n          95,\n          112,\n          -28,\n          66,\n          122,\n          35,\n          83,\n          66,\n          124,\n          104,\n          35,\n          66,\n          -105,\n          -8,\n          -39,\n          66,\n          -125,\n          22,\n          -122,\n          66,\n          -96,\n          98,\n          -32,\n          66,\n          -90,\n          93,\n          
-24,\n          66,\n          119,\n          -27,\n          29,\n          66,\n          -127,\n          -50,\n          8,\n          66,\n          114,\n          95,\n          80,\n          66,\n          -95,\n          72,\n          -40,\n          66,\n          -95,\n          -22,\n          -71,\n          66,\n          -69,\n          -125,\n          -61,\n          66,\n          -65,\n          -22,\n          -125,\n          66,\n          110,\n          -124,\n          -42,\n          66,\n          -81,\n          26,\n          72,\n          66,\n          -115,\n          -15,\n          -84,\n          66,\n          93,\n          -102,\n          39,\n          66,\n          -126,\n          -90,\n          -88,\n          66,\n          -92,\n          -81,\n          127,\n          66,\n          -83,\n          7,\n          7,\n          66,\n          -99,\n          84,\n          117,\n          66,\n          100,\n          -66,\n          -4,\n          66,\n          -73,\n          22,\n          -36,\n          66,\n          -114,\n          63,\n          9,\n          66,\n          -103,\n          -43,\n          54,\n          66,\n          -95,\n          -127,\n          -36,\n          66,\n          119,\n          126,\n          71,\n          66,\n          -102,\n          -31,\n          -61,\n          66,\n          -120,\n          63,\n          -113,\n          66,\n          83,\n          22,\n          -45,\n          66,\n          -66,\n          -61,\n          13,\n          66,\n          -105,\n          124,\n          11,\n          66,\n          -99,\n          95,\n          118,\n          66,\n          -114,\n          -38,\n          -68,\n          66,\n          -119,\n          49,\n          94,\n          66,\n          -125,\n          14,\n          -68,\n          66,\n          -121,\n          117,\n          -38,\n          66,\n          108,\n          85,\n        
  90,\n          66,\n          109,\n          14,\n          -111,\n          66,\n          -89,\n          -6,\n          -20,\n          66,\n          102,\n          2,\n          40,\n          66,\n          -83,\n          20,\n          -98,\n          66,\n          -90,\n          -55,\n          -2,\n          66,\n          94,\n          -77,\n          70,\n          66,\n          -124,\n          -6,\n          81,\n          66,\n          -104,\n          -6,\n          -85,\n          66,\n          112,\n          -95,\n          68,\n          66,\n          -109,\n          21,\n          -42,\n          66,\n          -81,\n          74,\n          80,\n          66,\n          -94,\n          -107,\n          -2,\n          66,\n          -120,\n          -122,\n          47,\n          66,\n          -122,\n          -16,\n          27,\n          66,\n          108,\n          104,\n          82,\n          66,\n          125,\n          -98,\n          81,\n          66,\n          -102,\n          40,\n          -19,\n          66,\n          -100,\n          34,\n          47,\n          66,\n          -98,\n          111,\n          -82,\n          66,\n          -114,\n          86,\n          14,\n          66,\n          -71,\n          -81,\n          52,\n          66,\n          -110,\n          22,\n          -58,\n          66,\n          -108,\n          -105,\n          -53,\n          66,\n          89,\n          -34,\n          55,\n          66,\n          -76,\n          -30,\n          -114,\n          66,\n          -119,\n          -11,\n          -60,\n          66,\n          -125,\n          -102,\n          -30,\n          66,\n          -106,\n          8,\n          66,\n          66,\n          -109,\n          13,\n          -86,\n          66,\n          100,\n          -106,\n          44,\n          66,\n          100,\n          112,\n          -80,\n          66,\n          -82,\n          -29,\n       
   47,\n          66,\n          113,\n          -115,\n          -118,\n          66,\n          112,\n          74,\n          1,\n          66,\n          -116,\n          126,\n          84,\n          66,\n          -118,\n          -49,\n          122,\n          66,\n          -110,\n          -83,\n          78,\n          66,\n          -79,\n          63,\n          -57,\n          66,\n          -103,\n          29,\n          -8,\n          66,\n          92,\n          -82,\n          31,\n          66,\n          -84,\n          -60,\n          -40,\n          66,\n          94,\n          32,\n          -16,\n          66,\n          -120,\n          -17,\n          54,\n          66,\n          -76,\n          -111,\n          7,\n          66,\n          -80,\n          103,\n          -127,\n          66,\n          -110,\n          115,\n          72,\n          66,\n          -91,\n          -122,\n          60,\n          66,\n          -115,\n          -116,\n          -60,\n          66,\n          -103,\n          -124,\n          -123,\n          66,\n          99,\n          -30,\n          -20,\n          66,\n          70,\n          -102,\n          -26,\n          66,\n          -106,\n          -125,\n          -102,\n          66,\n          -106,\n          -59,\n          -119,\n          66,\n          -92,\n          -109,\n          -122,\n          66,\n          125,\n          -10,\n          -52,\n          66,\n          -125,\n          -54,\n          84,\n          66,\n          -120,\n          13,\n          78,\n          66,\n          118,\n          -93,\n          -67,\n          66,\n          -68,\n          -91,\n          50,\n          66,\n          -125,\n          62,\n          81,\n          66,\n          81,\n          -96,\n          13,\n          66,\n          -102,\n          42,\n          -101,\n          66,\n          -69,\n          -24,\n          -39,\n          66,\n          105,\n       
   -78,\n          -98,\n          66,\n          -115,\n          99,\n          -92,\n          66,\n          121,\n          75,\n          -68,\n          66,\n          106,\n          -19,\n          36,\n          66,\n          -81,\n          -102,\n          41,\n          66,\n          -97,\n          -31,\n          -6,\n          66,\n          -117,\n          63,\n          -90,\n          66,\n          -87,\n          52,\n          64,\n          66,\n          -67,\n          -26,\n          123,\n          66,\n          -113,\n          -74,\n          105,\n          66,\n          -83,\n          -80,\n          115,\n          66,\n          -91,\n          -50,\n          83,\n          66,\n          -114,\n          86,\n          -123,\n          66,\n          -122,\n          -35,\n          -14,\n          66,\n          -112,\n          34,\n          85,\n          66,\n          -120,\n          -9,\n          66,\n          66,\n          126,\n          93,\n          53,\n          66,\n          124,\n          39,\n          45,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 253,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1119207941,\n          640839029,\n          1155884080,\n          626391752,\n          643515029,\n          753398657,\n          760423210,\n          583460159,\n          970704040,\n          1016971657,\n          970736855,\n          1025956570,\n          973356061,\n          395\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1119195062,\n          1031467838,\n          1157478377,\n          640189574,\n          767675527,\n          638546156,\n          1160598265,\n          583263305,\n          984671869,\n   
       758122793,\n          581748520,\n          984523163,\n          581151145,\n          365\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -878626023249091604,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          920336034,\n          1056012206,\n          1010822202,\n          346413815,\n          173243682,\n          925326126,\n          873813705,\n          652821547,\n          1042528895,\n          919783727,\n          368819802,\n          635762943,\n          173315539,\n          75338801,\n          1063726397,\n          346987043,\n          221824105,\n          518752847,\n          773672570,\n          596716713,\n          364584403,\n          647849797,\n          908967589,\n          112458541,\n          85831497,\n          445881671,\n          622167083,\n          913803195,\n          532380410,\n          628705195,\n          747981359,\n          804882686,\n          508992891,\n          801064179,\n          367967019,\n          590850150,\n          376736057,\n          624530799,\n          722766457,\n          393281118,\n          73266270,\n          749844714,\n          627\n        ],\n        \"cutValueData\": [\n          66,\n          -66,\n          -16,\n          34,\n       
   66,\n          -101,\n          -37,\n          -70,\n          66,\n          -95,\n          -60,\n          49,\n          66,\n          -124,\n          -128,\n          -118,\n          66,\n          -58,\n          103,\n          -102,\n          66,\n          -128,\n          78,\n          -119,\n          66,\n          -66,\n          -87,\n          -55,\n          66,\n          -93,\n          116,\n          6,\n          66,\n          -65,\n          -23,\n          102,\n          66,\n          73,\n          88,\n          -52,\n          66,\n          -124,\n          -60,\n          -6,\n          66,\n          -98,\n          22,\n          -111,\n          66,\n          -74,\n          83,\n          91,\n          66,\n          -80,\n          71,\n          -32,\n          66,\n          118,\n          103,\n          -24,\n          66,\n          -89,\n          3,\n          48,\n          66,\n          116,\n          -85,\n          10,\n          66,\n          -103,\n          53,\n          -90,\n          66,\n          -69,\n          117,\n          88,\n          66,\n          -69,\n          -41,\n          8,\n          66,\n          115,\n          4,\n          -44,\n          66,\n          123,\n          38,\n          90,\n          66,\n          -76,\n          -62,\n          7,\n          66,\n          -98,\n          -38,\n          9,\n          66,\n          -112,\n          4,\n          -8,\n          66,\n          -121,\n          -27,\n          85,\n          66,\n          -87,\n          -101,\n          -81,\n          66,\n          111,\n          -53,\n          52,\n          66,\n          -107,\n          88,\n          77,\n          66,\n          -81,\n          -82,\n          -96,\n          66,\n          -87,\n          111,\n          43,\n          66,\n          -101,\n          -82,\n          -122,\n          66,\n          108,\n          112,\n          86,\n          
66,\n          -99,\n          55,\n          -17,\n          66,\n          -125,\n          21,\n          -77,\n          66,\n          -91,\n          -105,\n          65,\n          66,\n          -122,\n          -49,\n          25,\n          66,\n          -128,\n          -34,\n          89,\n          66,\n          114,\n          64,\n          -43,\n          66,\n          -110,\n          -85,\n          88,\n          66,\n          122,\n          120,\n          121,\n          66,\n          -60,\n          82,\n          125,\n          66,\n          -111,\n          -56,\n          -87,\n          66,\n          123,\n          55,\n          16,\n          66,\n          125,\n          -24,\n          82,\n          66,\n          115,\n          -93,\n          -67,\n          66,\n          -88,\n          15,\n          -120,\n          66,\n          -59,\n          -128,\n          81,\n          66,\n          81,\n          12,\n          88,\n          66,\n          -117,\n          29,\n          -110,\n          66,\n          91,\n          74,\n          105,\n          66,\n          -102,\n          -26,\n          -105,\n          66,\n          -95,\n          -40,\n          63,\n          66,\n          -104,\n          -94,\n          61,\n          66,\n          -84,\n          22,\n          -67,\n          66,\n          -67,\n          -52,\n          -43,\n          66,\n          113,\n          50,\n          -76,\n          66,\n          -124,\n          59,\n          -59,\n          66,\n          -110,\n          28,\n          -74,\n          66,\n          -117,\n          52,\n          -46,\n          66,\n          -80,\n          88,\n          -77,\n          66,\n          -69,\n          103,\n          4,\n          66,\n          -90,\n          -44,\n          -18,\n          66,\n          116,\n          -39,\n          -83,\n          66,\n          -119,\n          -29,\n          -28,\n      
    66,\n          93,\n          -17,\n          64,\n          66,\n          -80,\n          -94,\n          -71,\n          66,\n          -72,\n          45,\n          -81,\n          66,\n          126,\n          -94,\n          -90,\n          66,\n          -99,\n          -34,\n          -115,\n          66,\n          -81,\n          -88,\n          -92,\n          66,\n          -79,\n          27,\n          -17,\n          66,\n          89,\n          113,\n          -51,\n          66,\n          -122,\n          -116,\n          -12,\n          66,\n          -119,\n          -36,\n          -87,\n          66,\n          -94,\n          54,\n          -7,\n          66,\n          -88,\n          22,\n          33,\n          66,\n          108,\n          -80,\n          2,\n          66,\n          88,\n          59,\n          47,\n          66,\n          98,\n          98,\n          -34,\n          66,\n          -120,\n          68,\n          -106,\n          66,\n          121,\n          5,\n          81,\n          66,\n          -100,\n          -67,\n          -25,\n          66,\n          77,\n          114,\n          -35,\n          66,\n          -77,\n          35,\n          108,\n          66,\n          -89,\n          -2,\n          -28,\n          66,\n          -116,\n          -82,\n          25,\n          66,\n          85,\n          18,\n          -11,\n          66,\n          92,\n          -117,\n          -92,\n          66,\n          -73,\n          -55,\n          14,\n          66,\n          -97,\n          -60,\n          113,\n          66,\n          -112,\n          -47,\n          63,\n          66,\n          -105,\n          122,\n          100,\n          66,\n          -126,\n          -39,\n          -47,\n          66,\n          -64,\n          -24,\n          4,\n          66,\n          -100,\n          -72,\n          97,\n          66,\n          119,\n          -115,\n          37,\n         
 66,\n          -99,\n          -57,\n          -124,\n          66,\n          -104,\n          -62,\n          -63,\n          66,\n          -74,\n          49,\n          113,\n          66,\n          -96,\n          -40,\n          102,\n          66,\n          -89,\n          18,\n          28,\n          66,\n          -92,\n          84,\n          22,\n          66,\n          -97,\n          119,\n          -19,\n          66,\n          -109,\n          -106,\n          121,\n          66,\n          -104,\n          79,\n          99,\n          66,\n          86,\n          42,\n          -17,\n          66,\n          -125,\n          -10,\n          -31,\n          66,\n          -127,\n          -70,\n          31,\n          66,\n          106,\n          -80,\n          -79,\n          66,\n          -119,\n          32,\n          52,\n          66,\n          89,\n          93,\n          -89,\n          66,\n          93,\n          56,\n          1,\n          66,\n          88,\n          123,\n          56,\n          66,\n          -111,\n          -95,\n          -101,\n          66,\n          -119,\n          41,\n          118,\n          66,\n          115,\n          -14,\n          2,\n          66,\n          110,\n          34,\n          117,\n          66,\n          -116,\n          110,\n          32,\n          66,\n          -122,\n          -114,\n          123,\n          66,\n          -116,\n          77,\n          19,\n          66,\n          -117,\n          -52,\n          41,\n          66,\n          85,\n          -98,\n          -48,\n          66,\n          93,\n          -69,\n          69,\n          66,\n          123,\n          -27,\n          31,\n          66,\n          -78,\n          -104,\n          43,\n          66,\n          104,\n          -1,\n          -118,\n          66,\n          -84,\n          -37,\n          95,\n          66,\n          -104,\n          -92,\n          54,\n          
66,\n          -103,\n          116,\n          -66,\n          66,\n          -97,\n          111,\n          -112,\n          66,\n          104,\n          65,\n          37,\n          66,\n          -70,\n          23,\n          88,\n          66,\n          -112,\n          -8,\n          -12,\n          66,\n          -107,\n          -50,\n          -27,\n          66,\n          127,\n          -31,\n          -110,\n          66,\n          -128,\n          -109,\n          -1,\n          66,\n          -96,\n          -18,\n          -110,\n          66,\n          -87,\n          31,\n          27,\n          66,\n          -88,\n          -103,\n          125,\n          66,\n          99,\n          -52,\n          -97,\n          66,\n          -71,\n          117,\n          -6,\n          66,\n          -117,\n          -50,\n          17,\n          66,\n          -97,\n          28,\n          -116,\n          66,\n          -127,\n          -48,\n          -38,\n          66,\n          -81,\n          93,\n          -87,\n          66,\n          -121,\n          -55,\n          59,\n          66,\n          -108,\n          -30,\n          -17,\n          66,\n          85,\n          12,\n          -19,\n          66,\n          74,\n          -74,\n          22,\n          66,\n          85,\n          11,\n          105,\n          66,\n          -96,\n          -106,\n          33,\n          66,\n          -119,\n          -52,\n          -57,\n          66,\n          115,\n          -16,\n          -3,\n          66,\n          -116,\n          46,\n          80,\n          66,\n          -88,\n          55,\n          112,\n          66,\n          116,\n          8,\n          101,\n          66,\n          -88,\n          46,\n          32,\n          66,\n          97,\n          103,\n          9,\n          66,\n          -127,\n          34,\n          72,\n          66,\n          -94,\n          -93,\n          55,\n          
66,\n          -84,\n          -89,\n          82,\n          66,\n          -92,\n          7,\n          4,\n          66,\n          127,\n          -4,\n          36,\n          66,\n          -77,\n          -7,\n          -1,\n          66,\n          -78,\n          123,\n          5,\n          66,\n          -120,\n          50,\n          2,\n          66,\n          -98,\n          -92,\n          66,\n          66,\n          -96,\n          -99,\n          -67,\n          66,\n          -106,\n          59,\n          104,\n          66,\n          -97,\n          16,\n          -77,\n          66,\n          -72,\n          -25,\n          -97,\n          66,\n          -118,\n          46,\n          -90,\n          66,\n          -77,\n          -64,\n          -55,\n          66,\n          -84,\n          20,\n          -108,\n          66,\n          -99,\n          -37,\n          -102,\n          66,\n          -111,\n          -100,\n          -37,\n          66,\n          -111,\n          79,\n          40,\n          66,\n          -88,\n          -70,\n          -5,\n          66,\n          -77,\n          13,\n          -100,\n          66,\n          -106,\n          77,\n          65,\n          66,\n          -85,\n          -3,\n          57,\n          66,\n          99,\n          -127,\n          12,\n          66,\n          -89,\n          44,\n          42,\n          66,\n          -79,\n          -22,\n          107,\n          66,\n          -96,\n          -54,\n          3,\n          66,\n          -80,\n          -61,\n          100,\n          66,\n          -128,\n          30,\n          -77,\n          66,\n          -125,\n          -80,\n          56,\n          66,\n          122,\n          26,\n          51,\n          66,\n          74,\n          -72,\n          45,\n          66,\n          80,\n          66,\n          46,\n          66,\n          -120,\n          -24,\n          12,\n          66,\n        
  -88,\n          61,\n          -61,\n          66,\n          -102,\n          -1,\n          -24,\n          66,\n          -91,\n          28,\n          -70,\n          66,\n          -87,\n          -50,\n          -23,\n          66,\n          -123,\n          93,\n          56,\n          66,\n          -128,\n          -64,\n          -96,\n          66,\n          124,\n          -13,\n          21,\n          66,\n          -111,\n          -32,\n          -68,\n          66,\n          121,\n          -73,\n          -6,\n          66,\n          -72,\n          39,\n          -98,\n          66,\n          -118,\n          -3,\n          -128,\n          66,\n          -104,\n          -7,\n          65,\n          66,\n          -76,\n          58,\n          -123,\n          66,\n          -91,\n          -103,\n          -30,\n          66,\n          -125,\n          -117,\n          35,\n          66,\n          -75,\n          -95,\n          39,\n          66,\n          -72,\n          -29,\n          -67,\n          66,\n          -124,\n          74,\n          -52,\n          66,\n          -104,\n          39,\n          -37,\n          66,\n          104,\n          -35,\n          102,\n          66,\n          -91,\n          118,\n          56,\n          66,\n          -87,\n          125,\n          64,\n          66,\n          -114,\n          93,\n          78,\n          66,\n          -114,\n          28,\n          20,\n          66,\n          -61,\n          -101,\n          66,\n          66,\n          -100,\n          121,\n          69,\n          66,\n          -66,\n          51,\n          -97,\n          66,\n          -125,\n          53,\n          98,\n          66,\n          103,\n          74,\n          -82,\n          66,\n          -81,\n          -94,\n          -108,\n          66,\n          -119,\n          -14,\n          79,\n          66,\n          -121,\n          -50,\n          -126,\n          
66,\n          -115,\n          -75,\n          -84,\n          66,\n          99,\n          -36,\n          58,\n          66,\n          -117,\n          59,\n          26,\n          66,\n          -117,\n          87,\n          26,\n          66,\n          -82,\n          -16,\n          -95,\n          66,\n          -77,\n          24,\n          -96,\n          66,\n          -117,\n          -62,\n          45,\n          66,\n          -103,\n          -40,\n          -74,\n          66,\n          -85,\n          59,\n          37,\n          66,\n          96,\n          -66,\n          70,\n          66,\n          -73,\n          -117,\n          -93,\n          66,\n          -69,\n          0,\n          43,\n          66,\n          126,\n          28,\n          -97,\n          66,\n          -114,\n          66,\n          -38,\n          66,\n          -112,\n          118,\n          99,\n          66,\n          78,\n          -114,\n          -3,\n          66,\n          -82,\n          40,\n          18,\n          66,\n          113,\n          43,\n          22,\n          66,\n          -98,\n          40,\n          -70,\n          66,\n          -96,\n          117,\n          -77,\n          66,\n          -101,\n          -16,\n          -76,\n          66,\n          -92,\n          29,\n          19,\n          66,\n          -117,\n          81,\n          106,\n          66,\n          115,\n          -89,\n          -32,\n          66,\n          -103,\n          -127,\n          52,\n          66,\n          -106,\n          -127,\n          -13,\n          66,\n          -74,\n          -1,\n          62,\n          66,\n          88,\n          -97,\n          -103,\n          66,\n          -120,\n          122,\n          -33,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n      
  \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162241783,\n          987870086,\n          772537813,\n          1162057343,\n          1155094576,\n          712399181,\n          1026721133,\n          1119192844,\n          975639533,\n          970736047,\n          582734047,\n          643861210,\n          595482152,\n          1093\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1161486458,\n          731026618,\n          975528818,\n          1114234865,\n          772629190,\n          755679911,\n          629559934,\n          983689973,\n          586447357,\n          1104305696,\n          581310070,\n          596013277,\n          601866733,\n          1102\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7506740242986070872,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          437434827,\n          233024426,\n          769601339,\n          321501377,\n          526254065,\n          69943122,\n          360514594,\n          190753338,\n          1026539113,\n          758482278,\n          331312977,\n          52894174,\n          229180501,\n          869629874,\n          872191853,\n          710778574,\n          737776949,\n          
455964503,\n          639308843,\n          974457843,\n          606307926,\n          903319223,\n          938782053,\n          380155693,\n          104528113,\n          804996289,\n          884677314,\n          204516543,\n          311766229,\n          385443951,\n          441530611,\n          117006069,\n          463570654,\n          899931313,\n          476007418,\n          670767710,\n          72451166,\n          925853661,\n          51723313,\n          500267607,\n          376482798,\n          1013050623,\n          859\n        ],\n        \"cutValueData\": [\n          66,\n          85,\n          7,\n          -128,\n          66,\n          -65,\n          41,\n          -73,\n          66,\n          -122,\n          112,\n          47,\n          66,\n          -74,\n          36,\n          64,\n          66,\n          -84,\n          77,\n          15,\n          66,\n          -65,\n          -71,\n          -6,\n          66,\n          -99,\n          -112,\n          65,\n          66,\n          113,\n          -44,\n          -1,\n          66,\n          -110,\n          -99,\n          -125,\n          66,\n          85,\n          -42,\n          37,\n          66,\n          -127,\n          37,\n          -41,\n          66,\n          97,\n          -115,\n          83,\n          66,\n          112,\n          -50,\n          -82,\n          66,\n          -82,\n          -84,\n          -5,\n          66,\n          -123,\n          117,\n          107,\n          66,\n          -62,\n          69,\n          -82,\n          66,\n          -81,\n          -71,\n          10,\n          66,\n          124,\n          63,\n          -84,\n          66,\n          -118,\n          -39,\n          57,\n          66,\n          -119,\n          -109,\n          89,\n          66,\n          -97,\n          86,\n          99,\n          66,\n          -95,\n          58,\n          -14,\n          66,\n          -118,\n  
        -91,\n          72,\n          66,\n          -115,\n          -61,\n          41,\n          66,\n          -117,\n          -32,\n          -7,\n          66,\n          -80,\n          121,\n          12,\n          66,\n          -87,\n          -109,\n          16,\n          66,\n          -82,\n          40,\n          -12,\n          66,\n          119,\n          85,\n          -120,\n          66,\n          -128,\n          82,\n          17,\n          66,\n          -117,\n          -101,\n          -117,\n          66,\n          79,\n          32,\n          91,\n          66,\n          -76,\n          -128,\n          -109,\n          66,\n          -86,\n          123,\n          32,\n          66,\n          119,\n          -11,\n          -102,\n          66,\n          -95,\n          -113,\n          -128,\n          66,\n          73,\n          87,\n          21,\n          66,\n          -125,\n          46,\n          -18,\n          66,\n          -73,\n          -102,\n          -32,\n          66,\n          -76,\n          70,\n          -79,\n          66,\n          114,\n          -88,\n          -101,\n          66,\n          -69,\n          -50,\n          -65,\n          66,\n          -80,\n          124,\n          14,\n          66,\n          -118,\n          -31,\n          67,\n          66,\n          -72,\n          5,\n          87,\n          66,\n          -128,\n          -1,\n          75,\n          66,\n          -97,\n          115,\n          -12,\n          66,\n          -96,\n          -20,\n          -12,\n          66,\n          108,\n          -65,\n          -9,\n          66,\n          -125,\n          42,\n          68,\n          66,\n          -125,\n          69,\n          -113,\n          66,\n          -99,\n          -10,\n          -3,\n          66,\n          -111,\n          24,\n          -1,\n          66,\n          -84,\n          -29,\n          7,\n          66,\n          
112,\n          -68,\n          96,\n          66,\n          -124,\n          49,\n          121,\n          66,\n          -103,\n          29,\n          7,\n          66,\n          97,\n          106,\n          9,\n          66,\n          -83,\n          -33,\n          -92,\n          66,\n          -101,\n          121,\n          98,\n          66,\n          -93,\n          -37,\n          37,\n          66,\n          -73,\n          65,\n          -128,\n          66,\n          -90,\n          20,\n          -98,\n          66,\n          98,\n          11,\n          -11,\n          66,\n          -88,\n          -84,\n          -59,\n          66,\n          -105,\n          -22,\n          23,\n          66,\n          79,\n          -10,\n          46,\n          66,\n          122,\n          -122,\n          20,\n          66,\n          113,\n          -4,\n          -53,\n          66,\n          86,\n          70,\n          -37,\n          66,\n          -86,\n          -44,\n          16,\n          66,\n          126,\n          -123,\n          111,\n          66,\n          89,\n          12,\n          111,\n          66,\n          120,\n          -119,\n          -70,\n          66,\n          -97,\n          -58,\n          29,\n          66,\n          125,\n          -26,\n          115,\n          66,\n          -97,\n          114,\n          106,\n          66,\n          -82,\n          -29,\n          -8,\n          66,\n          -102,\n          10,\n          -26,\n          66,\n          75,\n          69,\n          -119,\n          66,\n          -120,\n          43,\n          -106,\n          66,\n          -106,\n          79,\n          58,\n          66,\n          109,\n          -79,\n          -119,\n          66,\n          114,\n          -70,\n          -79,\n          66,\n          -75,\n          -66,\n          -120,\n          66,\n          -118,\n          37,\n          13,\n          66,\n          
-69,\n          -8,\n          118,\n          66,\n          123,\n          55,\n          28,\n          66,\n          -99,\n          82,\n          -55,\n          66,\n          -115,\n          110,\n          22,\n          66,\n          -114,\n          120,\n          107,\n          66,\n          -123,\n          -104,\n          90,\n          66,\n          -128,\n          122,\n          63,\n          66,\n          107,\n          -116,\n          45,\n          66,\n          -122,\n          -75,\n          -8,\n          66,\n          -104,\n          100,\n          -34,\n          66,\n          -113,\n          28,\n          -78,\n          66,\n          -91,\n          56,\n          -39,\n          66,\n          -116,\n          -4,\n          122,\n          66,\n          -90,\n          -22,\n          104,\n          66,\n          -109,\n          54,\n          -49,\n          66,\n          98,\n          81,\n          -43,\n          66,\n          115,\n          -33,\n          45,\n          66,\n          -66,\n          77,\n          1,\n          66,\n          -76,\n          33,\n          91,\n          66,\n          -91,\n          -86,\n          113,\n          66,\n          -68,\n          -60,\n          99,\n          66,\n          98,\n          44,\n          76,\n          66,\n          -68,\n          -47,\n          55,\n          66,\n          -99,\n          -107,\n          -80,\n          66,\n          78,\n          -117,\n          66,\n          66,\n          83,\n          -85,\n          68,\n          66,\n          122,\n          -56,\n          -100,\n          66,\n          114,\n          -28,\n          -80,\n          66,\n          -84,\n          116,\n          -113,\n          66,\n          119,\n          27,\n          89,\n          66,\n          -101,\n          -2,\n          -76,\n          66,\n          -73,\n          -34,\n          -43,\n          66,\n          
-127,\n          -6,\n          109,\n          66,\n          107,\n          -12,\n          -50,\n          66,\n          -71,\n          -94,\n          -50,\n          66,\n          -82,\n          88,\n          -18,\n          66,\n          -107,\n          -51,\n          110,\n          66,\n          85,\n          98,\n          120,\n          66,\n          92,\n          49,\n          -115,\n          66,\n          -82,\n          77,\n          -39,\n          66,\n          -82,\n          -6,\n          -94,\n          66,\n          -109,\n          69,\n          7,\n          66,\n          -121,\n          -104,\n          27,\n          66,\n          93,\n          27,\n          99,\n          66,\n          -67,\n          -101,\n          -53,\n          66,\n          -128,\n          -70,\n          30,\n          66,\n          122,\n          -53,\n          -86,\n          66,\n          -82,\n          83,\n          81,\n          66,\n          -128,\n          112,\n          -114,\n          66,\n          -109,\n          -47,\n          10,\n          66,\n          93,\n          65,\n          -117,\n          66,\n          -79,\n          4,\n          -70,\n          66,\n          -91,\n          93,\n          91,\n          66,\n          -126,\n          -31,\n          -109,\n          66,\n          -84,\n          43,\n          104,\n          66,\n          102,\n          -13,\n          98,\n          66,\n          -91,\n          -123,\n          62,\n          66,\n          -125,\n          57,\n          43,\n          66,\n          -67,\n          -25,\n          82,\n          66,\n          -77,\n          16,\n          -16,\n          66,\n          87,\n          10,\n          33,\n          66,\n          -98,\n          -76,\n          91,\n          66,\n          -86,\n          120,\n          -122,\n          66,\n          -128,\n          -124,\n          -96,\n          66,\n          
-120,\n          69,\n          -125,\n          66,\n          -81,\n          121,\n          -41,\n          66,\n          109,\n          99,\n          40,\n          66,\n          -99,\n          87,\n          98,\n          66,\n          -114,\n          67,\n          -15,\n          66,\n          -125,\n          108,\n          63,\n          66,\n          -117,\n          108,\n          -30,\n          66,\n          -110,\n          53,\n          -121,\n          66,\n          -97,\n          -34,\n          59,\n          66,\n          122,\n          12,\n          86,\n          66,\n          -86,\n          -63,\n          53,\n          66,\n          -125,\n          -76,\n          -96,\n          66,\n          -121,\n          -124,\n          62,\n          66,\n          -61,\n          -11,\n          -86,\n          66,\n          -87,\n          -43,\n          -62,\n          66,\n          -113,\n          72,\n          64,\n          66,\n          -80,\n          -48,\n          55,\n          66,\n          -106,\n          122,\n          -31,\n          66,\n          -106,\n          -37,\n          -57,\n          66,\n          -125,\n          33,\n          -11,\n          66,\n          -117,\n          100,\n          -49,\n          66,\n          -65,\n          -65,\n          20,\n          66,\n          -79,\n          -3,\n          70,\n          66,\n          -94,\n          11,\n          2,\n          66,\n          -89,\n          -77,\n          -93,\n          66,\n          -104,\n          45,\n          -43,\n          66,\n          -119,\n          -70,\n          33,\n          66,\n          -87,\n          -69,\n          -111,\n          66,\n          -115,\n          -41,\n          36,\n          66,\n          -127,\n          -100,\n          -88,\n          66,\n          -88,\n          109,\n          -3,\n          66,\n          -120,\n          0,\n          -61,\n          66,\n 
         -80,\n          118,\n          -67,\n          66,\n          -86,\n          -121,\n          15,\n          66,\n          -86,\n          61,\n          91,\n          66,\n          -115,\n          -118,\n          90,\n          66,\n          -99,\n          45,\n          -91,\n          66,\n          -74,\n          -103,\n          81,\n          66,\n          -115,\n          -128,\n          -112,\n          66,\n          115,\n          -10,\n          89,\n          66,\n          -96,\n          100,\n          -52,\n          66,\n          -120,\n          -36,\n          -92,\n          66,\n          -66,\n          6,\n          50,\n          66,\n          -106,\n          45,\n          102,\n          66,\n          -88,\n          -97,\n          55,\n          66,\n          -107,\n          -69,\n          73,\n          66,\n          -84,\n          68,\n          100,\n          66,\n          -85,\n          -13,\n          73,\n          66,\n          -105,\n          104,\n          -29,\n          66,\n          -97,\n          71,\n          -59,\n          66,\n          -102,\n          100,\n          -91,\n          66,\n          -104,\n          -15,\n          86,\n          66,\n          -94,\n          33,\n          37,\n          66,\n          -66,\n          -24,\n          7,\n          66,\n          -99,\n          -119,\n          95,\n          66,\n          -121,\n          81,\n          -62,\n          66,\n          107,\n          -88,\n          68,\n          66,\n          125,\n          -78,\n          -122,\n          66,\n          -88,\n          -21,\n          -48,\n          66,\n          -75,\n          38,\n          91,\n          66,\n          -67,\n          -109,\n          -104,\n          66,\n          88,\n          61,\n          -68,\n          66,\n          -114,\n          55,\n          -49,\n          66,\n          98,\n          -50,\n          -62,\n          
66,\n          125,\n          21,\n          12,\n          66,\n          -100,\n          -5,\n          -110,\n          66,\n          71,\n          110,\n          -51,\n          66,\n          109,\n          39,\n          -48,\n          66,\n          73,\n          11,\n          74,\n          66,\n          -118,\n          -42,\n          -58,\n          66,\n          -125,\n          -43,\n          47,\n          66,\n          106,\n          -5,\n          77,\n          66,\n          -76,\n          -98,\n          -73,\n          66,\n          126,\n          -18,\n          -31,\n          66,\n          -112,\n          55,\n          -15,\n          66,\n          -105,\n          -36,\n          26,\n          66,\n          -76,\n          -126,\n          -8,\n          66,\n          -93,\n          9,\n          124,\n          66,\n          88,\n          18,\n          -10,\n          66,\n          -98,\n          56,\n          -84,\n          66,\n          -114,\n          94,\n          -7,\n          66,\n          -82,\n          -124,\n          60,\n          66,\n          -75,\n          104,\n          2,\n          66,\n          -72,\n          -69,\n          61,\n          66,\n          -94,\n          51,\n          89,\n          66,\n          -107,\n          51,\n          -70,\n          66,\n          -98,\n          -51,\n          -19,\n          66,\n          -114,\n          -45,\n          -96,\n          66,\n          -105,\n          -105,\n          -2,\n          66,\n          -92,\n          -97,\n          55,\n          66,\n          -104,\n          -5,\n          0,\n          66,\n          -99,\n          119,\n          106,\n          66,\n          -92,\n          -104,\n          -80,\n          66,\n          -107,\n          31,\n          88,\n          66,\n          -114,\n          -82,\n          -108,\n          66,\n          -79,\n          -12,\n          -87,\n          
66,\n          117,\n          -31,\n          2,\n          66,\n          -93,\n          -119,\n          -67,\n          66,\n          -123,\n          37,\n          -32,\n          66,\n          -92,\n          1,\n          -46,\n          66,\n          -82,\n          102,\n          67,\n          66,\n          -71,\n          31,\n          17,\n          66,\n          -76,\n          125,\n          -81,\n          66,\n          126,\n          -44,\n          28,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162261466,\n          583338509,\n          1160666783,\n          715296712,\n          755531108,\n          1030736492,\n          1011775184,\n          1031438498,\n          755621260,\n          1141003547,\n          985271819,\n          725152217,\n          629676571,\n          1093\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          760491311,\n          1119028850,\n          1145767031,\n          600972007,\n          731786966,\n          1026478403,\n          581327819,\n          768259210,\n          1028092378,\n          1145549536,\n          729461849,\n          581133973,\n          597842330,\n          1123\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -3745362813213582331,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      
\"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1071894349,\n          262392401,\n          257511083,\n          98391255,\n          91742062,\n          850185766,\n          995206250,\n          984824490,\n          511667011,\n          870751138,\n          335264801,\n          124823161,\n          343369397,\n          442419129,\n          623434923,\n          741997686,\n          708038333,\n          227662061,\n          44246255,\n          402625571,\n          223692217,\n          225505141,\n          80866255,\n          914056386,\n          120429287,\n          1033366499,\n          190709614,\n          733935805,\n          332623726,\n          1003992757,\n          1042277686,\n          1001371058,\n          68803790,\n          384415567,\n          60989145,\n          774999078,\n          169796962,\n          224634973,\n          641390009,\n          844876479,\n          1004906978,\n          668584821,\n          847\n        ],\n        \"cutValueData\": [\n          66,\n          -125,\n          29,\n          -58,\n          66,\n          110,\n          -50,\n          -127,\n          66,\n          -127,\n          -5,\n          -3,\n          66,\n          -114,\n          121,\n          28,\n          66,\n          -71,\n          9,\n          36,\n          66,\n          123,\n          -110,\n          -87,\n          66,\n          -123,\n          119,\n          33,\n          66,\n          -82,\n          -98,\n          73,\n          66,\n          109,\n          84,\n          18,\n          66,\n          -89,\n          111,\n          99,\n          66,\n          -104,\n          -33,\n          -77,\n          66,\n          
127,\n          -46,\n          103,\n          66,\n          -115,\n          -93,\n          6,\n          66,\n          -108,\n          98,\n          -17,\n          66,\n          -73,\n          105,\n          -12,\n          66,\n          119,\n          -13,\n          -123,\n          66,\n          -90,\n          -122,\n          -107,\n          66,\n          -105,\n          47,\n          -82,\n          66,\n          108,\n          18,\n          116,\n          66,\n          -95,\n          74,\n          -48,\n          66,\n          -127,\n          22,\n          -100,\n          66,\n          119,\n          -24,\n          61,\n          66,\n          84,\n          -120,\n          -73,\n          66,\n          -99,\n          46,\n          -49,\n          66,\n          -97,\n          113,\n          -17,\n          66,\n          113,\n          -68,\n          48,\n          66,\n          -81,\n          90,\n          88,\n          66,\n          109,\n          -26,\n          108,\n          66,\n          -85,\n          -82,\n          32,\n          66,\n          -123,\n          -16,\n          85,\n          66,\n          -118,\n          -83,\n          99,\n          66,\n          113,\n          48,\n          49,\n          66,\n          -99,\n          104,\n          5,\n          66,\n          -74,\n          33,\n          -50,\n          66,\n          -110,\n          84,\n          22,\n          66,\n          -110,\n          104,\n          7,\n          66,\n          -88,\n          -38,\n          -40,\n          66,\n          -104,\n          30,\n          -101,\n          66,\n          -103,\n          125,\n          -76,\n          66,\n          -128,\n          85,\n          -69,\n          66,\n          -89,\n          59,\n          -109,\n          66,\n          95,\n          13,\n          -29,\n          66,\n          -110,\n          60,\n          -24,\n          66,\n      
    -79,\n          60,\n          -112,\n          66,\n          -94,\n          80,\n          127,\n          66,\n          -74,\n          109,\n          26,\n          66,\n          -85,\n          122,\n          119,\n          66,\n          -90,\n          -101,\n          -52,\n          66,\n          73,\n          -87,\n          -118,\n          66,\n          -63,\n          6,\n          -74,\n          66,\n          -77,\n          -46,\n          -105,\n          66,\n          -123,\n          -92,\n          -54,\n          66,\n          123,\n          39,\n          122,\n          66,\n          97,\n          -110,\n          -50,\n          66,\n          -104,\n          30,\n          -2,\n          66,\n          110,\n          19,\n          121,\n          66,\n          -101,\n          -87,\n          48,\n          66,\n          127,\n          -25,\n          22,\n          66,\n          -99,\n          84,\n          26,\n          66,\n          -109,\n          -81,\n          68,\n          66,\n          -111,\n          51,\n          122,\n          66,\n          -74,\n          -55,\n          -57,\n          66,\n          123,\n          48,\n          -111,\n          66,\n          -119,\n          27,\n          48,\n          66,\n          -68,\n          -16,\n          122,\n          66,\n          -107,\n          -16,\n          -61,\n          66,\n          -99,\n          35,\n          54,\n          66,\n          69,\n          -68,\n          116,\n          66,\n          -101,\n          -101,\n          46,\n          66,\n          -106,\n          87,\n          -86,\n          66,\n          -122,\n          -95,\n          -78,\n          66,\n          -108,\n          -77,\n          95,\n          66,\n          -106,\n          13,\n          -99,\n          66,\n          119,\n          69,\n          -52,\n          66,\n          -95,\n          -80,\n          -19,\n          
66,\n          -86,\n          4,\n          -61,\n          66,\n          -120,\n          44,\n          109,\n          66,\n          -82,\n          -53,\n          38,\n          66,\n          -88,\n          95,\n          94,\n          66,\n          124,\n          -13,\n          97,\n          66,\n          -64,\n          -45,\n          71,\n          66,\n          -90,\n          50,\n          -98,\n          66,\n          97,\n          -13,\n          108,\n          66,\n          85,\n          -87,\n          -25,\n          66,\n          -104,\n          -24,\n          9,\n          66,\n          -69,\n          -39,\n          -30,\n          66,\n          -83,\n          34,\n          -31,\n          66,\n          91,\n          119,\n          54,\n          66,\n          111,\n          47,\n          43,\n          66,\n          -120,\n          -48,\n          89,\n          66,\n          -69,\n          -105,\n          23,\n          66,\n          117,\n          -36,\n          21,\n          66,\n          -120,\n          -74,\n          5,\n          66,\n          -124,\n          38,\n          100,\n          66,\n          120,\n          37,\n          -125,\n          66,\n          -100,\n          97,\n          35,\n          66,\n          -76,\n          55,\n          6,\n          66,\n          116,\n          -76,\n          75,\n          66,\n          -118,\n          100,\n          75,\n          66,\n          73,\n          29,\n          -86,\n          66,\n          106,\n          -102,\n          73,\n          66,\n          -98,\n          117,\n          4,\n          66,\n          -79,\n          18,\n          57,\n          66,\n          116,\n          100,\n          12,\n          66,\n          113,\n          -12,\n          -11,\n          66,\n          -113,\n          -75,\n          123,\n          66,\n          -123,\n          58,\n          -82,\n          66,\n        
  -102,\n          4,\n          -98,\n          66,\n          -105,\n          68,\n          40,\n          66,\n          125,\n          -7,\n          40,\n          66,\n          -69,\n          -70,\n          -18,\n          66,\n          101,\n          -88,\n          29,\n          66,\n          118,\n          76,\n          -23,\n          66,\n          99,\n          24,\n          -84,\n          66,\n          -70,\n          -34,\n          72,\n          66,\n          -114,\n          -107,\n          -113,\n          66,\n          99,\n          119,\n          -38,\n          66,\n          -96,\n          108,\n          100,\n          66,\n          112,\n          -85,\n          23,\n          66,\n          -100,\n          5,\n          108,\n          66,\n          72,\n          -121,\n          98,\n          66,\n          103,\n          -127,\n          112,\n          66,\n          79,\n          70,\n          -14,\n          66,\n          -65,\n          -80,\n          -4,\n          66,\n          -100,\n          -76,\n          54,\n          66,\n          -88,\n          -85,\n          -108,\n          66,\n          -106,\n          -9,\n          -72,\n          66,\n          -75,\n          56,\n          -78,\n          66,\n          -111,\n          -111,\n          20,\n          66,\n          -98,\n          86,\n          81,\n          66,\n          -104,\n          -64,\n          -49,\n          66,\n          -99,\n          72,\n          -53,\n          66,\n          -121,\n          82,\n          79,\n          66,\n          -106,\n          82,\n          -124,\n          66,\n          -79,\n          -95,\n          -7,\n          66,\n          -104,\n          -82,\n          -61,\n          66,\n          -71,\n          32,\n          -52,\n          66,\n          -69,\n          -12,\n          -46,\n          66,\n          -89,\n          -85,\n          9,\n          66,\n        
  84,\n          -78,\n          110,\n          66,\n          -111,\n          109,\n          40,\n          66,\n          -95,\n          127,\n          -64,\n          66,\n          84,\n          -43,\n          81,\n          66,\n          119,\n          103,\n          38,\n          66,\n          -114,\n          36,\n          96,\n          66,\n          126,\n          -103,\n          -34,\n          66,\n          -117,\n          -118,\n          -99,\n          66,\n          -93,\n          77,\n          -36,\n          66,\n          -103,\n          75,\n          -114,\n          66,\n          -104,\n          25,\n          66,\n          66,\n          -69,\n          -52,\n          61,\n          66,\n          96,\n          97,\n          12,\n          66,\n          -76,\n          -89,\n          17,\n          66,\n          -90,\n          20,\n          -12,\n          66,\n          -107,\n          110,\n          102,\n          66,\n          -88,\n          -110,\n          -90,\n          66,\n          -95,\n          30,\n          -20,\n          66,\n          -106,\n          16,\n          105,\n          66,\n          -75,\n          100,\n          69,\n          66,\n          114,\n          31,\n          -68,\n          66,\n          -80,\n          -24,\n          -46,\n          66,\n          -93,\n          -113,\n          116,\n          66,\n          -102,\n          104,\n          54,\n          66,\n          94,\n          -116,\n          -81,\n          66,\n          -107,\n          86,\n          62,\n          66,\n          87,\n          110,\n          -96,\n          66,\n          115,\n          94,\n          -60,\n          66,\n          -79,\n          100,\n          89,\n          66,\n          -81,\n          -88,\n          27,\n          66,\n          -122,\n          -6,\n          -101,\n          66,\n          120,\n          63,\n          60,\n          66,\n       
   -95,\n          53,\n          125,\n          66,\n          -107,\n          -82,\n          101,\n          66,\n          -97,\n          52,\n          -73,\n          66,\n          -88,\n          -62,\n          -101,\n          66,\n          -61,\n          -11,\n          102,\n          66,\n          -72,\n          -15,\n          -38,\n          66,\n          -96,\n          64,\n          -31,\n          66,\n          121,\n          -53,\n          -66,\n          66,\n          123,\n          100,\n          -90,\n          66,\n          127,\n          106,\n          1,\n          66,\n          -110,\n          102,\n          72,\n          66,\n          -98,\n          58,\n          -104,\n          66,\n          -82,\n          -103,\n          -51,\n          66,\n          -84,\n          28,\n          -64,\n          66,\n          -86,\n          -37,\n          91,\n          66,\n          -109,\n          43,\n          113,\n          66,\n          -93,\n          94,\n          11,\n          66,\n          -96,\n          -36,\n          61,\n          66,\n          -114,\n          16,\n          106,\n          66,\n          -85,\n          104,\n          -40,\n          66,\n          -105,\n          119,\n          97,\n          66,\n          -93,\n          85,\n          -91,\n          66,\n          -68,\n          0,\n          -30,\n          66,\n          -83,\n          127,\n          -96,\n          66,\n          -93,\n          66,\n          -112,\n          66,\n          -101,\n          -38,\n          -81,\n          66,\n          121,\n          56,\n          52,\n          66,\n          -125,\n          98,\n          -118,\n          66,\n          126,\n          88,\n          72,\n          66,\n          89,\n          114,\n          -88,\n          66,\n          -74,\n          78,\n          60,\n          66,\n          -126,\n          -52,\n          -22,\n          66,\n     
     -112,\n          22,\n          -32,\n          66,\n          -89,\n          46,\n          -73,\n          66,\n          -93,\n          91,\n          125,\n          66,\n          120,\n          -56,\n          -92,\n          66,\n          -87,\n          7,\n          77,\n          66,\n          -64,\n          78,\n          -4,\n          66,\n          -126,\n          -66,\n          90,\n          66,\n          -84,\n          71,\n          3,\n          66,\n          -128,\n          110,\n          98,\n          66,\n          -122,\n          83,\n          -82,\n          66,\n          -111,\n          11,\n          87,\n          66,\n          86,\n          -14,\n          35,\n          66,\n          116,\n          -70,\n          -70,\n          66,\n          99,\n          -39,\n          -80,\n          66,\n          -119,\n          101,\n          -22,\n          66,\n          115,\n          19,\n          -63,\n          66,\n          -113,\n          70,\n          -107,\n          66,\n          -95,\n          -68,\n          14,\n          66,\n          -81,\n          71,\n          30,\n          66,\n          -80,\n          -28,\n          -60,\n          66,\n          105,\n          -91,\n          -127,\n          66,\n          -111,\n          -52,\n          91,\n          66,\n          -123,\n          119,\n          41,\n          66,\n          -102,\n          -104,\n          34,\n          66,\n          -95,\n          80,\n          61,\n          66,\n          -124,\n          99,\n          62,\n          66,\n          -73,\n          106,\n          30,\n          66,\n          105,\n          112,\n          -57,\n          66,\n          109,\n          50,\n          -120,\n          66,\n          -71,\n          -6,\n          -15,\n          66,\n          110,\n          98,\n          52,\n          66,\n          120,\n          -12,\n          -37,\n          66,\n          
-57,\n          -33,\n          -68,\n          66,\n          -69,\n          86,\n          46,\n          66,\n          96,\n          58,\n          -19,\n          66,\n          -76,\n          126,\n          -21,\n          66,\n          -73,\n          125,\n          -63,\n          66,\n          -79,\n          -85,\n          53,\n          66,\n          -72,\n          58,\n          77,\n          66,\n          -106,\n          49,\n          121,\n          66,\n          -125,\n          61,\n          -59,\n          66,\n          122,\n          -121,\n          -65,\n          66,\n          -110,\n          -99,\n          45,\n          66,\n          -115,\n          92,\n          56,\n          66,\n          -116,\n          109,\n          -37,\n          66,\n          -121,\n          -107,\n          -78,\n          66,\n          -97,\n          -39,\n          -38,\n          66,\n          -107,\n          11,\n          -41,\n          66,\n          -108,\n          -54,\n          -41,\n          66,\n          119,\n          51,\n          56,\n          66,\n          -93,\n          87,\n          -87,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162254905,\n          1160126198,\n          1146318205,\n          768218714,\n          970323758,\n          1099876927,\n          643867019,\n          600501721,\n          774110435,\n          588217703,\n          1112592472,\n          638712340,\n          715239841,\n          1336\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162259279,\n          731733019,\n          1141513144,\n          774636602,\n          984674554,\n          1100001829,\n          600800612,\n          595561288,\n 
         731085628,\n          968735182,\n          638605348,\n          1026153239,\n          1016558170,\n          1123\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5933807794767235558,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1060563679,\n          69190726,\n          718516189,\n          577461197,\n          128264663,\n          240233213,\n          731333706,\n          439483711,\n          892984650,\n          863421774,\n          715044269,\n          915485741,\n          341097923,\n          527756130,\n          917234755,\n          917813929,\n          388605485,\n          1050916429,\n          99063898,\n          661487011,\n          226399306,\n          530525987,\n          187151915,\n          510891610,\n          171042099,\n          739313630,\n          731102061,\n          1019020399,\n          402126665,\n          876427621,\n          724770639,\n          514530557,\n          790624125,\n          597932745,\n          61642213,\n          393570995,\n          584509945,\n          856739626,\n          624420981,\n          933801047,\n          756661035,\n          342556667,\n          1\n        ],\n        \"cutValueData\": [\n          66,\n          -114,\n          
-102,\n          21,\n          66,\n          -83,\n          -108,\n          -29,\n          66,\n          73,\n          111,\n          122,\n          66,\n          -59,\n          25,\n          14,\n          66,\n          -86,\n          1,\n          97,\n          66,\n          -82,\n          30,\n          -59,\n          66,\n          -112,\n          -48,\n          -84,\n          66,\n          -117,\n          120,\n          31,\n          66,\n          -114,\n          -27,\n          -77,\n          66,\n          86,\n          59,\n          35,\n          66,\n          -127,\n          26,\n          -100,\n          66,\n          -80,\n          -11,\n          -113,\n          66,\n          -110,\n          -17,\n          -13,\n          66,\n          -94,\n          -8,\n          40,\n          66,\n          -73,\n          5,\n          -66,\n          66,\n          104,\n          -44,\n          -19,\n          66,\n          -112,\n          3,\n          107,\n          66,\n          -125,\n          120,\n          27,\n          66,\n          -66,\n          112,\n          66,\n          66,\n          -99,\n          106,\n          10,\n          66,\n          -82,\n          -112,\n          -124,\n          66,\n          -115,\n          -88,\n          -31,\n          66,\n          93,\n          -65,\n          94,\n          66,\n          -66,\n          85,\n          1,\n          66,\n          76,\n          -82,\n          -120,\n          66,\n          -118,\n          124,\n          -23,\n          66,\n          113,\n          -57,\n          87,\n          66,\n          -69,\n          -76,\n          -126,\n          66,\n          118,\n          -105,\n          35,\n          66,\n          74,\n          -35,\n          -21,\n          66,\n          -63,\n          -95,\n          127,\n          66,\n          -92,\n          84,\n          -76,\n          66,\n          -103,\n       
   -110,\n          -94,\n          66,\n          82,\n          -23,\n          28,\n          66,\n          -101,\n          118,\n          -60,\n          66,\n          -106,\n          96,\n          -9,\n          66,\n          -115,\n          92,\n          95,\n          66,\n          100,\n          -29,\n          18,\n          66,\n          71,\n          -3,\n          24,\n          66,\n          -77,\n          76,\n          -68,\n          66,\n          -117,\n          60,\n          49,\n          66,\n          107,\n          73,\n          105,\n          66,\n          86,\n          -2,\n          -94,\n          66,\n          -100,\n          -102,\n          -7,\n          66,\n          82,\n          -33,\n          52,\n          66,\n          -78,\n          123,\n          -106,\n          66,\n          118,\n          -20,\n          27,\n          66,\n          -95,\n          -84,\n          108,\n          66,\n          -112,\n          -23,\n          -40,\n          66,\n          121,\n          -98,\n          53,\n          66,\n          -98,\n          106,\n          37,\n          66,\n          118,\n          -104,\n          19,\n          66,\n          -91,\n          101,\n          76,\n          66,\n          -73,\n          127,\n          -64,\n          66,\n          -88,\n          -24,\n          -81,\n          66,\n          -108,\n          67,\n          4,\n          66,\n          -79,\n          -16,\n          -46,\n          66,\n          -83,\n          30,\n          -84,\n          66,\n          108,\n          -41,\n          95,\n          66,\n          -78,\n          38,\n          55,\n          66,\n          -120,\n          49,\n          125,\n          66,\n          -97,\n          53,\n          48,\n          66,\n          -122,\n          62,\n          26,\n          66,\n          125,\n          -49,\n          38,\n          66,\n          122,\n          
-24,\n          -83,\n          66,\n          -98,\n          1,\n          -3,\n          66,\n          -119,\n          74,\n          -95,\n          66,\n          -65,\n          88,\n          -49,\n          66,\n          75,\n          -117,\n          77,\n          66,\n          -95,\n          98,\n          -14,\n          66,\n          -108,\n          0,\n          -39,\n          66,\n          -118,\n          87,\n          73,\n          66,\n          118,\n          -71,\n          93,\n          66,\n          69,\n          -51,\n          21,\n          66,\n          -95,\n          -47,\n          -69,\n          66,\n          -103,\n          108,\n          20,\n          66,\n          -92,\n          -120,\n          -67,\n          66,\n          -112,\n          121,\n          110,\n          66,\n          96,\n          120,\n          64,\n          66,\n          116,\n          121,\n          -87,\n          66,\n          -100,\n          -27,\n          -107,\n          66,\n          -75,\n          -72,\n          -40,\n          66,\n          -122,\n          -30,\n          59,\n          66,\n          -106,\n          100,\n          22,\n          66,\n          -113,\n          116,\n          -82,\n          66,\n          -110,\n          69,\n          -14,\n          66,\n          -80,\n          -11,\n          40,\n          66,\n          -100,\n          -35,\n          -111,\n          66,\n          -75,\n          52,\n          91,\n          66,\n          -106,\n          -60,\n          37,\n          66,\n          105,\n          49,\n          52,\n          66,\n          -117,\n          16,\n          -36,\n          66,\n          -59,\n          -100,\n          -14,\n          66,\n          -125,\n          -115,\n          -30,\n          66,\n          -121,\n          111,\n          -52,\n          66,\n          -115,\n          90,\n          57,\n          66,\n          -98,\n  
        72,\n          -72,\n          66,\n          119,\n          98,\n          32,\n          66,\n          110,\n          -105,\n          22,\n          66,\n          -96,\n          98,\n          123,\n          66,\n          -102,\n          116,\n          14,\n          66,\n          118,\n          100,\n          -105,\n          66,\n          -122,\n          -26,\n          42,\n          66,\n          -117,\n          -74,\n          84,\n          66,\n          -124,\n          -32,\n          -126,\n          66,\n          -65,\n          -95,\n          -28,\n          66,\n          -99,\n          -71,\n          73,\n          66,\n          114,\n          -124,\n          100,\n          66,\n          -113,\n          69,\n          16,\n          66,\n          -110,\n          -70,\n          24,\n          66,\n          -127,\n          89,\n          38,\n          66,\n          -114,\n          -2,\n          19,\n          66,\n          118,\n          -39,\n          28,\n          66,\n          -108,\n          -18,\n          60,\n          66,\n          91,\n          -68,\n          -115,\n          66,\n          -107,\n          48,\n          -123,\n          66,\n          -102,\n          -111,\n          40,\n          66,\n          -94,\n          112,\n          -89,\n          66,\n          -77,\n          21,\n          44,\n          66,\n          -128,\n          115,\n          -37,\n          66,\n          -122,\n          -45,\n          64,\n          66,\n          -102,\n          -56,\n          -36,\n          66,\n          -123,\n          20,\n          79,\n          66,\n          -103,\n          76,\n          -90,\n          66,\n          -81,\n          75,\n          -2,\n          66,\n          97,\n          -115,\n          106,\n          66,\n          -104,\n          80,\n          114,\n          66,\n          -107,\n          -86,\n          76,\n          66,\n        
  -81,\n          112,\n          -42,\n          66,\n          -89,\n          80,\n          -60,\n          66,\n          -77,\n          63,\n          -50,\n          66,\n          70,\n          -106,\n          -67,\n          66,\n          -113,\n          12,\n          -43,\n          66,\n          -87,\n          16,\n          110,\n          66,\n          115,\n          60,\n          -15,\n          66,\n          -120,\n          78,\n          8,\n          66,\n          88,\n          -121,\n          32,\n          66,\n          -99,\n          -58,\n          -35,\n          66,\n          -123,\n          62,\n          110,\n          66,\n          120,\n          71,\n          -84,\n          66,\n          -87,\n          -45,\n          90,\n          66,\n          100,\n          14,\n          122,\n          66,\n          -113,\n          121,\n          -39,\n          66,\n          -98,\n          93,\n          80,\n          66,\n          -118,\n          70,\n          -48,\n          66,\n          -100,\n          -73,\n          -31,\n          66,\n          97,\n          1,\n          125,\n          66,\n          -94,\n          31,\n          15,\n          66,\n          -67,\n          -98,\n          -16,\n          66,\n          -111,\n          -15,\n          126,\n          66,\n          -80,\n          -52,\n          103,\n          66,\n          97,\n          -86,\n          81,\n          66,\n          -74,\n          22,\n          -49,\n          66,\n          -127,\n          -28,\n          39,\n          66,\n          -121,\n          -92,\n          55,\n          66,\n          -72,\n          -2,\n          27,\n          66,\n          -91,\n          96,\n          -36,\n          66,\n          -78,\n          50,\n          88,\n          66,\n          -79,\n          -34,\n          -97,\n          66,\n          -125,\n          -49,\n          123,\n          66,\n          
-107,\n          20,\n          19,\n          66,\n          127,\n          34,\n          -116,\n          66,\n          112,\n          85,\n          73,\n          66,\n          -103,\n          36,\n          -110,\n          66,\n          -110,\n          57,\n          62,\n          66,\n          -122,\n          -92,\n          -71,\n          66,\n          -79,\n          -75,\n          121,\n          66,\n          -75,\n          -73,\n          60,\n          66,\n          87,\n          60,\n          105,\n          66,\n          -100,\n          46,\n          25,\n          66,\n          109,\n          -117,\n          -44,\n          66,\n          -115,\n          -105,\n          82,\n          66,\n          122,\n          -112,\n          2,\n          66,\n          84,\n          -111,\n          -7,\n          66,\n          91,\n          14,\n          55,\n          66,\n          -95,\n          -108,\n          -77,\n          66,\n          111,\n          -89,\n          49,\n          66,\n          -91,\n          34,\n          11,\n          66,\n          -77,\n          57,\n          55,\n          66,\n          -120,\n          -31,\n          76,\n          66,\n          123,\n          -27,\n          -70,\n          66,\n          -123,\n          -38,\n          -34,\n          66,\n          -125,\n          -12,\n          -115,\n          66,\n          -123,\n          -14,\n          9,\n          66,\n          -76,\n          75,\n          15,\n          66,\n          -124,\n          50,\n          73,\n          66,\n          87,\n          -83,\n          -62,\n          66,\n          -110,\n          55,\n          100,\n          66,\n          125,\n          -84,\n          36,\n          66,\n          -110,\n          48,\n          -73,\n          66,\n          101,\n          -33,\n          49,\n          66,\n          105,\n          28,\n          19,\n          66,\n          
-107,\n          100,\n          19,\n          66,\n          -121,\n          55,\n          -109,\n          66,\n          -86,\n          -115,\n          84,\n          66,\n          -104,\n          67,\n          24,\n          66,\n          118,\n          -3,\n          67,\n          66,\n          -91,\n          73,\n          121,\n          66,\n          -123,\n          -13,\n          91,\n          66,\n          -128,\n          77,\n          6,\n          66,\n          102,\n          -51,\n          -37,\n          66,\n          -123,\n          -98,\n          -113,\n          66,\n          117,\n          68,\n          -76,\n          66,\n          -109,\n          19,\n          -46,\n          66,\n          -91,\n          80,\n          -47,\n          66,\n          95,\n          42,\n          30,\n          66,\n          86,\n          -35,\n          90,\n          66,\n          125,\n          93,\n          71,\n          66,\n          -76,\n          -51,\n          20,\n          66,\n          -65,\n          -36,\n          -69,\n          66,\n          -114,\n          -1,\n          58,\n          66,\n          86,\n          -24,\n          -87,\n          66,\n          -128,\n          100,\n          -78,\n          66,\n          -128,\n          -104,\n          62,\n          66,\n          -99,\n          76,\n          85,\n          66,\n          -118,\n          -89,\n          32,\n          66,\n          -91,\n          -44,\n          -121,\n          66,\n          -67,\n          20,\n          -61,\n          66,\n          -98,\n          -92,\n          111,\n          66,\n          -116,\n          115,\n          -74,\n          66,\n          -123,\n          -97,\n          38,\n          66,\n          126,\n          -99,\n          -115,\n          66,\n          -128,\n          -77,\n          -27,\n          66,\n          120,\n          -73,\n          94,\n          66,\n       
   -107,\n          80,\n          -22,\n          66,\n          -66,\n          34,\n          93,\n          66,\n          -127,\n          -78,\n          83,\n          66,\n          103,\n          -4,\n          -125,\n          66,\n          93,\n          -15,\n          -110,\n          66,\n          -94,\n          -43,\n          -74,\n          66,\n          -125,\n          89,\n          97,\n          66,\n          -124,\n          26,\n          -3,\n          66,\n          80,\n          -64,\n          -107,\n          66,\n          102,\n          -108,\n          43,\n          66,\n          -96,\n          -115,\n          -63,\n          66,\n          -75,\n          -85,\n          103,\n          66,\n          115,\n          39,\n          -124,\n          66,\n          91,\n          89,\n          -42,\n          66,\n          -91,\n          50,\n          -66,\n          66,\n          -122,\n          43,\n          60,\n          66,\n          -63,\n          88,\n          -69,\n          66,\n          -106,\n          108,\n          77,\n          66,\n          -90,\n          -63,\n          120,\n          66,\n          -113,\n          116,\n          0,\n          66,\n          113,\n          66,\n          60,\n          66,\n          -125,\n          -55,\n          -37,\n          66,\n          -105,\n          -128,\n          112,\n          66,\n          113,\n          52,\n          45,\n          66,\n          -115,\n          114,\n          72,\n          66,\n          125,\n          29,\n          -110,\n          66,\n          -91,\n          61,\n          17,\n          66,\n          -109,\n          -127,\n          -117,\n          66,\n          -100,\n          -111,\n          30,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        
\"canonicalAndNotALeaf\": true,\n        \"size\": 253,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1119194819,\n          1160645003,\n          1162241755,\n          1118427389,\n          640740667,\n          969169022,\n          582754243,\n          588121196,\n          987860569,\n          582961333,\n          968638810,\n          624243065,\n          988276516,\n          394\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1157478227,\n          1160660555,\n          759938759,\n          968738200,\n          628989106,\n          990013019,\n          712070797,\n          588296401,\n          983668091,\n          729658975,\n          767675542,\n          1097927590,\n          597080605,\n          368\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 1,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 1,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -2736961492578362267,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1013443169,\n          921676590,\n          129582783,\n          245296757,\n          341366730,\n          753698021,\n          230382562,\n          353726331,\n          374681205,\n          526874929,\n          871585749,\n          1026469745,\n          786607786,\n          1009817027,\n          
91003690,\n          979301591,\n          187745486,\n          439143994,\n          180000978,\n          749375469,\n          880502454,\n          359921235,\n          191974743,\n          253057958,\n          916515141,\n          746270301,\n          753583687,\n          984447703,\n          602649694,\n          880101359,\n          574072255,\n          765684939,\n          234039721,\n          586603589,\n          392416711,\n          668261841,\n          494078249,\n          719006946,\n          393916235,\n          110694101,\n          757009206,\n          61583357,\n          311\n        ],\n        \"cutValueData\": [\n          66,\n          -89,\n          4,\n          -101,\n          66,\n          -112,\n          101,\n          72,\n          66,\n          -73,\n          46,\n          28,\n          66,\n          -118,\n          16,\n          -79,\n          66,\n          -121,\n          -123,\n          -7,\n          66,\n          108,\n          116,\n          25,\n          66,\n          -99,\n          -98,\n          65,\n          66,\n          113,\n          36,\n          -93,\n          66,\n          -108,\n          55,\n          -4,\n          66,\n          -93,\n          44,\n          -62,\n          66,\n          124,\n          83,\n          126,\n          66,\n          -96,\n          120,\n          -69,\n          66,\n          -122,\n          93,\n          36,\n          66,\n          -86,\n          30,\n          13,\n          66,\n          -93,\n          112,\n          77,\n          66,\n          -113,\n          115,\n          -45,\n          66,\n          -121,\n          -94,\n          50,\n          66,\n          -97,\n          -36,\n          -88,\n          66,\n          -124,\n          85,\n          -83,\n          66,\n          -71,\n          -104,\n          -14,\n          66,\n          -122,\n          126,\n          38,\n          66,\n          
-121,\n          62,\n          117,\n          66,\n          -112,\n          95,\n          8,\n          66,\n          120,\n          79,\n          107,\n          66,\n          -112,\n          58,\n          -119,\n          66,\n          -82,\n          -19,\n          80,\n          66,\n          -119,\n          75,\n          102,\n          66,\n          -126,\n          -112,\n          -40,\n          66,\n          -127,\n          70,\n          -19,\n          66,\n          115,\n          -26,\n          -7,\n          66,\n          -116,\n          88,\n          40,\n          66,\n          97,\n          125,\n          58,\n          66,\n          99,\n          116,\n          -122,\n          66,\n          -111,\n          58,\n          81,\n          66,\n          -74,\n          68,\n          -26,\n          66,\n          -99,\n          -30,\n          -68,\n          66,\n          -86,\n          34,\n          -53,\n          66,\n          -115,\n          34,\n          -35,\n          66,\n          -107,\n          13,\n          -50,\n          66,\n          -123,\n          68,\n          13,\n          66,\n          115,\n          -29,\n          86,\n          66,\n          -77,\n          -76,\n          78,\n          66,\n          -89,\n          -120,\n          9,\n          66,\n          -95,\n          13,\n          102,\n          66,\n          127,\n          19,\n          -14,\n          66,\n          -124,\n          -80,\n          99,\n          66,\n          -68,\n          20,\n          -56,\n          66,\n          -70,\n          91,\n          -114,\n          66,\n          -80,\n          -50,\n          -76,\n          66,\n          -104,\n          109,\n          99,\n          66,\n          -113,\n          -96,\n          66,\n          66,\n          -104,\n          27,\n          -74,\n          66,\n          -94,\n          29,\n          13,\n          66,\n          
-90,\n          -116,\n          51,\n          66,\n          87,\n          -26,\n          92,\n          66,\n          -102,\n          13,\n          2,\n          66,\n          -59,\n          -83,\n          10,\n          66,\n          81,\n          83,\n          60,\n          66,\n          100,\n          -90,\n          -102,\n          66,\n          -86,\n          36,\n          63,\n          66,\n          -73,\n          84,\n          78,\n          66,\n          -82,\n          -120,\n          -22,\n          66,\n          84,\n          79,\n          -61,\n          66,\n          -84,\n          -31,\n          -49,\n          66,\n          -95,\n          123,\n          -41,\n          66,\n          -87,\n          -27,\n          68,\n          66,\n          -88,\n          -61,\n          122,\n          66,\n          104,\n          89,\n          118,\n          66,\n          -117,\n          -128,\n          -93,\n          66,\n          -79,\n          52,\n          -51,\n          66,\n          -73,\n          112,\n          38,\n          66,\n          93,\n          122,\n          77,\n          66,\n          -111,\n          80,\n          103,\n          66,\n          -99,\n          -102,\n          80,\n          66,\n          -121,\n          44,\n          -11,\n          66,\n          -104,\n          3,\n          -100,\n          66,\n          -90,\n          -85,\n          123,\n          66,\n          -124,\n          -4,\n          -15,\n          66,\n          -68,\n          -86,\n          24,\n          66,\n          -87,\n          108,\n          -31,\n          66,\n          -106,\n          124,\n          -61,\n          66,\n          -85,\n          -6,\n          -78,\n          66,\n          -72,\n          -33,\n          -89,\n          66,\n          -86,\n          -85,\n          -75,\n          66,\n          -120,\n          -41,\n          76,\n          66,\n          
-124,\n          -83,\n          -79,\n          66,\n          -89,\n          -12,\n          108,\n          66,\n          -67,\n          -118,\n          123,\n          66,\n          -122,\n          50,\n          -96,\n          66,\n          -96,\n          -117,\n          83,\n          66,\n          -113,\n          75,\n          123,\n          66,\n          115,\n          -17,\n          -61,\n          66,\n          116,\n          29,\n          -85,\n          66,\n          -98,\n          -67,\n          -124,\n          66,\n          115,\n          -84,\n          -12,\n          66,\n          94,\n          5,\n          85,\n          66,\n          116,\n          -79,\n          -34,\n          66,\n          102,\n          11,\n          27,\n          66,\n          -118,\n          115,\n          2,\n          66,\n          -120,\n          -5,\n          17,\n          66,\n          82,\n          -63,\n          59,\n          66,\n          -78,\n          -39,\n          89,\n          66,\n          -107,\n          -28,\n          60,\n          66,\n          -127,\n          -17,\n          -16,\n          66,\n          116,\n          33,\n          -47,\n          66,\n          -100,\n          -51,\n          -104,\n          66,\n          107,\n          -68,\n          125,\n          66,\n          -114,\n          45,\n          21,\n          66,\n          -71,\n          122,\n          1,\n          66,\n          118,\n          -64,\n          16,\n          66,\n          -69,\n          -120,\n          47,\n          66,\n          -127,\n          125,\n          -25,\n          66,\n          -96,\n          -16,\n          26,\n          66,\n          -61,\n          108,\n          113,\n          66,\n          -58,\n          -84,\n          13,\n          66,\n          101,\n          84,\n          -9,\n          66,\n          -126,\n          -69,\n          -78,\n          66,\n       
   68,\n          -29,\n          25,\n          66,\n          -74,\n          24,\n          -90,\n          66,\n          88,\n          -72,\n          48,\n          66,\n          -102,\n          64,\n          -8,\n          66,\n          96,\n          107,\n          105,\n          66,\n          76,\n          -50,\n          75,\n          66,\n          -104,\n          -98,\n          109,\n          66,\n          103,\n          -81,\n          -34,\n          66,\n          76,\n          -33,\n          85,\n          66,\n          -123,\n          -81,\n          66,\n          66,\n          -70,\n          85,\n          -19,\n          66,\n          -73,\n          39,\n          87,\n          66,\n          -77,\n          -36,\n          -91,\n          66,\n          100,\n          14,\n          -5,\n          66,\n          111,\n          54,\n          30,\n          66,\n          -94,\n          -67,\n          -105,\n          66,\n          119,\n          -34,\n          18,\n          66,\n          94,\n          44,\n          -53,\n          66,\n          88,\n          78,\n          110,\n          66,\n          -94,\n          58,\n          96,\n          66,\n          -72,\n          27,\n          -111,\n          66,\n          -112,\n          6,\n          -69,\n          66,\n          -111,\n          -108,\n          16,\n          66,\n          95,\n          32,\n          -56,\n          66,\n          114,\n          -47,\n          115,\n          66,\n          -121,\n          -114,\n          -89,\n          66,\n          102,\n          -56,\n          119,\n          66,\n          -93,\n          7,\n          98,\n          66,\n          -94,\n          -115,\n          -119,\n          66,\n          -84,\n          -55,\n          47,\n          66,\n          83,\n          -40,\n          87,\n          66,\n          80,\n          -123,\n          -37,\n          66,\n          -72,\n  
        89,\n          -25,\n          66,\n          -83,\n          53,\n          54,\n          66,\n          -101,\n          90,\n          3,\n          66,\n          116,\n          96,\n          -69,\n          66,\n          -82,\n          16,\n          -120,\n          66,\n          -93,\n          -64,\n          110,\n          66,\n          -122,\n          56,\n          85,\n          66,\n          -96,\n          100,\n          98,\n          66,\n          -78,\n          84,\n          102,\n          66,\n          -68,\n          -83,\n          -73,\n          66,\n          93,\n          -59,\n          -99,\n          66,\n          -95,\n          14,\n          -44,\n          66,\n          -112,\n          110,\n          78,\n          66,\n          -119,\n          -45,\n          0,\n          66,\n          -94,\n          -45,\n          -96,\n          66,\n          -119,\n          -102,\n          -113,\n          66,\n          -94,\n          -99,\n          -10,\n          66,\n          86,\n          -115,\n          115,\n          66,\n          -79,\n          -115,\n          -128,\n          66,\n          96,\n          12,\n          -51,\n          66,\n          112,\n          56,\n          13,\n          66,\n          -99,\n          80,\n          -125,\n          66,\n          -109,\n          91,\n          -48,\n          66,\n          -78,\n          17,\n          25,\n          66,\n          109,\n          -87,\n          -20,\n          66,\n          91,\n          -123,\n          -128,\n          66,\n          85,\n          -104,\n          125,\n          66,\n          -92,\n          -75,\n          -67,\n          66,\n          72,\n          -60,\n          -7,\n          66,\n          -91,\n          -65,\n          57,\n          66,\n          -98,\n          87,\n          -54,\n          66,\n          -118,\n          125,\n          15,\n          66,\n          -102,\n 
         3,\n          81,\n          66,\n          -86,\n          30,\n          -34,\n          66,\n          -89,\n          49,\n          35,\n          66,\n          -71,\n          57,\n          28,\n          66,\n          127,\n          120,\n          121,\n          66,\n          -85,\n          5,\n          43,\n          66,\n          -100,\n          46,\n          72,\n          66,\n          -87,\n          98,\n          49,\n          66,\n          -86,\n          106,\n          -4,\n          66,\n          -99,\n          119,\n          114,\n          66,\n          -108,\n          -114,\n          66,\n          66,\n          -127,\n          -88,\n          -27,\n          66,\n          78,\n          -21,\n          -90,\n          66,\n          -84,\n          78,\n          106,\n          66,\n          -77,\n          -99,\n          -5,\n          66,\n          -93,\n          -85,\n          -25,\n          66,\n          124,\n          -118,\n          126,\n          66,\n          -128,\n          -126,\n          -122,\n          66,\n          121,\n          6,\n          -60,\n          66,\n          72,\n          83,\n          -68,\n          66,\n          116,\n          27,\n          69,\n          66,\n          -74,\n          94,\n          -128,\n          66,\n          -79,\n          -87,\n          114,\n          66,\n          -96,\n          29,\n          112,\n          66,\n          -85,\n          48,\n          -62,\n          66,\n          86,\n          5,\n          -122,\n          66,\n          -109,\n          34,\n          45,\n          66,\n          -105,\n          105,\n          127,\n          66,\n          121,\n          -92,\n          -106,\n          66,\n          -125,\n          84,\n          123,\n          66,\n          -128,\n          55,\n          87,\n          66,\n          -120,\n          -44,\n          -72,\n          66,\n          -105,\n     
     -126,\n          -21,\n          66,\n          -97,\n          -85,\n          -108,\n          66,\n          118,\n          120,\n          92,\n          66,\n          102,\n          46,\n          -66,\n          66,\n          -68,\n          86,\n          -108,\n          66,\n          -123,\n          -21,\n          -82,\n          66,\n          -62,\n          -122,\n          -84,\n          66,\n          -126,\n          100,\n          -50,\n          66,\n          -79,\n          -22,\n          -117,\n          66,\n          100,\n          14,\n          -60,\n          66,\n          86,\n          -29,\n          -32,\n          66,\n          104,\n          14,\n          31,\n          66,\n          -96,\n          78,\n          -105,\n          66,\n          -122,\n          -25,\n          29,\n          66,\n          -95,\n          112,\n          89,\n          66,\n          -99,\n          68,\n          48,\n          66,\n          -122,\n          20,\n          74,\n          66,\n          -97,\n          67,\n          75,\n          66,\n          89,\n          -111,\n          -36,\n          66,\n          -122,\n          -80,\n          -114,\n          66,\n          -99,\n          121,\n          -101,\n          66,\n          -61,\n          -103,\n          30,\n          66,\n          -67,\n          -6,\n          -81,\n          66,\n          110,\n          -45,\n          -29,\n          66,\n          -116,\n          92,\n          10,\n          66,\n          75,\n          -83,\n          102,\n          66,\n          -117,\n          96,\n          121,\n          66,\n          -111,\n          0,\n          59,\n          66,\n          -108,\n          6,\n          106,\n          66,\n          84,\n          37,\n          100,\n          66,\n          -101,\n          -61,\n          -10,\n          66,\n          -125,\n          120,\n          96,\n          66,\n          
-112,\n          70,\n          85,\n          66,\n          90,\n          36,\n          -41,\n          66,\n          123,\n          29,\n          29,\n          66,\n          -122,\n          -28,\n          35,\n          66,\n          -108,\n          -63,\n          86,\n          66,\n          -105,\n          -37,\n          15,\n          66,\n          -93,\n          63,\n          -47,\n          66,\n          -107,\n          -79,\n          93,\n          66,\n          -104,\n          88,\n          29,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162261466,\n          1157419448,\n          1117082416,\n          725160833,\n          1097779625,\n          587573663,\n          983103602,\n          710823019,\n          753384229,\n          1116832748,\n          985045928,\n          595683841,\n          581153359,\n          1177\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1155707027,\n          1157417261,\n          755170369,\n          1160469332,\n          715080437,\n          625955849,\n          600466756,\n          970680173,\n          1145606612,\n          586117202,\n          597096914,\n          600816650,\n          968570995,\n          1094\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 139913034750054867,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      
\"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          173344421,\n          926922319,\n          125094975,\n          104849077,\n          996340926,\n          128308961,\n          716421494,\n          895929981,\n          330169982,\n          45303595,\n          854808011,\n          120186078,\n          1042148470,\n          1052286139,\n          1072392027,\n          212793033,\n          926587978,\n          936551517,\n          771080187,\n          922220089,\n          711932081,\n          337168875,\n          204823778,\n          626301163,\n          668526305,\n          631039174,\n          222092901,\n          1017769562,\n          366822986,\n          447699281,\n          221028266,\n          882048966,\n          780140150,\n          707091531,\n          658438330,\n          925759037,\n          840869161,\n          614251182,\n          502513462,\n          327059393,\n          1073313478,\n          354489394,\n          559\n        ],\n        \"cutValueData\": [\n          66,\n          99,\n          -36,\n          -127,\n          66,\n          81,\n          109,\n          108,\n          66,\n          -106,\n          -119,\n          -66,\n          66,\n          -124,\n          62,\n          -90,\n          66,\n          -66,\n          -72,\n          -62,\n          66,\n          -128,\n          -50,\n          -52,\n          66,\n          110,\n          65,\n          -25,\n          66,\n          85,\n          -21,\n          76,\n          66,\n          -110,\n          -88,\n          -65,\n          66,\n          -97,\n          -33,\n          99,\n          66,\n          -102,\n         
 9,\n          -78,\n          66,\n          -106,\n          -11,\n          -93,\n          66,\n          -75,\n          36,\n          -85,\n          66,\n          -125,\n          26,\n          30,\n          66,\n          -90,\n          65,\n          -17,\n          66,\n          -101,\n          -101,\n          23,\n          66,\n          -115,\n          -45,\n          -97,\n          66,\n          -121,\n          71,\n          96,\n          66,\n          127,\n          -74,\n          124,\n          66,\n          101,\n          -92,\n          -36,\n          66,\n          -110,\n          -116,\n          122,\n          66,\n          -67,\n          127,\n          57,\n          66,\n          -85,\n          47,\n          35,\n          66,\n          -124,\n          27,\n          37,\n          66,\n          102,\n          -70,\n          -73,\n          66,\n          72,\n          86,\n          -98,\n          66,\n          -102,\n          26,\n          -128,\n          66,\n          83,\n          -112,\n          22,\n          66,\n          -126,\n          -93,\n          45,\n          66,\n          -104,\n          59,\n          -104,\n          66,\n          104,\n          -84,\n          113,\n          66,\n          -110,\n          -38,\n          -81,\n          66,\n          -104,\n          -117,\n          -40,\n          66,\n          -123,\n          -61,\n          -128,\n          66,\n          -108,\n          -59,\n          66,\n          66,\n          -99,\n          -21,\n          125,\n          66,\n          84,\n          59,\n          85,\n          66,\n          -78,\n          -21,\n          -47,\n          66,\n          -109,\n          -36,\n          93,\n          66,\n          -112,\n          -45,\n          -19,\n          66,\n          -73,\n          -90,\n          106,\n          66,\n          -99,\n          90,\n          -63,\n          66,\n          
126,\n          -81,\n          57,\n          66,\n          110,\n          -19,\n          39,\n          66,\n          -98,\n          -20,\n          40,\n          66,\n          -84,\n          33,\n          13,\n          66,\n          -123,\n          -22,\n          30,\n          66,\n          -108,\n          -48,\n          -20,\n          66,\n          -68,\n          -106,\n          41,\n          66,\n          -114,\n          10,\n          118,\n          66,\n          125,\n          60,\n          -101,\n          66,\n          97,\n          -43,\n          25,\n          66,\n          -125,\n          28,\n          -33,\n          66,\n          -87,\n          88,\n          51,\n          66,\n          -127,\n          -79,\n          -38,\n          66,\n          -86,\n          48,\n          6,\n          66,\n          -98,\n          20,\n          59,\n          66,\n          -64,\n          49,\n          72,\n          66,\n          -104,\n          -91,\n          -93,\n          66,\n          -89,\n          -5,\n          74,\n          66,\n          86,\n          -13,\n          18,\n          66,\n          -121,\n          58,\n          99,\n          66,\n          -94,\n          -31,\n          105,\n          66,\n          -79,\n          -68,\n          49,\n          66,\n          -92,\n          -14,\n          -81,\n          66,\n          -97,\n          -28,\n          65,\n          66,\n          -123,\n          103,\n          -66,\n          66,\n          -102,\n          116,\n          23,\n          66,\n          -71,\n          -120,\n          -66,\n          66,\n          69,\n          30,\n          78,\n          66,\n          -93,\n          -50,\n          -124,\n          66,\n          -98,\n          5,\n          7,\n          66,\n          -70,\n          78,\n          -24,\n          66,\n          113,\n          67,\n          -46,\n          66,\n          -89,\n    
      -23,\n          -32,\n          66,\n          121,\n          69,\n          16,\n          66,\n          -117,\n          -108,\n          -42,\n          66,\n          -64,\n          -53,\n          12,\n          66,\n          117,\n          10,\n          53,\n          66,\n          -82,\n          102,\n          47,\n          66,\n          -96,\n          18,\n          -45,\n          66,\n          -108,\n          -32,\n          54,\n          66,\n          -126,\n          -80,\n          -31,\n          66,\n          -93,\n          1,\n          90,\n          66,\n          -103,\n          76,\n          -107,\n          66,\n          -106,\n          25,\n          119,\n          66,\n          -125,\n          -71,\n          95,\n          66,\n          -62,\n          53,\n          -29,\n          66,\n          -98,\n          -17,\n          0,\n          66,\n          -119,\n          55,\n          97,\n          66,\n          -116,\n          89,\n          -124,\n          66,\n          77,\n          -106,\n          -67,\n          66,\n          -114,\n          -61,\n          -23,\n          66,\n          79,\n          54,\n          -48,\n          66,\n          -82,\n          15,\n          -123,\n          66,\n          -104,\n          61,\n          -74,\n          66,\n          98,\n          1,\n          -22,\n          66,\n          -93,\n          -64,\n          -33,\n          66,\n          -102,\n          45,\n          47,\n          66,\n          -119,\n          -117,\n          -12,\n          66,\n          -76,\n          82,\n          26,\n          66,\n          -78,\n          16,\n          110,\n          66,\n          -94,\n          25,\n          -6,\n          66,\n          -96,\n          63,\n          -32,\n          66,\n          109,\n          23,\n          -105,\n          66,\n          83,\n          76,\n          31,\n          66,\n          -69,\n         
 -47,\n          55,\n          66,\n          -110,\n          26,\n          -90,\n          66,\n          -104,\n          11,\n          -54,\n          66,\n          -80,\n          117,\n          123,\n          66,\n          118,\n          46,\n          -112,\n          66,\n          -124,\n          53,\n          -21,\n          66,\n          96,\n          33,\n          50,\n          66,\n          -91,\n          42,\n          -27,\n          66,\n          -90,\n          -41,\n          59,\n          66,\n          -107,\n          16,\n          85,\n          66,\n          -74,\n          -80,\n          111,\n          66,\n          -108,\n          42,\n          -88,\n          66,\n          -64,\n          -107,\n          -82,\n          66,\n          -89,\n          12,\n          28,\n          66,\n          -69,\n          81,\n          90,\n          66,\n          -109,\n          76,\n          -102,\n          66,\n          -75,\n          62,\n          -59,\n          66,\n          110,\n          47,\n          -59,\n          66,\n          -76,\n          -99,\n          -86,\n          66,\n          -101,\n          20,\n          -98,\n          66,\n          -114,\n          -92,\n          -73,\n          66,\n          105,\n          -93,\n          6,\n          66,\n          -106,\n          -41,\n          -84,\n          66,\n          -68,\n          -107,\n          98,\n          66,\n          -65,\n          -33,\n          15,\n          66,\n          -67,\n          78,\n          16,\n          66,\n          -96,\n          114,\n          -12,\n          66,\n          -84,\n          27,\n          -115,\n          66,\n          -115,\n          3,\n          115,\n          66,\n          -105,\n          104,\n          -1,\n          66,\n          -87,\n          74,\n          30,\n          66,\n          -87,\n          -47,\n          -33,\n          66,\n          -124,\n         
 117,\n          113,\n          66,\n          -99,\n          -88,\n          -32,\n          66,\n          -119,\n          -113,\n          -95,\n          66,\n          -93,\n          -88,\n          25,\n          66,\n          120,\n          69,\n          -40,\n          66,\n          -125,\n          121,\n          8,\n          66,\n          -91,\n          120,\n          18,\n          66,\n          -93,\n          -50,\n          108,\n          66,\n          124,\n          -11,\n          52,\n          66,\n          -97,\n          -37,\n          -115,\n          66,\n          -101,\n          -107,\n          109,\n          66,\n          -122,\n          -49,\n          114,\n          66,\n          -75,\n          98,\n          50,\n          66,\n          -105,\n          40,\n          18,\n          66,\n          106,\n          -31,\n          -115,\n          66,\n          81,\n          98,\n          -52,\n          66,\n          -128,\n          -42,\n          123,\n          66,\n          -79,\n          -126,\n          -16,\n          66,\n          -96,\n          98,\n          51,\n          66,\n          -123,\n          -105,\n          7,\n          66,\n          86,\n          -80,\n          30,\n          66,\n          -95,\n          -9,\n          -97,\n          66,\n          -82,\n          105,\n          -101,\n          66,\n          -125,\n          -113,\n          46,\n          66,\n          108,\n          76,\n          -61,\n          66,\n          -97,\n          -22,\n          -21,\n          66,\n          -62,\n          -114,\n          -113,\n          66,\n          -117,\n          -19,\n          -6,\n          66,\n          104,\n          27,\n          -116,\n          66,\n          -113,\n          43,\n          64,\n          66,\n          116,\n          107,\n          15,\n          66,\n          -94,\n          108,\n          -110,\n          66,\n          
-59,\n          -50,\n          -107,\n          66,\n          84,\n          13,\n          86,\n          66,\n          80,\n          -114,\n          5,\n          66,\n          -107,\n          28,\n          101,\n          66,\n          96,\n          120,\n          -27,\n          66,\n          -94,\n          -26,\n          23,\n          66,\n          -68,\n          -122,\n          -93,\n          66,\n          -83,\n          -93,\n          -117,\n          66,\n          -86,\n          9,\n          61,\n          66,\n          -82,\n          -73,\n          -20,\n          66,\n          -73,\n          52,\n          -29,\n          66,\n          119,\n          92,\n          -3,\n          66,\n          -124,\n          -73,\n          29,\n          66,\n          105,\n          91,\n          88,\n          66,\n          -69,\n          19,\n          88,\n          66,\n          -93,\n          66,\n          -43,\n          66,\n          104,\n          -11,\n          29,\n          66,\n          -78,\n          -110,\n          115,\n          66,\n          -85,\n          26,\n          104,\n          66,\n          -86,\n          -96,\n          84,\n          66,\n          -115,\n          105,\n          116,\n          66,\n          -61,\n          68,\n          -22,\n          66,\n          102,\n          9,\n          123,\n          66,\n          86,\n          86,\n          -73,\n          66,\n          121,\n          -36,\n          85,\n          66,\n          -101,\n          126,\n          51,\n          66,\n          127,\n          -26,\n          16,\n          66,\n          -83,\n          -95,\n          -123,\n          66,\n          -91,\n          -49,\n          80,\n          66,\n          -101,\n          -104,\n          7,\n          66,\n          -109,\n          -58,\n          -37,\n          66,\n          -99,\n          -74,\n          -58,\n          66,\n          
-86,\n          41,\n          75,\n          66,\n          93,\n          47,\n          -33,\n          66,\n          126,\n          -126,\n          96,\n          66,\n          -78,\n          -120,\n          57,\n          66,\n          -111,\n          -106,\n          -81,\n          66,\n          113,\n          53,\n          87,\n          66,\n          -83,\n          -15,\n          -87,\n          66,\n          -121,\n          -15,\n          -11,\n          66,\n          -122,\n          46,\n          66,\n          66,\n          -120,\n          -48,\n          72,\n          66,\n          -107,\n          -53,\n          -23,\n          66,\n          115,\n          77,\n          24,\n          66,\n          -112,\n          -47,\n          105,\n          66,\n          125,\n          -23,\n          -56,\n          66,\n          -103,\n          -23,\n          -92,\n          66,\n          -106,\n          -59,\n          126,\n          66,\n          -92,\n          78,\n          -12,\n          66,\n          -106,\n          112,\n          -96,\n          66,\n          117,\n          -41,\n          33,\n          66,\n          -128,\n          -9,\n          4,\n          66,\n          87,\n          -65,\n          70,\n          66,\n          71,\n          46,\n          7,\n          66,\n          -92,\n          59,\n          105,\n          66,\n          -98,\n          -66,\n          -104,\n          66,\n          75,\n          12,\n          -77,\n          66,\n          -124,\n          78,\n          23,\n          66,\n          -90,\n          -109,\n          -54,\n          66,\n          106,\n          -31,\n          -12,\n          66,\n          -97,\n          122,\n          -79,\n          66,\n          -99,\n          -125,\n          -77,\n          66,\n          -110,\n          -34,\n          -123,\n          66,\n          -87,\n          32,\n          49,\n          66,\n      
    120,\n          -71,\n          -77,\n          66,\n          -126,\n          124,\n          -18,\n          66,\n          -95,\n          79,\n          104,\n          66,\n          -103,\n          50,\n          88,\n          66,\n          -119,\n          -33,\n          18,\n          66,\n          114,\n          -13,\n          -120,\n          66,\n          -104,\n          -69,\n          105,\n          66,\n          -81,\n          -121,\n          58,\n          66,\n          -91,\n          50,\n          -88,\n          66,\n          116,\n          -102,\n          -103,\n          66,\n          -107,\n          50,\n          69,\n          66,\n          -94,\n          42,\n          -57,\n          66,\n          -110,\n          -67,\n          -27,\n          66,\n          98,\n          1,\n          122,\n          66,\n          -115,\n          5,\n          -4,\n          66,\n          111,\n          16,\n          83,\n          66,\n          -96,\n          -86,\n          -95,\n          66,\n          -107,\n          111,\n          -81,\n          66,\n          107,\n          -111,\n          -55,\n          66,\n          -68,\n          -95,\n          55,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 254,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          774838787,\n          974123738,\n          726990791,\n          984753580,\n          1033101512,\n          768198292,\n          710333195,\n          1027541941,\n          970154305,\n          629494405,\n          970230928,\n          581724166,\n          582964438,\n          1201\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1032589862,\n          644106491,\n          774755684,\n          1142951710,\n          1155705935,\n  
        1159936414,\n          1099543865,\n          581190029,\n          630616823,\n          624356896,\n          582784870,\n          726805696,\n          595718023,\n          1102\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -950767441100490523,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    }\n  ],\n  \"executionContext\": {\n    \"parallelExecutionEnabled\": false,\n    \"threadPoolSize\": 0\n  },\n  \"saveTreeStateEnabled\": true,\n  \"saveSamplerStateEnabled\": true,\n  \"saveCoordinatorStateEnabled\": true\n}\n"
  },
  {
    "path": "Java/core/src/test/resources/com/amazon/randomcutforest/state/state_2.json",
    "content": "{\n  \"version\": \"2.0\",\n  \"totalUpdates\": 505,\n  \"timeDecay\": 1.0E-4,\n  \"numberOfTrees\": 30,\n  \"sampleSize\": 256,\n  \"shingleSize\": 8,\n  \"dimensions\": 32,\n  \"outputAfter\": 32,\n  \"compressed\": true,\n  \"partialTreeState\": true,\n  \"boundingBoxCacheFraction\": 0.0,\n  \"storeSequenceIndexesEnabled\": false,\n  \"compact\": true,\n  \"internalShinglingEnabled\": false,\n  \"centerOfMassEnabled\": false,\n  \"precision\": \"FLOAT_32\",\n  \"pointStoreState\": {\n    \"version\": \"2.0\",\n    \"dimensions\": 32,\n    \"capacity\": 7681,\n    \"shingleSize\": 8,\n    \"precision\": \"FLOAT_32\",\n    \"startOfFreeSegment\": 2048,\n    \"pointData\": [\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -103,\n      -102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      64,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      69,\n      -47,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      23,\n      70,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      85,\n      85,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -52,\n      -51,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      93,\n      23,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -52,\n      -51,\n      
66,\n      -64,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      28,\n      114,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      113,\n      -57,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      -114,\n      57,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -103,\n      -102,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      69,\n      -47,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      -64,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      51,\n      51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n  
    0,\n      0,\n      66,\n      -106,\n      102,\n      102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      113,\n      -57,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      -103,\n      -102,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      69,\n      -47,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      51,\n      51,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      69,\n      -47,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      102,\n      102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      
66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -94,\n      -23,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      23,\n      70,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -128,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      -103,\n      -102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -114,\n      57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      113,\n      -57,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      23,\n      70,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      -128,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      -29,\n      -114,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -128,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      113,\n      -57,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-127,\n      102,\n      102,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -94,\n      -23,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -114,\n      57,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      -86,\n      -85,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      113,\n      -57,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -57,\n      28,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      28,\n      114,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      56,\n      -28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -117,\n      -93,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      
0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      67,\n      -5,\n      -26,\n      102,\n      69,\n      -121,\n      -72,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      102,\n      102,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -52,\n      -51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      -103,\n      -102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -24,\n      -70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -127,\n      -86,\n      -85,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      23,\n      70,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -86,\n      -85,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -29,\n      -114,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -94,\n      -23,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      -103,\n      
-102,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -86,\n      -85,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      -114,\n      57,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      113,\n      -57,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      46,\n      -116,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -57,\n      28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      116,\n      93,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      113,\n      -57,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      66,\n      -114,\n      -24,\n      -70,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      64,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      64,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      69,\n      -47,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -29,\n      -114,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      120,\n      102,\n      102,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -47,\n      116,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -24,\n      -70,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n 
     0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      93,\n      23,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      46,\n      -116,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      -29,\n      -114,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      42,\n      -85,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      -86,\n      -85,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -127,\n      102,\n      102,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -94,\n      -23,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      127,\n      -114,\n      57,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -123,\n      23,\n      70,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      -52,\n      -51,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      56,\n      -28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      -47,\n      116,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      56,\n      -28,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      51,\n      51,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      102,\n      102,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      66,\n      2,\n      -52,\n      -51,\n      66,\n      -66,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      51,\n      51,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      113,\n      -57,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      84,\n   
   0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -128,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      51,\n      51,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      42,\n      -85,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -52,\n      -51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      -103,\n      -102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      56,\n      -28,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      113,\n      -57,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      102,\n      102,\n  
    66,\n      -70,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      -64,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -52,\n      -51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      46,\n      -116,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -24,\n      -70,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      -94,\n      -23,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      51,\n      51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -103,\n      -102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      -103,\n      -102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      113,\n      -57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -103,\n      -102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n    
  0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      51,\n      51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      -103,\n      -102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -52,\n      -51,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      -103,\n      -102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -86,\n      -85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      113,\n      -57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -117,\n      -93,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      51,\n      51,\n      66,\n      -66,\n      
0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      51,\n      51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -117,\n      -93,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      23,\n      70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      -29,\n      -114,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -128,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      -70,\n      47,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n    
  119,\n      -103,\n      -102,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -43,\n      85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      46,\n      -116,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -117,\n      -93,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -103,\n      -102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -103,\n      -102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      46,\n      -116,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -114,\n      57,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -52,\n      -51,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      56,\n      -28,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      
68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      51,\n      51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      -117,\n      -93,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -94,\n      -23,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -88,\n      93,\n      23,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -70,\n      47,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      85,\n      85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      -114,\n      57,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -103,\n      -102,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      -24,\n      -70,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      28,\n  
    114,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -70,\n      47,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -103,\n      -102,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      56,\n      -28,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      46,\n      -116,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -86,\n      -85,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      23,\n      70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      69,\n      -47,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n 
     0,\n      0,\n      66,\n      -106,\n      -24,\n      -70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -117,\n      -93,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      -110,\n      -86,\n      -85,\n      66,\n      -78,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      42,\n      -85,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      93,\n      23,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -94,\n      -23,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -103,\n      -102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n   
   66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      85,\n      85,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -29,\n      -114,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      102,\n      102,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      46,\n      -116,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      113,\n      -57,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -57,\n      28,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -70,\n      47,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      -103,\n      -102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -114,\n      57,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      -20,\n      79,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n 
     -52,\n      -51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      -114,\n      57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -29,\n      -114,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      46,\n      -116,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      51,\n      51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      69,\n      -47,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      46,\n      -116,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -114,\n      57,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -24,\n      -70,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      113,\n      -57,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n  
    0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      51,\n      51,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      102,\n      102,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      51,\n      51,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -52,\n      -51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -114,\n      57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      85,\n      85,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      113,\n      -57,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      51,\n      51,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -103,\n      -102,\n      66,\n     
 -60,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      113,\n      -57,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -24,\n      -70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      56,\n      -28,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -52,\n      -51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -86,\n      -85,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      
66,\n      -88,\n      -103,\n      -102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -94,\n      -23,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -52,\n      -51,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -86,\n      -85,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -52,\n      -51,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      28,\n      114,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      -52,\n      -51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -52,\n      -51,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      56,\n      -28,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      
104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      -94,\n      -23,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      -57,\n      28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      93,\n      23,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      -57,\n      28,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      -70,\n      47,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      64,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -70,\n      47,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n   
   66,\n      -68,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -128,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      51,\n      51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -91,\n      116,\n      93,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      113,\n      -57,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -70,\n      47,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      93,\n      23,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      -57,\n      28,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      42,\n      -85,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      93,\n      23,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -29,\n      -114,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      -70,\n      47,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -112,\n      -117,\n      -93,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      56,\n      -28,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      122,\n      85,\n      85,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -86,\n      -85,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -103,\n      -102,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -95,\n      -47,\n      116,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      -24,\n      -70,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      -70,\n      47,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      113,\n      -57,\n      66,\n      -78,\n      0,\n     
 0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      42,\n      -85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      66,\n      26,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -29,\n      -114,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -70,\n      47,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      85,\n      85,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      85,\n      85,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -70,\n      47,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      -114,\n      57,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      69,\n      -47,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      56,\n      -28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      102,\n      102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-114,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      51,\n      51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      85,\n      85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      102,\n      102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      28,\n      114,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      51,\n      51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      -24,\n      -70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -29,\n      -114,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -70,\n      47,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      93,\n      23,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n     
 0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      -103,\n      -102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -57,\n      28,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      28,\n      114,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      56,\n      -28,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      64,\n      0,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      85,\n      85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      -103,\n      -102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      64,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      51,\n      51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      122,\n      56,\n      -28,\n      66,\n      
-96,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -114,\n      57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -114,\n      57,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      -52,\n      -51,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      113,\n      -57,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      42,\n      -85,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      51,\n      51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      -37,\n      110,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -117,\n      -93,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      69,\n      -47,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      46,\n      -116,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n  
    0,\n      66,\n      -109,\n      102,\n      102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -97,\n      -57,\n      28,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -100,\n      51,\n      51,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      51,\n      51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -52,\n      -51,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      51,\n      51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -86,\n      -85,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -103,\n      -102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      116,\n      93,\n      66,\n      -68,\n      0,\n      0,\n      66,\n   
   80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      113,\n      -57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      -103,\n      -102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      56,\n      -28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      -103,\n      -102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      51,\n      51,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      69,\n      -47,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      -114,\n      57,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      69,\n      -47,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      85,\n      85,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n 
     0,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      -57,\n      28,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      -57,\n      28,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -24,\n      -70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      118,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -103,\n      -102,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      -86,\n      -85,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      -52,\n      -51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      66,\n      -105,\n      69,\n      -47,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -117,\n      -93,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -103,\n      -102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -101,\n      51,\n      51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -52,\n      -51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      93,\n      23,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -47,\n      116,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -52,\n      -51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -72,\n      0,\n      
0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      69,\n      -47,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -96,\n      113,\n      -57,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      51,\n      51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      102,\n      102,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      102,\n      102,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      126,\n      -86,\n      -85,\n      66,\n      -96,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -79,\n      23,\n      70,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      46,\n      -116,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      
-111,\n      -52,\n      -51,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -84,\n      -86,\n      -85,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      42,\n      -85,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -125,\n      102,\n      102,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      -103,\n      -102,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      113,\n      -57,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      46,\n      -116,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -103,\n      -102,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      -52,\n      -51,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      80,\n   
   0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      -64,\n      0,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -86,\n      -85,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      56,\n      -28,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      -52,\n      -51,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      102,\n      102,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      116,\n      93,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      28,\n      114,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      -94,\n      -23,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -47,\n      116,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -43,\n      85,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      116,\n      93,\n  
    66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -103,\n      -102,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      -64,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      125,\n      51,\n      51,\n      66,\n      -86,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -117,\n      -93,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -57,\n      28,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -124,\n      56,\n      -28,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -120,\n      113,\n      -57,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      
0,\n      0,\n      66,\n      -125,\n      23,\n      70,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      -57,\n      28,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      102,\n      102,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -86,\n      -85,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      -117,\n      -93,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      117,\n      85,\n      85,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -24,\n      -70,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      116,\n      93,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      -70,\n      47,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -123,\n      28,\n      114,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      -57,\n      28,\n      66,\n      -58,\n      0,\n      0,\n    
  66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      93,\n      23,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -114,\n      -43,\n      85,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      56,\n      -28,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      56,\n      -28,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      -86,\n      -85,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -117,\n      -93,\n      66,\n      -74,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -89,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      123,\n      -128,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      -52,\n      -51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -93,\n      28,\n      114,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      
-117,\n      -93,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      42,\n      -85,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      -52,\n      -51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -109,\n      69,\n      -47,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      102,\n      102,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -119,\n      102,\n      102,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      -52,\n      -51,\n      66,\n      -76,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -108,\n      113,\n      -57,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      -52,\n      -51,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      
0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -117,\n      -93,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      56,\n      -28,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      51,\n      51,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      85,\n      85,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -121,\n      -52,\n      -51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      -103,\n      -102,\n      66,\n      -66,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -106,\n      56,\n      -28,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      51,\n      51,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      116,\n      93,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      46,\n      -116,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -102,\n      -117,\n      -93,\n      66,\n      -56,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      66,\n      -70,\n 
     0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -127,\n      51,\n      51,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -126,\n      102,\n      102,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -107,\n      -47,\n      116,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -99,\n      116,\n      93,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -98,\n      -117,\n      -93,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -117,\n      23,\n      70,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -110,\n      51,\n      51,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      100,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      102,\n      102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -115,\n      -103,\n      -102,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -103,\n      -114,\n      57,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      124,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n 
     66,\n      -102,\n      0,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      -128,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -104,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -92,\n      -128,\n      0,\n      66,\n      -58,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      102,\n      102,\n      66,\n      -68,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -113,\n      -57,\n      28,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -122,\n      102,\n      102,\n      66,\n      -78,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      116,\n      93,\n      66,\n      -70,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -94,\n      51,\n      51,\n      66,\n      -60,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -111,\n      -64,\n      0,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -128,\n      -103,\n      -102,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      
72,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -87,\n      -52,\n      -51,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -112,\n      56,\n      -28,\n      66,\n      -72,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -116,\n      -70,\n      47,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      76,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -85,\n      85,\n      85,\n      66,\n      -64,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      66,\n      -105,\n      -64,\n      0,\n      66,\n      -62,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0\n    ],\n    \"compressed\": true,\n    \"refCount\": [\n      4,\n      22,\n      505,\n      488645087,\n      627314938,\n      582700772,\n      399780962,\n      393065644,\n      493329789,\n      348405217,\n      399993616,\n      204415313,\n      496738328,\n      681754803,\n      538579750,\n      504435967,\n      497026482,\n      498871798,\n      592916307,\n      452127812,\n      511204333,\n      403138722,\n      491772828,\n      550815193,\n      452278665,\n      733388952,\n      508324222,\n      528467679,\n      449961106,\n      350404130,\n      449138598,\n      466853076,\n      362626119,\n      439225686,\n      248861557,\n      429062015,\n      624839501,\n      362886465,\n      719423305,\n      219046244,\n      263992202,\n      628055381,\n      499098559,\n      482781665,\n      504168810,\n      501925309,\n      630311957,\n      533598347,\n      556015254,\n      602409978,\n      499090983,\n      578059376,\n      345154108,\n      350733058,\n      588319029,\n      452650544,\n      461820581,\n      501176100,\n      452560325,\n      254595844,\n  
    533593920,\n      407318270,\n      412155743,\n      503627008,\n      454598108,\n      496752770,\n      449959602,\n      538428890,\n      455440652,\n      461942735,\n      452528503,\n      305537155,\n      360787231,\n      536178278,\n      456428308,\n      10\n    ],\n    \"directLocationMap\": false,\n    \"locationList\": [\n      0,\n      2016,\n      505,\n      8068,\n      24212,\n      40356,\n      56500,\n      72644,\n      88788,\n      104932,\n      121076,\n      137220,\n      153364,\n      169508,\n      185652,\n      201796,\n      217940,\n      234084,\n      250228,\n      266372,\n      282516,\n      298660,\n      314804,\n      330948,\n      347092,\n      363236,\n      379380,\n      395524,\n      411668,\n      427812,\n      443956,\n      460100,\n      476244,\n      492388,\n      508532,\n      524676,\n      540820,\n      556964,\n      573108,\n      589252,\n      605396,\n      621540,\n      637684,\n      653828,\n      669972,\n      686116,\n      702260,\n      718404,\n      734548,\n      750692,\n      766836,\n      782980,\n      799124,\n      815268,\n      831412,\n      847556,\n      863700,\n      879844,\n      895988,\n      912132,\n      928276,\n      944420,\n      960564,\n      976708,\n      992852,\n      1008996,\n      1025140,\n      1041284,\n      1057428,\n      1073572,\n      1089716,\n      1105860,\n      1122004,\n      1138148,\n      1154292,\n      1170436,\n      1186580,\n      1202724,\n      1218868,\n      1235012,\n      1251156,\n      1267300,\n      1283444,\n      1299588,\n      1315732,\n      1331876,\n      1348020,\n      1364164,\n      1380308,\n      1396452,\n      1412596,\n      1428740,\n      1444884,\n      1461028,\n      1477172,\n      1493316,\n      1509460,\n      1525604,\n      1541748,\n      1557892,\n      1574036,\n      1590180,\n      1606324,\n      1622468,\n      1638612,\n      1654756,\n      1670900,\n      1687044,\n      
1703188,\n      1719332,\n      1735476,\n      1751620,\n      1767764,\n      1783908,\n      1800052,\n      1816196,\n      1832340,\n      1848484,\n      1864628,\n      1880772,\n      1896916,\n      1913060,\n      1929204,\n      1945348,\n      1961492,\n      1977636,\n      1993780,\n      2009924,\n      2026068,\n      2042212,\n      2058356,\n      2074500,\n      2090644,\n      2106788,\n      2122932,\n      2139076,\n      2155220,\n      2171364,\n      2187508,\n      2203652,\n      2219796,\n      2235940,\n      2252084,\n      2268228,\n      2284372,\n      2300516,\n      2316660,\n      2332804,\n      2348948,\n      2365092,\n      2381236,\n      2397380,\n      2413524,\n      2429668,\n      2445812,\n      2461956,\n      2478100,\n      2494244,\n      2510388,\n      2526532,\n      2542676,\n      2558820,\n      2574964,\n      2591108,\n      2607252,\n      2623396,\n      2639540,\n      2655684,\n      2671828,\n      2687972,\n      2704116,\n      2720260,\n      2736404,\n      2752548,\n      2768692,\n      2784836,\n      2800980,\n      2817124,\n      2833268,\n      2849412,\n      2865556,\n      2881700,\n      2897844,\n      2913988,\n      2930132,\n      2946276,\n      2962420,\n      2978564,\n      2994708,\n      3010852,\n      3026996,\n      3043140,\n      3059284,\n      3075428,\n      3091572,\n      3107716,\n      3123860,\n      3140004,\n      3156148,\n      3172292,\n      3188436,\n      3204580,\n      3220724,\n      3236868,\n      3253012,\n      3269156,\n      3285300,\n      3301444,\n      3317588,\n      3333732,\n      3349876,\n      3366020,\n      3382164,\n      3398308,\n      3414452,\n      3430596,\n      3446740,\n      3462884,\n      3479028,\n      3495172,\n      3511316,\n      3527460,\n      3543604,\n      3559748,\n      3575892,\n      3592036,\n      3608180,\n      3624324,\n      3640468,\n      3656612,\n      3672756,\n      3688900,\n      3705044,\n      
3721188,\n      3737332,\n      3753476,\n      3769620,\n      3785764,\n      3801908,\n      3818052,\n      3834196,\n      3850340,\n      3866484,\n      3882628,\n      3898772,\n      3914916,\n      3931060,\n      3947204,\n      3963348,\n      3979492,\n      3995636,\n      4011780,\n      4027924,\n      4044068,\n      4060212,\n      2016\n    ],\n    \"reverseAvailable\": false,\n    \"internalShinglingEnabled\": false,\n    \"lastTimeStamp\": 505,\n    \"rotationEnabled\": false,\n    \"dynamicResizingEnabled\": true,\n    \"currentStoreCapacity\": 512,\n    \"indexCapacity\": 512\n  },\n  \"compactSamplerStates\": [\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.56757265,\n        -0.5781482,\n        -0.5900879,\n        -0.6161663,\n        -0.74962693,\n        -0.6151643,\n        -0.61670864,\n        -0.67350674,\n        -0.62119406,\n        -0.7689292,\n        -0.7968979,\n        -0.63657176,\n        -0.6180514,\n        -0.7882518,\n        -0.663554,\n        -0.94079185,\n        -1.0720736,\n        -0.6319752,\n        -0.6278517,\n        -0.8380472,\n        -0.9604324,\n        -0.8651731,\n        -0.798815,\n        -0.6567839,\n        -0.6820692,\n        -0.9421621,\n        -0.88186073,\n        -0.7905466,\n        -0.8823367,\n        -0.8736085,\n        -0.85339946,\n        -0.9529896,\n        -1.5253699,\n        -1.1973315,\n        -1.3102506,\n        -0.7358565,\n        -0.8139095,\n        -0.7017528,\n        -1.0430143,\n        -1.0908111,\n        -0.89280295,\n        -1.3095568,\n        -0.98362166,\n        -0.8744968,\n        -0.96791035,\n        -1.2037274,\n        -1.0653238,\n        -0.7173222,\n        -0.696716,\n        -0.9508131,\n        -0.7733483,\n        -1.1465819,\n        -1.3489482,\n        -1.1664346,\n        -1.076245,\n        -0.8089149,\n        -1.9500605,\n        -1.164026,\n        -2.239261,\n        -1.3196678,\n        -1.0025091,\n        
-1.0488088,\n        -1.221525,\n        -2.491666,\n        -1.0704556,\n        -1.9203418,\n        -1.6465822,\n        -1.5930494,\n        -2.1173043,\n        -1.454771,\n        -2.3791387,\n        -1.2175801,\n        -1.4976561,\n        -0.97529536,\n        -0.9776957,\n        -0.7076328,\n        -1.9713205,\n        -1.994005,\n        -1.4144034,\n        -1.7856992,\n        -1.8549204,\n        -1.3750582,\n        -1.182027,\n        -2.3264935,\n        -3.4964423,\n        -1.971149,\n        -1.2945454,\n        -1.548858,\n        -1.2041738,\n        -1.468671,\n        -1.4650595,\n        -2.2554448,\n        -1.3044451,\n        -1.1853373,\n        -1.730136,\n        -1.241813,\n        -1.1292748,\n        -0.71116483,\n        -0.79887444,\n        -1.2845273,\n        -1.105379,\n        -1.2366987,\n        -0.92921096,\n        -1.1657026,\n        -2.6717122,\n        -1.7439715,\n        -1.9934863,\n        -1.2263087,\n        -1.7673148,\n        -2.3491075,\n        -1.0961275,\n        -0.89520067,\n        -1.0248098,\n        -4.909743,\n        -4.153232,\n        -2.1522255,\n        -1.7650629,\n        -2.4412453,\n        -2.7482073,\n        -3.0675554,\n        -3.1178305,\n        -1.8242788,\n        -1.892877,\n        -3.210243,\n        -1.9061444,\n        -1.2413985,\n        -1.2598059,\n        -4.203074,\n        -2.7223814,\n        -2.2999434,\n        -1.3408724,\n        -7.0299697,\n        -4.8111978,\n        -3.3258471,\n        -2.0125356,\n        -3.1982787,\n        -2.8312833,\n        -2.5334399,\n        -2.3389008,\n        -2.9442873,\n        -1.8930559,\n        -2.3911982,\n        -2.5047672,\n        -4.4145966,\n        -1.2764541,\n        -3.9048793,\n        -3.238948,\n        -1.5198247,\n        -1.2750254,\n        -1.8376037,\n        -3.6638694,\n        -1.5739453,\n        -1.1896442,\n        -2.0554008,\n        -3.439994,\n        -3.6345048,\n        -4.235151,\n      
  -4.323954,\n        -1.6634188,\n        -3.0570233,\n        -2.0862427,\n        -3.161959,\n        -3.9133036,\n        -2.0392869,\n        -3.2230003,\n        -1.7840478,\n        -1.9822911,\n        -2.66759,\n        -2.8719387,\n        -4.63498,\n        -3.9776332,\n        -2.2720344,\n        -3.0024054,\n        -3.007087,\n        -1.3444325,\n        -5.0787582,\n        -2.1520054,\n        -1.8487936,\n        -1.926888,\n        -2.18643,\n        -4.2005286,\n        -1.6425658,\n        -1.5144346,\n        -5.0267115,\n        -2.5702274,\n        -2.866389,\n        -2.01247,\n        -1.2021661,\n        -3.4260905,\n        -1.7879579,\n        -3.3959882,\n        -4.2575583,\n        -2.2555158,\n        -2.5958145,\n        -3.0608132,\n        -1.8771381,\n        -0.9143291,\n        -1.0414093,\n        -1.7113725,\n        -1.3806139,\n        -1.5017926,\n        -1.4232012,\n        -2.1246378,\n        -1.4538587,\n        -1.328111,\n        -2.9455528,\n        -1.5375834,\n        -2.6173167,\n        -1.3469445,\n        -2.9239001,\n        -3.6207416,\n        -2.3897822,\n        -4.7668986,\n        -2.2512138,\n        -3.4965847,\n        -2.324559,\n        -1.2498051,\n        -2.5054939,\n        -2.3657448,\n        -4.1115704,\n        -2.3869483,\n        -2.610965,\n        -6.1069016,\n        -4.5214825,\n        -2.4539022,\n        -1.4635035\n      ],\n      \"pointIndex\": [\n        4,\n        504,\n        226,\n        35529227,\n        112927561,\n        49466190,\n        87494404,\n        14543996,\n        46779260,\n        60007490,\n        107327202,\n        98951852,\n        22404378,\n        16035566,\n        95163157,\n        53840153,\n        109554390,\n        71544057,\n        80474669,\n        91089142,\n        113143202,\n        13047000,\n        38971917,\n        119024767,\n        816686,\n        43233034,\n        17622851,\n        2195915,\n        19686486,\n   
     78660725,\n        110063416,\n        98516391,\n        23698955,\n        75550084,\n        26028907,\n        84501483,\n        88406807,\n        95864199,\n        104978001,\n        115947880,\n        30120112,\n        8535367,\n        33647170,\n        12618829,\n        37229842,\n        6852326,\n        42331373,\n        50854248,\n        43799784,\n        24507333,\n        3305776,\n        74196777,\n        89702118,\n        22617836,\n        26248412,\n        111735732,\n        20711699,\n        57937369,\n        125458152,\n        9661186,\n        67170512,\n        66812728,\n        10178588,\n        23985659,\n        121632067,\n        111096052,\n        1915242,\n        81124642,\n        109101203,\n        116266930,\n        226297,\n        89366943,\n        94814190,\n        2307544,\n        100343308,\n        106639341,\n        121462560,\n        119722949,\n        402\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 226,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 6231208282143323767\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.6413124,\n        -0.6741439,\n        -0.6560893,\n        -0.6797705,\n        -0.74844384,\n        -0.66546565,\n        -0.67169714,\n        -0.7227025,\n        -0.68723875,\n        -0.7745091,\n        -0.76826525,\n        -0.6939699,\n        -0.7236429,\n        -0.77351135,\n        -0.67195445,\n        -0.80146384,\n        -0.82743365,\n        -0.7136329,\n        -0.7724732,\n        -0.9194121,\n        -0.79330975,\n        -0.88315845,\n        -0.81832063,\n        -0.7143264,\n        -0.94557303,\n        -0.785616,\n        -0.8377497,\n        -0.7925366,\n        -1.0203317,\n        -0.9742233,\n        
-0.80757946,\n        -0.9707234,\n        -0.90228313,\n        -0.8993705,\n        -1.5087696,\n        -1.0732617,\n        -0.8467861,\n        -0.8320001,\n        -1.2694417,\n        -0.9735975,\n        -1.0536188,\n        -0.8474321,\n        -0.9971086,\n        -0.9269841,\n        -0.92866796,\n        -1.0152696,\n        -0.9399167,\n        -0.84670246,\n        -0.7283,\n        -1.119393,\n        -1.1513233,\n        -0.9878237,\n        -1.0573844,\n        -0.9405707,\n        -0.9608169,\n        -0.93561846,\n        -2.0413952,\n        -1.3069793,\n        -1.4010427,\n        -1.0850079,\n        -1.1713014,\n        -0.8807193,\n        -1.6299798,\n        -1.897046,\n        -1.1469332,\n        -1.4496559,\n        -1.254596,\n        -1.1360562,\n        -1.3226919,\n        -2.6269495,\n        -1.5345647,\n        -1.209115,\n        -1.4497056,\n        -0.9226162,\n        -1.5420644,\n        -1.4579161,\n        -1.810374,\n        -1.7850882,\n        -1.4213358,\n        -2.0186284,\n        -1.1599022,\n        -1.2460217,\n        -1.8482556,\n        -1.623476,\n        -1.0834888,\n        -1.0720153,\n        -1.5517547,\n        -1.0411204,\n        -1.9863597,\n        -1.015509,\n        -1.0393355,\n        -1.1298063,\n        -1.161867,\n        -1.8876396,\n        -1.513813,\n        -1.0615944,\n        -1.0518087,\n        -1.0012404,\n        -0.96941787,\n        -1.4972963,\n        -1.5582548,\n        -1.3225719,\n        -1.9551991,\n        -1.137049,\n        -1.1723896,\n        -1.282302,\n        -1.3758812,\n        -0.9793669,\n        -1.1025751,\n        -1.0587844,\n        -1.3776231,\n        -0.9507962,\n        -3.5233085,\n        -2.147424,\n        -3.5976098,\n        -3.1932046,\n        -1.3534847,\n        -2.8176162,\n        -2.3236613,\n        -2.8634186,\n        -1.4690521,\n        -1.3486727,\n        -1.8834981,\n        -3.77391,\n        -1.4108094,\n        -2.0396273,\n   
     -1.6383679,\n        -3.9983404,\n        -2.399237,\n        -1.210579,\n        -1.3090019,\n        -1.5072656,\n        -2.0974455,\n        -4.0409245,\n        -2.2359335,\n        -1.7645096,\n        -2.3572674,\n        -1.6028525,\n        -1.357799,\n        -3.522606,\n        -3.2044213,\n        -1.55917,\n        -2.6649456,\n        -1.7507952,\n        -1.51323,\n        -4.9820375,\n        -1.7488083,\n        -1.6731998,\n        -3.0470006,\n        -1.6102738,\n        -3.8390405,\n        -2.1085012,\n        -1.6249218,\n        -2.0381305,\n        -3.8072212,\n        -1.9960753,\n        -5.4924846,\n        -2.476714,\n        -2.6750987,\n        -2.3795562,\n        -2.590261,\n        -1.3741866,\n        -1.1860225,\n        -2.5269265,\n        -1.8842105,\n        -5.061702,\n        -2.7306504,\n        -1.7086381,\n        -2.189826,\n        -2.2177384,\n        -2.156229,\n        -1.7054623,\n        -1.239881,\n        -2.8083131,\n        -4.8517027,\n        -1.7392544,\n        -1.1228195,\n        -3.4175887,\n        -2.3523648,\n        -1.8688406,\n        -1.4306833,\n        -1.4683838,\n        -1.6348096,\n        -1.8269991,\n        -2.3902066,\n        -2.7796946,\n        -4.2938395,\n        -3.7553287,\n        -2.3795054,\n        -1.584336,\n        -2.9925914,\n        -2.4059489,\n        -1.5080501,\n        -2.1570578,\n        -1.4014544,\n        -1.555314,\n        -1.0247319,\n        -2.5908108,\n        -1.0894471,\n        -2.9602163,\n        -2.5347006,\n        -1.7199854,\n        -2.3623478,\n        -2.3948119,\n        -2.9381523,\n        -4.0739775,\n        -3.5914814,\n        -2.88282,\n        -4.8593273,\n        -2.0374897,\n        -3.0119255,\n        -3.012831,\n        -3.3967464,\n        -3.4023802,\n        -2.5469584,\n        -1.0278263,\n        -1.0037334,\n        -4.807614,\n        -1.4685438,\n        -1.847896,\n        -1.6382484,\n        -2.4243822,\n        
-1.3925955,\n        -2.429869,\n        -1.9127114\n      ],\n      \"pointIndex\": [\n        2,\n        504,\n        225,\n        111671647,\n        118198531,\n        47366142,\n        82611799,\n        15422422,\n        18950857,\n        62711760,\n        5746078,\n        103679816,\n        89022947,\n        43163095,\n        91680981,\n        74392388,\n        35584963,\n        10637222,\n        73258702,\n        93278378,\n        19939543,\n        33895632,\n        119743310,\n        7157833,\n        16779238,\n        87639544,\n        116476223,\n        72966754,\n        106712,\n        58701826,\n        4475707,\n        23909850,\n        66472368,\n        68701516,\n        27495848,\n        79357249,\n        91508298,\n        104685242,\n        14140464,\n        79803883,\n        14234893,\n        93429684,\n        15144965,\n        37198795,\n        92147146,\n        100692200,\n        41931150,\n        51576948,\n        20643713,\n        76949619,\n        1748312,\n        32986811,\n        19136205,\n        77524577,\n        28191894,\n        57981206,\n        21495427,\n        89797535,\n        94962464,\n        63377850,\n        75190544,\n        27960305,\n        116953797,\n        89193273,\n        25578096,\n        29811451,\n        72161665,\n        73395529,\n        82882943,\n        125153256,\n        90067599,\n        96460508,\n        108002029,\n        100994745,\n        2733205,\n        112295199,\n        118907112,\n        127013007\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 225,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 6910102835766708129\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.6361297,\n        
-0.6427886,\n        -0.64978087,\n        -0.70756996,\n        -0.66756016,\n        -0.6652314,\n        -0.67686117,\n        -0.738501,\n        -0.86563295,\n        -0.8297991,\n        -0.7050236,\n        -0.69653434,\n        -0.7010046,\n        -0.8382546,\n        -1.0409559,\n        -0.74255246,\n        -0.9452534,\n        -0.8861673,\n        -1.0988894,\n        -0.86182266,\n        -0.9004108,\n        -0.8393799,\n        -0.7601388,\n        -0.8780445,\n        -0.7044544,\n        -0.74571294,\n        -0.8152079,\n        -1.0929025,\n        -1.0292293,\n        -1.3781352,\n        -1.4376355,\n        -1.3130229,\n        -1.0308373,\n        -0.9641338,\n        -1.1163272,\n        -0.89741856,\n        -1.0924834,\n        -1.4286077,\n        -1.2312396,\n        -1.0095732,\n        -0.90361285,\n        -0.96598524,\n        -0.9863629,\n        -0.87630546,\n        -1.1279582,\n        -1.368041,\n        -0.8136685,\n        -1.3092943,\n        -0.9266263,\n        -0.74128693,\n        -0.8960323,\n        -0.8825871,\n        -0.79810524,\n        -0.9010219,\n        -0.97901225,\n        -1.4633446,\n        -1.5441315,\n        -1.0377563,\n        -1.0494223,\n        -2.1532643,\n        -1.5144418,\n        -1.4966323,\n        -1.640983,\n        -1.9846714,\n        -1.8514707,\n        -1.2629799,\n        -2.4018369,\n        -1.8355949,\n        -1.6449745,\n        -1.356063,\n        -1.2787979,\n        -1.4097439,\n        -1.1225487,\n        -1.9527433,\n        -1.655858,\n        -2.0141113,\n        -2.74355,\n        -1.4586512,\n        -1.5609925,\n        -2.3343732,\n        -1.0382442,\n        -1.4256576,\n        -1.0880938,\n        -1.2506218,\n        -2.26243,\n        -1.0773342,\n        -1.0644708,\n        -0.9891659,\n        -0.9573225,\n        -2.5247407,\n        -1.1891508,\n        -1.4889679,\n        -2.2274039,\n        -1.5068356,\n        -1.1923847,\n        -1.7803392,\n      
  -1.8480661,\n        -1.2104905,\n        -1.089501,\n        -0.8458398,\n        -0.7424034,\n        -1.1034354,\n        -0.95535153,\n        -1.430842,\n        -1.4128844,\n        -1.2114067,\n        -0.8276177,\n        -0.92598075,\n        -0.91475046,\n        -1.0110403,\n        -3.731133,\n        -2.5332088,\n        -1.8363091,\n        -2.5755098,\n        -1.9488442,\n        -1.2307419,\n        -1.0537357,\n        -1.9857179,\n        -2.2417715,\n        -3.314498,\n        -3.8412423,\n        -2.8418424,\n        -2.4061882,\n        -1.7295107,\n        -2.1518376,\n        -1.7952791,\n        -2.0380082,\n        -4.090556,\n        -2.7759373,\n        -2.0462887,\n        -2.7760093,\n        -3.8271885,\n        -3.2707825,\n        -3.4327483,\n        -2.5401921,\n        -1.8877523,\n        -3.4253228,\n        -1.8831528,\n        -1.8994313,\n        -1.6148615,\n        -5.4198527,\n        -1.314268,\n        -1.3719753,\n        -2.688971,\n        -2.585797,\n        -2.2576668,\n        -1.1549048,\n        -5.106076,\n        -1.954672,\n        -6.7728157,\n        -3.7644725,\n        -3.1551533,\n        -4.8891025,\n        -4.727654,\n        -4.586425,\n        -1.7163793,\n        -3.6443462,\n        -1.732317,\n        -2.1879315,\n        -4.6237965,\n        -2.814348,\n        -1.2486494,\n        -2.1599646,\n        -1.7666686,\n        -1.4898446,\n        -1.3215132,\n        -4.152714,\n        -1.413151,\n        -4.8497276,\n        -2.790615,\n        -3.3900342,\n        -2.3118632,\n        -1.1577141,\n        -1.6863161,\n        -3.1471484,\n        -2.5949144,\n        -1.1936404,\n        -5.16345,\n        -1.0650976,\n        -3.213641,\n        -2.6836355,\n        -3.2522056,\n        -1.2563653,\n        -2.3604238,\n        -2.864063,\n        -2.9894278,\n        -4.0283117,\n        -1.9091228,\n        -2.1098876,\n        -1.6736158,\n        -1.4758114,\n        -1.9855922,\n        
-2.9283683,\n        -3.017182,\n        -2.0328176,\n        -3.7195306,\n        -1.5892555,\n        -3.5429056,\n        -2.7333112,\n        -1.3380169,\n        -0.9410527,\n        -2.6401072,\n        -0.9119615,\n        -2.025092,\n        -3.1671853,\n        -1.0942268,\n        -3.079749,\n        -3.9915621,\n        -2.765839,\n        -1.9798393,\n        -1.5078577,\n        -3.4867995,\n        -2.388034,\n        -3.0964606,\n        -2.8108213,\n        -2.1528425,\n        -1.8067331,\n        -1.5127798,\n        -1.1197842\n      ],\n      \"pointIndex\": [\n        1,\n        503,\n        219,\n        7019155,\n        86646336,\n        120179378,\n        91734547,\n        17320238,\n        96151162,\n        105435557,\n        36079528,\n        127178896,\n        60140693,\n        42538739,\n        83950375,\n        55457344,\n        12546722,\n        20888350,\n        81270008,\n        112022852,\n        32868989,\n        35156862,\n        98160653,\n        68597093,\n        7927708,\n        46520646,\n        1883114,\n        112105826,\n        124338430,\n        93506926,\n        63630244,\n        37324365,\n        72408637,\n        76885895,\n        80009297,\n        109326382,\n        101401805,\n        115582259,\n        123203273,\n        33876601,\n        4654804,\n        35893754,\n        37607557,\n        38489150,\n        103329392,\n        40243586,\n        17148944,\n        63002618,\n        8336288,\n        67760943,\n        50594140,\n        95952481,\n        52730153,\n        84864861,\n        28444654,\n        59823071,\n        67866944,\n        61754053,\n        10357866,\n        66015676,\n        67433952,\n        91270347,\n        109234443,\n        74395631,\n        81912440,\n        78081117,\n        122867865,\n        106319468,\n        91502132,\n        93797791,\n        102667445,\n        104529340,\n        109866779,\n        116915765,\n        
124217325,\n        32637650\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 219,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -8153191400930165074\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.48931122,\n        -0.49723798,\n        -0.49012178,\n        -0.623267,\n        -0.6048579,\n        -0.5018685,\n        -0.493251,\n        -0.6810186,\n        -0.62533385,\n        -0.611521,\n        -0.6176875,\n        -0.5930478,\n        -0.57753223,\n        -0.52694714,\n        -0.6676626,\n        -0.7207007,\n        -0.7123615,\n        -0.6789895,\n        -0.745815,\n        -0.80155474,\n        -0.8633884,\n        -0.6581785,\n        -0.6463497,\n        -0.62930876,\n        -0.6465614,\n        -0.64662224,\n        -0.63142097,\n        -0.5293907,\n        -1.0326748,\n        -0.7933024,\n        -1.0779095,\n        -0.77808034,\n        -1.0023601,\n        -0.74175876,\n        -0.9819902,\n        -0.9997212,\n        -0.953454,\n        -0.9269736,\n        -0.77646047,\n        -0.88533807,\n        -1.0808892,\n        -0.911517,\n        -1.1113745,\n        -1.2881032,\n        -0.6981712,\n        -1.048881,\n        -0.82391083,\n        -0.7688017,\n        -0.8863816,\n        -0.67885447,\n        -0.7831837,\n        -0.73852074,\n        -0.75421745,\n        -0.6855453,\n        -0.6836569,\n        -1.1590838,\n        -0.6129658,\n        -1.337165,\n        -1.1796199,\n        -0.91301686,\n        -1.7960355,\n        -1.1085035,\n        -2.0368264,\n        -1.0979167,\n        -1.5597198,\n        -2.3726456,\n        -1.6531286,\n        -0.7789359,\n        -0.9969333,\n        -1.3209299,\n        -1.2398386,\n        -1.0943171,\n        -1.8565011,\n        -1.2082129,\n 
       -2.0254157,\n        -1.1507211,\n        -1.0858723,\n        -0.9761007,\n        -1.6726984,\n        -1.3714278,\n        -1.1769344,\n        -1.2964422,\n        -1.3787014,\n        -1.0774325,\n        -1.2595168,\n        -1.5023205,\n        -1.7526349,\n        -2.0585146,\n        -1.3601263,\n        -1.2020801,\n        -0.76575804,\n        -2.5528324,\n        -1.1102475,\n        -0.977003,\n        -0.85832506,\n        -1.0666026,\n        -0.96740466,\n        -1.0918508,\n        -2.1194818,\n        -0.7036158,\n        -1.0782622,\n        -1.2990319,\n        -1.7673608,\n        -1.3664817,\n        -1.101482,\n        -0.86120254,\n        -1.0904459,\n        -0.82473683,\n        -1.0382458,\n        -0.7014238,\n        -1.1469412,\n        -1.5819608,\n        -2.0776114,\n        -0.8331923,\n        -0.74183834,\n        -2.0028663,\n        -1.5265291,\n        -2.3708613,\n        -1.6433748,\n        -2.1847932,\n        -2.2085743,\n        -3.0770442,\n        -4.579025,\n        -1.9978226,\n        -2.0566041,\n        -3.6105764,\n        -3.1895306,\n        -1.7005802,\n        -1.2436558,\n        -1.7310462,\n        -3.3567033,\n        -2.6761308,\n        -2.8754559,\n        -2.2890341,\n        -2.090929,\n        -1.3141505,\n        -1.9325581,\n        -4.143366,\n        -1.8410652,\n        -1.6836159,\n        -1.4787452,\n        -1.5515575,\n        -1.5433555,\n        -3.4945877,\n        -2.9452395,\n        -2.9747286,\n        -3.8249507,\n        -4.712332,\n        -2.8805053,\n        -2.1511056,\n        -2.2605977,\n        -1.9543904,\n        -1.7370383,\n        -2.8557973,\n        -2.027881,\n        -1.4113812,\n        -1.6777797,\n        -3.6317258,\n        -2.9565845,\n        -1.5805341,\n        -2.0916042,\n        -1.8768723,\n        -1.4425573,\n        -1.6574762,\n        -5.694653,\n        -2.0396917,\n        -1.9360616,\n        -2.9818797,\n        -1.3613492,\n        
-1.7539183,\n        -5.2183347,\n        -5.3925138,\n        -3.69513,\n        -7.285011,\n        -2.0467029,\n        -4.6431193,\n        -3.5842743,\n        -2.338261,\n        -1.6631751,\n        -2.7470868,\n        -1.5942484,\n        -1.2338036,\n        -1.5301003,\n        -3.2176993,\n        -2.6857648,\n        -1.5410172,\n        -2.2258418,\n        -1.2013426,\n        -1.2898548,\n        -4.8211923,\n        -5.0544915,\n        -1.8151265,\n        -1.3625166,\n        -3.4235814,\n        -2.0515249,\n        -1.1709322,\n        -1.9283103,\n        -2.2851682,\n        -2.3307283,\n        -2.4420938,\n        -1.9790992,\n        -1.7172705,\n        -2.305028,\n        -1.4039015,\n        -2.8155386,\n        -3.0897534,\n        -3.5799522,\n        -2.252491,\n        -1.934329,\n        -6.9373517,\n        -2.9747658,\n        -2.2050023,\n        -0.9547009,\n        -2.517102,\n        -1.4843043,\n        -2.2253036,\n        -1.3988092,\n        -2.5037396,\n        -3.1673834,\n        -0.8072798,\n        -1.4884133,\n        -1.7703872,\n        -2.2750254,\n        -1.9589535,\n        -2.5536778,\n        -2.4836578,\n        -3.569326,\n        -1.6090255,\n        -1.1384836,\n        -0.7648988\n      ],\n      \"pointIndex\": [\n        2,\n        502,\n        230,\n        111339292,\n        85613591,\n        58324908,\n        87013905,\n        37649672,\n        8623547,\n        65372694,\n        68426884,\n        115159395,\n        23409781,\n        16687454,\n        114499214,\n        52593671,\n        62770336,\n        22136302,\n        11966504,\n        24715439,\n        102196100,\n        124284645,\n        63854009,\n        37351203,\n        11321662,\n        17604151,\n        59108384,\n        53958592,\n        53080528,\n        30732930,\n        84649389,\n        65556598,\n        70498491,\n        24491168,\n        68752529,\n        31292774,\n        92071582,\n        
2570990,\n        13264593,\n        116182818,\n        117957071,\n        33697231,\n        3043710,\n        82913134,\n        19169290,\n        72793028,\n        39782858,\n        42751492,\n        96796374,\n        8120527,\n        41760334,\n        78600692,\n        49909194,\n        51222082,\n        65924290,\n        59617474,\n        21024187,\n        105381725,\n        60317624,\n        63447385,\n        45559957,\n        82210418,\n        11007062,\n        2150073,\n        73502844,\n        114103554,\n        25757433,\n        79620234,\n        124966752,\n        27534372,\n        56910093,\n        90668857,\n        94056352,\n        98591664,\n        101355199,\n        108140259,\n        68998662,\n        121992128,\n        120269931,\n        245471\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 230,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 4045460986159022609\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.4681739,\n        -0.48007405,\n        -0.49789903,\n        -0.5071157,\n        -0.5392349,\n        -0.49877673,\n        -0.5173413,\n        -0.55151767,\n        -0.56650174,\n        -0.5500999,\n        -0.5561777,\n        -0.5550377,\n        -0.5654364,\n        -0.55124235,\n        -0.6208702,\n        -0.59977484,\n        -0.7121681,\n        -0.5872569,\n        -0.57015973,\n        -0.60092586,\n        -0.5919172,\n        -0.57651204,\n        -0.599501,\n        -0.84314567,\n        -0.71985126,\n        -0.61190027,\n        -0.607676,\n        -0.61616707,\n        -0.87906057,\n        -0.88150966,\n        -1.2262063,\n        -0.6288519,\n        -0.90566653,\n        -0.74081695,\n        -0.78718567,\n        -0.6843046,\n        
-0.5967524,\n        -0.97782314,\n        -0.9634063,\n        -1.1774516,\n        -0.6383265,\n        -1.3105494,\n        -0.61028373,\n        -0.71040356,\n        -0.9213385,\n        -0.8167878,\n        -0.65758455,\n        -0.8537457,\n        -1.0144536,\n        -1.1103842,\n        -0.73576075,\n        -0.65716136,\n        -0.7518719,\n        -0.65931726,\n        -0.6404282,\n        -0.6632596,\n        -1.0990821,\n        -1.1170622,\n        -1.5678968,\n        -1.3625112,\n        -1.085547,\n        -1.5416995,\n        -1.5436345,\n        -0.9560163,\n        -1.1595485,\n        -1.1146106,\n        -0.9999493,\n        -0.9884563,\n        -1.445272,\n        -1.1867231,\n        -0.94520855,\n        -1.1810359,\n        -1.6496556,\n        -1.3205277,\n        -0.87346005,\n        -1.2355236,\n        -1.2484058,\n        -1.0776839,\n        -1.8207626,\n        -2.0081627,\n        -2.1486614,\n        -1.8236513,\n        -1.2014854,\n        -1.3567494,\n        -1.7071117,\n        -1.3450822,\n        -1.1705921,\n        -1.4246175,\n        -1.3039186,\n        -1.0957643,\n        -0.9249299,\n        -1.356931,\n        -0.8482996,\n        -0.675301,\n        -0.84562135,\n        -1.1149482,\n        -1.6455381,\n        -1.2188505,\n        -1.4722234,\n        -1.828188,\n        -1.5412449,\n        -0.8473445,\n        -0.92642033,\n        -0.8599541,\n        -1.0990387,\n        -0.83823735,\n        -1.4376352,\n        -0.8778756,\n        -0.66725403,\n        -1.1006153,\n        -0.7513238,\n        -0.7345586,\n        -0.66469234,\n        -1.3874843,\n        -2.4865832,\n        -2.9973478,\n        -2.306394,\n        -2.673725,\n        -3.3250866,\n        -2.1260333,\n        -2.1913078,\n        -1.8699329,\n        -1.7517766,\n        -1.6185035,\n        -1.6238472,\n        -2.273655,\n        -5.387515,\n        -4.84232,\n        -3.1442783,\n        -3.4106052,\n        -1.8919024,\n        
-1.4764744,\n        -1.1394618,\n        -4.4950013,\n        -1.0300931,\n        -4.261308,\n        -1.3040004,\n        -1.6697409,\n        -2.3310206,\n        -1.9800646,\n        -2.5040128,\n        -0.9702708,\n        -1.6211085,\n        -2.4045038,\n        -2.6541598,\n        -1.83864,\n        -3.9018402,\n        -1.9505028,\n        -3.0774148,\n        -1.2140405,\n        -1.0377613,\n        -2.676499,\n        -2.0348115,\n        -2.026429,\n        -1.398141,\n        -1.1595172,\n        -2.031862,\n        -1.9018892,\n        -1.907572,\n        -3.091949,\n        -4.789639,\n        -2.4214916,\n        -2.600685,\n        -3.3066297,\n        -3.311136,\n        -1.543751,\n        -4.41873,\n        -1.721474,\n        -2.1571455,\n        -2.8674223,\n        -1.771004,\n        -3.224615,\n        -1.9476156,\n        -2.4125326,\n        -2.195531,\n        -2.5996487,\n        -1.4842075,\n        -2.531535,\n        -2.6502254,\n        -1.8549097,\n        -2.5735629,\n        -1.4825006,\n        -2.5768104,\n        -2.2504616,\n        -1.3600206,\n        -2.5184896,\n        -1.0576596,\n        -3.8762393,\n        -2.7213616,\n        -0.9605226,\n        -1.5147715,\n        -2.870183,\n        -1.8479155,\n        -3.3377554,\n        -1.9698049,\n        -1.9063296,\n        -1.5209743,\n        -3.7611253,\n        -1.6307871,\n        -2.031272,\n        -4.151252,\n        -1.6233153,\n        -1.9489214,\n        -6.4323297,\n        -1.1256434,\n        -1.313254,\n        -2.584117,\n        -4.35672,\n        -1.3673528,\n        -1.216091,\n        -2.888076,\n        -1.3286314,\n        -0.9383582,\n        -3.279279,\n        -4.4881206,\n        -0.95496076,\n        -1.2240533,\n        -1.0146334,\n        -2.168913,\n        -1.5143352,\n        -1.1024603,\n        -2.4158843,\n        -2.5341992,\n        -1.4697461,\n        -0.8430382,\n        -1.953946,\n        -1.1216923,\n        -3.570344,\n   
     -1.8596222\n      ],\n      \"pointIndex\": [\n        0,\n        503,\n        229,\n        97308902,\n        90546316,\n        60531530,\n        25415445,\n        20053538,\n        44909203,\n        59035730,\n        85880856,\n        114508858,\n        32430183,\n        38734551,\n        45130336,\n        24225300,\n        57518001,\n        88017364,\n        81431559,\n        95179440,\n        109730194,\n        30224840,\n        32023550,\n        34779612,\n        37411646,\n        59797001,\n        78525232,\n        93423098,\n        48613000,\n        20423406,\n        125847509,\n        78012884,\n        92848752,\n        90257220,\n        79655512,\n        87435428,\n        93908915,\n        116022841,\n        104712945,\n        111236741,\n        14469842,\n        38373353,\n        7382757,\n        45236435,\n        28703822,\n        117425585,\n        36904534,\n        29186408,\n        17099365,\n        20913138,\n        5216832,\n        125480067,\n        90774671,\n        4649981,\n        29814811,\n        21945113,\n        71558967,\n        54887893,\n        101510858,\n        88783696,\n        64133978,\n        64525352,\n        113952641,\n        115717676,\n        73090102,\n        74664864,\n        79148335,\n        84019128,\n        74340922,\n        86377870,\n        26855188,\n        96460424,\n        3240594,\n        98755278,\n        105951844,\n        110712086,\n        118850697,\n        123183109,\n        127513496,\n        503\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 229,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -7062140655107411726\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.53298694,\n        
-0.5370285,\n        -0.53302014,\n        -0.5869225,\n        -0.5386616,\n        -0.53426373,\n        -0.54588836,\n        -0.6144738,\n        -0.6666626,\n        -0.55950403,\n        -0.7248929,\n        -0.5607794,\n        -0.5471208,\n        -0.57251537,\n        -0.56006366,\n        -0.6167674,\n        -0.61541486,\n        -0.817127,\n        -0.6857935,\n        -0.5771496,\n        -0.6130053,\n        -0.7751004,\n        -0.80550486,\n        -0.64279586,\n        -0.67447525,\n        -0.67872816,\n        -0.5948092,\n        -0.6225593,\n        -0.7412373,\n        -0.63900876,\n        -0.6328384,\n        -0.777648,\n        -0.76546454,\n        -0.74156725,\n        -0.82844687,\n        -0.8392029,\n        -1.044082,\n        -0.7329778,\n        -0.99735993,\n        -0.7560849,\n        -0.7613802,\n        -0.85758793,\n        -0.62227815,\n        -0.7803536,\n        -0.88435996,\n        -0.9483788,\n        -0.8897424,\n        -0.84419423,\n        -0.76181144,\n        -1.5871549,\n        -0.9929117,\n        -0.75543576,\n        -0.7979167,\n        -0.735058,\n        -0.6214327,\n        -0.7944632,\n        -0.7902639,\n        -0.83461565,\n        -1.4979233,\n        -0.7796874,\n        -1.0374615,\n        -0.8105296,\n        -1.8733882,\n        -0.8792849,\n        -0.9162315,\n        -1.1558676,\n        -1.2506269,\n        -1.1495136,\n        -1.5213449,\n        -2.2541447,\n        -1.1921371,\n        -1.3501981,\n        -0.9838202,\n        -1.0595492,\n        -1.3465357,\n        -0.9763687,\n        -1.0712547,\n        -1.4653171,\n        -1.2853703,\n        -2.0836577,\n        -0.9410978,\n        -0.96253556,\n        -0.81756365,\n        -1.3851339,\n        -1.179112,\n        -0.6709964,\n        -0.76329947,\n        -1.1769576,\n        -1.4954052,\n        -0.960525,\n        -1.0389729,\n        -1.2045083,\n        -1.8349122,\n        -1.461973,\n        -1.300356,\n        
-0.95158595,\n        -1.0056021,\n        -2.020996,\n        -1.2476102,\n        -2.0049386,\n        -1.8346143,\n        -1.1478754,\n        -1.8596087,\n        -0.98516494,\n        -0.99060625,\n        -0.9482102,\n        -0.8543665,\n        -0.9581024,\n        -0.94102937,\n        -0.6761013,\n        -0.95146936,\n        -1.0973043,\n        -0.88721204,\n        -0.8015781,\n        -1.2003738,\n        -1.0721217,\n        -0.8620344,\n        -1.6259612,\n        -3.8667881,\n        -0.93236417,\n        -0.8828107,\n        -1.493874,\n        -1.5099301,\n        -2.5947087,\n        -3.358613,\n        -4.7279215,\n        -2.1498632,\n        -0.8850714,\n        -0.9867246,\n        -5.2405734,\n        -1.5883927,\n        -1.1818252,\n        -1.1974761,\n        -1.7496891,\n        -4.0008407,\n        -1.476634,\n        -1.3821882,\n        -1.875585,\n        -1.6439567,\n        -5.709731,\n        -3.068541,\n        -2.4429452,\n        -1.776948,\n        -2.922071,\n        -1.7195975,\n        -1.1998038,\n        -1.2773302,\n        -3.1672099,\n        -1.4617462,\n        -1.7898146,\n        -3.2232401,\n        -1.1349226,\n        -1.3632929,\n        -1.8463753,\n        -4.629285,\n        -1.6867559,\n        -1.8657526,\n        -1.5905323,\n        -2.1952798,\n        -3.075461,\n        -2.5903156,\n        -1.5011766,\n        -1.6764424,\n        -2.241862,\n        -1.2524456,\n        -2.0902004,\n        -1.5791374,\n        -1.4672202,\n        -1.4199411,\n        -1.2729433,\n        -1.6486052,\n        -1.5432082,\n        -3.741604,\n        -4.3343306,\n        -3.1124523,\n        -1.3878208,\n        -1.808157,\n        -2.4290116,\n        -2.043054,\n        -1.5419066,\n        -1.6230438,\n        -2.0333831,\n        -1.5723906,\n        -2.873767,\n        -1.4621615,\n        -2.5015512,\n        -3.94249,\n        -1.5258577,\n        -1.8479875,\n        -2.240764,\n        -1.8996323,\n    
    -1.2670697,\n        -1.111706,\n        -3.5020432,\n        -2.166442,\n        -2.0529845,\n        -3.032947,\n        -1.8734413,\n        -1.9137822,\n        -5.747706,\n        -6.5535336,\n        -1.8551661,\n        -2.2223253,\n        -3.246324,\n        -2.2672668,\n        -3.214827,\n        -1.8734428,\n        -1.175713,\n        -1.2449504,\n        -5.0981402,\n        -1.1101719,\n        -3.6191673,\n        -1.1534905,\n        -2.1083336,\n        -0.95823085,\n        -4.0686164,\n        -1.2263622,\n        -3.3285706,\n        -1.6448618,\n        -1.0919825,\n        -2.5784686,\n        -3.4817815,\n        -1.2479918,\n        -3.134906,\n        -1.1962954,\n        -1.6323433,\n        -2.1114237,\n        -0.8559951\n      ],\n      \"pointIndex\": [\n        0,\n        503,\n        228,\n        93009322,\n        32391791,\n        6112720,\n        29612775,\n        49771418,\n        18883514,\n        16121665,\n        86770664,\n        115266187,\n        34589577,\n        59516381,\n        18350350,\n        57759317,\n        39037044,\n        70805195,\n        26572791,\n        94168921,\n        109425411,\n        127831981,\n        15016203,\n        110821980,\n        85958860,\n        41693101,\n        72741503,\n        59203443,\n        116104014,\n        58329384,\n        62243579,\n        64305944,\n        121055088,\n        24881265,\n        111164926,\n        90454212,\n        93322042,\n        126810352,\n        106388250,\n        117830318,\n        126742863,\n        65345741,\n        3622619,\n        116568751,\n        73871814,\n        95430527,\n        114891825,\n        23449004,\n        108834772,\n        83860728,\n        850823,\n        61508947,\n        61397268,\n        103677019,\n        83368800,\n        70259113,\n        77359180,\n        66256039,\n        5203535,\n        64136609,\n        3428296,\n        127212426,\n        70500267,\n        
8683920,\n        114219368,\n        78898989,\n        42583776,\n        84243156,\n        88572891,\n        1987234,\n        31681933,\n        53278978,\n        98360665,\n        117555367,\n        6829606,\n        109949533,\n        43916474,\n        120383937,\n        14474349\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 228,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 8531256490203702939\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5294597,\n        -0.5576223,\n        -0.5410471,\n        -0.5649329,\n        -0.58059883,\n        -0.59872955,\n        -0.59176224,\n        -0.6084886,\n        -0.7097818,\n        -0.68805045,\n        -0.585696,\n        -0.6078537,\n        -0.63380736,\n        -0.59650224,\n        -0.6027426,\n        -0.61310583,\n        -0.61372364,\n        -0.7964455,\n        -0.87056744,\n        -0.7759881,\n        -1.0372149,\n        -0.65877354,\n        -0.61011463,\n        -0.72138506,\n        -0.70972306,\n        -0.66512847,\n        -0.6612416,\n        -0.5998607,\n        -0.99996316,\n        -0.9549136,\n        -0.61410373,\n        -0.8476963,\n        -0.71036553,\n        -1.085258,\n        -0.96238244,\n        -0.910108,\n        -1.0033214,\n        -1.1844999,\n        -1.3858411,\n        -0.8568252,\n        -0.7952146,\n        -1.0685524,\n        -1.4095951,\n        -0.66511136,\n        -1.051108,\n        -1.0629483,\n        -0.8007286,\n        -0.73972476,\n        -0.78043693,\n        -1.4360243,\n        -0.74637586,\n        -0.67966276,\n        -0.74322665,\n        -1.1533587,\n        -0.8437019,\n        -0.607716,\n        -0.9872282,\n        -1.437123,\n        -1.2489737,\n        -1.6970297,\n        -1.6194949,\n  
      -1.318194,\n        -1.1215625,\n        -1.0581961,\n        -0.970132,\n        -0.95887625,\n        -0.8250626,\n        -1.3179755,\n        -1.2062294,\n        -1.3604482,\n        -0.9787357,\n        -1.2869112,\n        -1.631505,\n        -1.0290068,\n        -1.0062145,\n        -1.8089368,\n        -1.9163202,\n        -1.7504554,\n        -2.6891043,\n        -0.92384803,\n        -1.4413592,\n        -1.8508644,\n        -1.0570983,\n        -1.1877431,\n        -1.1719575,\n        -1.9445239,\n        -2.1189156,\n        -1.4971766,\n        -1.1050333,\n        -1.5659459,\n        -1.3083023,\n        -1.3377016,\n        -1.8765005,\n        -1.0915056,\n        -0.85164684,\n        -0.8110967,\n        -1.1261318,\n        -0.7932666,\n        -1.508754,\n        -1.6883552,\n        -1.631404,\n        -1.2834219,\n        -1.4985572,\n        -0.74091995,\n        -0.8910905,\n        -0.76595217,\n        -0.93938935,\n        -1.5934051,\n        -1.5612308,\n        -1.3203666,\n        -1.582214,\n        -0.61047065,\n        -0.9571381,\n        -2.2664058,\n        -4.521733,\n        -1.4690771,\n        -2.5001209,\n        -1.6353573,\n        -1.7051553,\n        -4.229778,\n        -2.168278,\n        -2.514496,\n        -2.2526808,\n        -3.2943108,\n        -2.1315482,\n        -1.8274347,\n        -4.216294,\n        -1.5538868,\n        -1.3124804,\n        -1.8857366,\n        -1.1115198,\n        -3.7827122,\n        -1.3951958,\n        -2.5932121,\n        -1.3097647,\n        -1.6431139,\n        -3.0072067,\n        -2.1572142,\n        -3.3944185,\n        -2.2084262,\n        -1.4533173,\n        -1.2706913,\n        -1.0741483,\n        -1.6193907,\n        -2.244457,\n        -2.4748313,\n        -2.0723145,\n        -1.7034297,\n        -1.3629124,\n        -2.333396,\n        -2.0661693,\n        -3.5874333,\n        -2.2120068,\n        -2.2768779,\n        -3.0420141,\n        -1.7942852,\n        
-2.5977976,\n        -3.8865256,\n        -3.352928,\n        -2.1355355,\n        -0.93818676,\n        -2.0091407,\n        -2.7621565,\n        -3.4339957,\n        -2.5531046,\n        -1.8565471,\n        -1.6133701,\n        -1.9098023,\n        -3.2153764,\n        -2.1790311,\n        -2.8811343,\n        -1.9575641,\n        -2.8316908,\n        -3.0212128,\n        -2.1982098,\n        -2.3480704,\n        -1.7099223,\n        -2.6372912,\n        -1.7484987,\n        -2.5965567,\n        -2.7568269,\n        -3.3684866,\n        -1.8365451,\n        -4.9798565,\n        -2.7800128,\n        -2.0807607,\n        -4.2281303,\n        -3.197505,\n        -2.5512059,\n        -4.6846085,\n        -1.9073757,\n        -5.087554,\n        -1.0783356,\n        -1.7837888,\n        -3.4602354,\n        -0.9112789,\n        -1.9554101,\n        -2.2751474,\n        -2.8101778,\n        -4.778091,\n        -4.4575386,\n        -2.9218266,\n        -2.4827263,\n        -2.644842,\n        -1.5498966,\n        -1.7656147,\n        -1.668538,\n        -1.9522663,\n        -2.826835,\n        -2.6780727,\n        -0.95048386,\n        -0.98014146,\n        -2.483194,\n        -3.975097,\n        -1.8736228,\n        -2.7426178,\n        -1.6407981,\n        -2.2753592,\n        -2.0097847,\n        -1.4792215,\n        -1.7406603,\n        -1.9646327,\n        -2.2271302,\n        -3.3145914,\n        -1.0689687\n      ],\n      \"pointIndex\": [\n        0,\n        504,\n        225,\n        13403184,\n        111495485,\n        110075349,\n        95286179,\n        9082452,\n        105107231,\n        68503793,\n        92229860,\n        115741825,\n        7426527,\n        45483998,\n        54101854,\n        60557798,\n        69292652,\n        100886814,\n        28223841,\n        102230303,\n        35674633,\n        37336130,\n        3777454,\n        42419097,\n        45683484,\n        50447787,\n        51025294,\n        26013912,\n        
101107105,\n        22461981,\n        30742553,\n        77687452,\n        78700000,\n        61768436,\n        35877561,\n        92404425,\n        98119292,\n        111182215,\n        227696,\n        3552227,\n        100345500,\n        38328637,\n        107160848,\n        41221529,\n        109380134,\n        52167691,\n        97181098,\n        51851055,\n        21633223,\n        125705045,\n        41500469,\n        53916530,\n        56440578,\n        33704439,\n        36584834,\n        19904467,\n        23136818,\n        86675926,\n        23851670,\n        71295809,\n        74103306,\n        75498188,\n        96884548,\n        6794077,\n        84836487,\n        89715102,\n        71070215,\n        91733595,\n        53489144,\n        29386823,\n        106303479,\n        20601240,\n        110642482,\n        33376894,\n        116785738,\n        2786544,\n        122652855,\n        128784580\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 225,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 86567694310489782\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.548254,\n        -0.5607937,\n        -0.57866526,\n        -0.5624324,\n        -0.5818964,\n        -0.6089347,\n        -0.6727932,\n        -0.562878,\n        -0.5639682,\n        -0.6208581,\n        -0.59887177,\n        -0.72257894,\n        -0.620159,\n        -0.68043953,\n        -0.7551201,\n        -0.73569727,\n        -0.6401625,\n        -0.84797335,\n        -0.6484228,\n        -0.6759319,\n        -0.8306258,\n        -0.6635486,\n        -0.64221096,\n        -0.7404873,\n        -0.7288819,\n        -0.62848085,\n        -0.76633126,\n        -0.7984979,\n        -0.9023936,\n        -1.33871,\n        
-1.1491896,\n        -0.7609833,\n        -0.9475599,\n        -0.7572917,\n        -1.2838012,\n        -1.1049031,\n        -1.1746876,\n        -0.6899676,\n        -0.88186604,\n        -0.8181633,\n        -0.680215,\n        -0.8640347,\n        -1.1342883,\n        -0.7576444,\n        -0.8179945,\n        -0.6939694,\n        -0.8686436,\n        -0.8221472,\n        -0.84974766,\n        -0.73687667,\n        -1.1154488,\n        -0.7298011,\n        -0.76152563,\n        -1.2177804,\n        -0.9150309,\n        -0.96158725,\n        -0.8462979,\n        -1.1513401,\n        -0.9344139,\n        -1.6353195,\n        -1.3437028,\n        -1.4996282,\n        -1.3432597,\n        -2.301112,\n        -0.8197572,\n        -1.2027588,\n        -1.1205764,\n        -0.87856936,\n        -1.5738864,\n        -2.232956,\n        -3.8641906,\n        -1.4949442,\n        -1.4932362,\n        -1.2822062,\n        -1.7245877,\n        -0.9485146,\n        -1.4527066,\n        -0.93614507,\n        -1.469078,\n        -0.85976386,\n        -1.3209335,\n        -0.7144693,\n        -0.68033046,\n        -0.9082166,\n        -1.9164375,\n        -1.7172593,\n        -1.4276229,\n        -1.3900268,\n        -1.3021207,\n        -2.0078642,\n        -1.8958111,\n        -0.9743969,\n        -1.3751584,\n        -1.2233984,\n        -1.2176247,\n        -0.9189738,\n        -0.91843736,\n        -0.9539633,\n        -0.8822781,\n        -1.2415289,\n        -1.7231786,\n        -1.578393,\n        -1.3576747,\n        -1.1325957,\n        -1.0613661,\n        -1.2737293,\n        -1.2220944,\n        -1.7856517,\n        -1.8723731,\n        -1.3435957,\n        -1.2633096,\n        -1.1389157,\n        -1.1266003,\n        -0.8861719,\n        -0.85776955,\n        -1.6702726,\n        -4.3272867,\n        -1.3503591,\n        -1.742206,\n        -2.5929413,\n        -2.5635533,\n        -1.3737376,\n        -1.3955579,\n        -1.5927967,\n        -4.3128505,\n        
-4.2600355,\n        -2.501178,\n        -3.3870676,\n        -3.9008584,\n        -4.1824965,\n        -3.6452065,\n        -1.3949506,\n        -1.9210273,\n        -1.967188,\n        -1.536652,\n        -3.3252275,\n        -2.2511718,\n        -1.9874296,\n        -1.7335365,\n        -2.4373407,\n        -2.9089544,\n        -3.9528072,\n        -4.4839187,\n        -1.9656321,\n        -1.8781765,\n        -3.0906434,\n        -5.065713,\n        -4.014744,\n        -3.8616838,\n        -1.7609141,\n        -2.0917294,\n        -1.860987,\n        -2.1770885,\n        -3.6821332,\n        -3.2795827,\n        -1.9278725,\n        -2.6800344,\n        -5.78242,\n        -1.4867266,\n        -3.017378,\n        -1.3494822,\n        -2.7585256,\n        -1.5266467,\n        -7.224387,\n        -1.0098683,\n        -1.9480876,\n        -1.9703789,\n        -3.3225334,\n        -1.389721,\n        -1.9282464,\n        -2.6578705,\n        -2.8685904,\n        -2.8504508,\n        -6.6986213,\n        -1.6179367,\n        -1.5962142,\n        -1.4038852,\n        -1.6786649,\n        -2.472646,\n        -3.1394675,\n        -2.1794803,\n        -2.0659873,\n        -3.158665,\n        -3.1141348,\n        -2.071478,\n        -4.5385494,\n        -3.6139581,\n        -1.9906989,\n        -3.6913047,\n        -3.636456,\n        -1.5532333,\n        -5.768992,\n        -4.82517,\n        -1.4060777,\n        -2.8837552,\n        -2.9799287,\n        -1.0938119,\n        -1.9560072,\n        -1.0404767,\n        -1.7204325,\n        -2.9698248,\n        -3.0281875,\n        -2.0829344,\n        -2.106153,\n        -1.9701356,\n        -3.1649125,\n        -5.150452,\n        -1.2197741,\n        -2.5101361,\n        -2.1056838,\n        -2.6430736,\n        -2.108021,\n        -4.4657874,\n        -3.626636,\n        -1.2784696,\n        -2.9364944,\n        -1.8963417,\n        -2.7525795,\n        -3.784847,\n        -2.3326762,\n        -4.461001,\n        
-1.4812615,\n        -1.3003786,\n        -1.3552235,\n        -1.7875011,\n        -3.5720217,\n        -1.2042733,\n        -1.1229393,\n        -2.3410068,\n        -1.7073672,\n        -1.6788995,\n        -3.0060105\n      ],\n      \"pointIndex\": [\n        1,\n        499,\n        232,\n        35644102,\n        95620906,\n        18805749,\n        81317621,\n        34604771,\n        47007327,\n        62586261,\n        93525848,\n        105772867,\n        1836297,\n        36431055,\n        52379127,\n        52079831,\n        61376238,\n        85044418,\n        41659805,\n        90062343,\n        31183374,\n        117254947,\n        107884975,\n        30201518,\n        39283927,\n        70478305,\n        27333242,\n        49959075,\n        52821453,\n        57541686,\n        61873590,\n        66612382,\n        45591007,\n        26443451,\n        49454191,\n        82171151,\n        99534729,\n        96420884,\n        101045942,\n        30411828,\n        59671855,\n        37696441,\n        3305404,\n        35179351,\n        67193486,\n        79432577,\n        115617457,\n        55276385,\n        21088404,\n        113881020,\n        8555817,\n        47485812,\n        75662933,\n        9315052,\n        74555298,\n        57313488,\n        114520082,\n        59780069,\n        10015857,\n        112869350,\n        82799122,\n        116286262,\n        106461743,\n        112690456,\n        55923115,\n        104375627,\n        76843016,\n        80456232,\n        111216669,\n        84333327,\n        88958071,\n        90820851,\n        94593823,\n        98821972,\n        16298556,\n        103044415,\n        104144207,\n        111522983,\n        116762199,\n        123031926,\n        498\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 232,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      
\"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 6252646384454912542\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.48593023,\n        -0.49811634,\n        -0.5051228,\n        -0.52852297,\n        -0.4998905,\n        -0.50856495,\n        -0.56792754,\n        -0.58318883,\n        -0.5442299,\n        -0.57571816,\n        -0.5104713,\n        -0.5443268,\n        -0.5515343,\n        -0.60608447,\n        -0.6757374,\n        -0.6830418,\n        -0.6755409,\n        -0.59016246,\n        -0.5454576,\n        -0.7096835,\n        -0.61583054,\n        -0.5266673,\n        -0.51681995,\n        -0.56019515,\n        -0.5993234,\n        -0.6495098,\n        -0.6438212,\n        -0.61164343,\n        -0.65656334,\n        -0.72602475,\n        -0.8520472,\n        -0.8298695,\n        -0.68893903,\n        -0.80443794,\n        -0.8151453,\n        -0.6248744,\n        -0.7932084,\n        -0.5832469,\n        -0.59996295,\n        -0.7268257,\n        -0.8978948,\n        -0.628976,\n        -0.703311,\n        -0.9101348,\n        -0.7230928,\n        -0.7509946,\n        -0.81770027,\n        -1.0394619,\n        -0.5726214,\n        -0.9880306,\n        -0.7951019,\n        -0.8072624,\n        -0.8245114,\n        -0.7996366,\n        -0.64646524,\n        -0.6428294,\n        -0.62054175,\n        -0.7876593,\n        -0.95186293,\n        -1.3845879,\n        -1.0309538,\n        -1.4924188,\n        -0.8735874,\n        -0.9024334,\n        -1.887193,\n        -0.73925686,\n        -0.7173001,\n        -0.89899385,\n        -1.2966541,\n        -2.1132953,\n        -2.0209932,\n        -0.92569464,\n        -0.66766036,\n        -1.3062632,\n        -1.2196876,\n        -1.1793561,\n        -0.70380116,\n        -2.359043,\n        -1.2396822,\n        -1.0203435,\n        -0.9604665,\n        -1.758841,\n        -1.2739277,\n        
-0.69405615,\n        -1.3159864,\n        -0.9570915,\n        -1.4841802,\n        -1.0174404,\n        -1.4231768,\n        -0.9732836,\n        -0.95332456,\n        -0.87739766,\n        -1.1183963,\n        -1.3045577,\n        -2.6690967,\n        -1.071694,\n        -1.3986906,\n        -0.6189145,\n        -0.8985403,\n        -1.2814728,\n        -1.7861696,\n        -1.1929153,\n        -0.8898458,\n        -0.86690474,\n        -0.80744475,\n        -1.1774896,\n        -1.108945,\n        -2.056511,\n        -2.0676262,\n        -0.7846663,\n        -0.81444526,\n        -0.6766059,\n        -0.9041935,\n        -0.73590857,\n        -0.6264732,\n        -0.81851256,\n        -2.6052191,\n        -1.8843722,\n        -1.044584,\n        -1.6357023,\n        -1.6401404,\n        -1.5270939,\n        -2.2869985,\n        -1.7568169,\n        -1.7991313,\n        -3.001768,\n        -5.336341,\n        -1.6224685,\n        -1.0219606,\n        -4.603571,\n        -4.1932673,\n        -3.042347,\n        -1.9126862,\n        -0.7629551,\n        -3.189313,\n        -2.7016556,\n        -1.118586,\n        -2.1607187,\n        -2.0055864,\n        -2.7484715,\n        -3.1684074,\n        -3.83769,\n        -2.0820234,\n        -1.0559535,\n        -1.1772017,\n        -1.330181,\n        -1.6856858,\n        -1.6979719,\n        -2.530777,\n        -2.7738214,\n        -2.2110868,\n        -1.4996016,\n        -1.2965591,\n        -4.6498537,\n        -1.8551095,\n        -2.5414307,\n        -3.570963,\n        -1.2399164,\n        -2.8060694,\n        -2.1512334,\n        -1.8322636,\n        -4.8060484,\n        -1.9000674,\n        -2.0965776,\n        -2.3850784,\n        -4.606346,\n        -2.907361,\n        -3.545009,\n        -1.0164293,\n        -2.7142386,\n        -3.3618717,\n        -1.5112023,\n        -0.9955293,\n        -2.7666876,\n        -2.1661737,\n        -1.4990155,\n        -1.3076473,\n        -2.456527,\n        -1.6418482,\n   
     -2.2069795,\n        -1.9657013,\n        -3.255251,\n        -1.8241007,\n        -1.5874321,\n        -3.0413682,\n        -2.5288503,\n        -1.122015,\n        -3.3506832,\n        -1.7865183,\n        -3.0265481,\n        -3.1617188,\n        -2.043597,\n        -1.6243017,\n        -1.6693872,\n        -1.8202813,\n        -6.2588425,\n        -0.658507,\n        -1.9051749,\n        -1.0312359,\n        -1.6516533,\n        -2.6110055,\n        -2.2804751,\n        -2.2768223,\n        -2.914403,\n        -3.3968577,\n        -1.5329814,\n        -4.7806764,\n        -1.8757861,\n        -2.1339443,\n        -1.2909119,\n        -3.560735,\n        -1.4998031,\n        -1.5749999,\n        -1.4923608,\n        -1.1537538,\n        -4.7296,\n        -2.440965,\n        -4.2241488,\n        -2.3822827,\n        -2.3952546,\n        -1.0642797,\n        -1.7057232,\n        -1.2805316,\n        -1.9372311,\n        -1.3188204,\n        -4.5041175,\n        -2.726005,\n        -0.8250985,\n        -1.1002651,\n        -1.376234,\n        -0.9265303\n      ],\n      \"pointIndex\": [\n        1,\n        498,\n        231,\n        121903249,\n        82609045,\n        53657487,\n        83469870,\n        10321709,\n        50627244,\n        59885212,\n        77692286,\n        98141695,\n        87402624,\n        42160830,\n        8468170,\n        77416666,\n        34591308,\n        12532761,\n        26185935,\n        91499350,\n        75331836,\n        123483511,\n        15452988,\n        8325712,\n        21363363,\n        118013789,\n        86219309,\n        88635229,\n        28187500,\n        91925545,\n        114866803,\n        64359771,\n        1014791,\n        70322355,\n        116900125,\n        101094304,\n        84425561,\n        104223812,\n        97907689,\n        110328330,\n        121265951,\n        35245592,\n        15199966,\n        38695244,\n        37808698,\n        4744599,\n        87014766,\n        
41946632,\n        10751397,\n        44730152,\n        21616370,\n        52919691,\n        114313344,\n        12902561,\n        117409059,\n        113520849,\n        57750588,\n        58573017,\n        114622407,\n        72330758,\n        23440117,\n        26354919,\n        68648928,\n        68002172,\n        25188338,\n        71817058,\n        87196418,\n        12687349,\n        25351371,\n        82891487,\n        84141569,\n        88845534,\n        90204855,\n        95439583,\n        117258970,\n        6922222,\n        110083821,\n        115550392,\n        14129703,\n        84815374\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 231,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -3921434996928360891\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.59787774,\n        -0.60145414,\n        -0.6037318,\n        -0.60338205,\n        -0.6137846,\n        -0.6112629,\n        -0.62412167,\n        -0.62555826,\n        -0.692133,\n        -0.703087,\n        -0.62788475,\n        -0.6285306,\n        -0.6236274,\n        -0.73515266,\n        -0.738029,\n        -0.7654694,\n        -0.6870156,\n        -0.73044413,\n        -0.7317749,\n        -0.7283749,\n        -0.9046549,\n        -0.63757515,\n        -0.90129393,\n        -0.63576126,\n        -0.65300924,\n        -0.6393257,\n        -0.624173,\n        -1.0669246,\n        -0.8360199,\n        -0.88732535,\n        -0.9085496,\n        -0.8281918,\n        -1.0321151,\n        -0.69639766,\n        -1.056624,\n        -0.7568813,\n        -0.83730763,\n        -1.3531302,\n        -0.79367673,\n        -0.746458,\n        -1.0654767,\n        -0.9303029,\n        -1.0939734,\n        -0.8892214,\n        -0.6607892,\n        
-1.0712175,\n        -0.97386837,\n        -1.092996,\n        -0.82881105,\n        -0.70330036,\n        -0.80185133,\n        -0.8514173,\n        -0.69828564,\n        -0.70050687,\n        -0.6848632,\n        -1.1613108,\n        -1.4207476,\n        -0.8566658,\n        -1.1068753,\n        -1.0668347,\n        -1.0640814,\n        -1.2468433,\n        -1.1292615,\n        -1.0954584,\n        -1.9068737,\n        -1.4202605,\n        -2.034829,\n        -0.73155093,\n        -0.86568165,\n        -2.4108014,\n        -1.6762967,\n        -1.0485967,\n        -1.0873181,\n        -0.904395,\n        -1.1199968,\n        -2.1598327,\n        -1.4283174,\n        -1.0384696,\n        -0.84582597,\n        -2.4886782,\n        -0.7779316,\n        -1.0970025,\n        -1.1856172,\n        -1.9569024,\n        -3.981397,\n        -1.3820696,\n        -1.8495582,\n        -1.303945,\n        -1.7128863,\n        -0.851952,\n        -1.3258555,\n        -2.0642643,\n        -1.0822942,\n        -1.0181471,\n        -1.0585626,\n        -1.5800385,\n        -1.1739537,\n        -0.9560621,\n        -1.034065,\n        -0.7476173,\n        -1.2199632,\n        -0.8563682,\n        -1.7596116,\n        -0.9308764,\n        -0.9294558,\n        -0.700496,\n        -0.9780324,\n        -1.3246034,\n        -0.9090146,\n        -1.4302996,\n        -0.76940614,\n        -3.9980063,\n        -2.1296775,\n        -1.9396099,\n        -2.2312093,\n        -1.1239054,\n        -3.0561671,\n        -1.9985267,\n        -1.5552648,\n        -1.8671099,\n        -1.8968765,\n        -1.5244303,\n        -1.5921745,\n        -1.4288629,\n        -1.7421844,\n        -2.1218357,\n        -1.7104133,\n        -2.6510508,\n        -1.3104323,\n        -3.057764,\n        -2.031021,\n        -1.4565883,\n        -2.0348382,\n        -2.2601967,\n        -2.920707,\n        -1.2729836,\n        -3.1982338,\n        -0.99266785,\n        -1.7109475,\n        -2.6560607,\n        
-2.4753838,\n        -2.1877108,\n        -2.064123,\n        -1.1081226,\n        -3.0854433,\n        -5.3121643,\n        -1.3218622,\n        -2.3300076,\n        -3.1619358,\n        -1.2176294,\n        -1.3897915,\n        -4.1942477,\n        -2.4851782,\n        -2.5883253,\n        -2.5248022,\n        -1.5893021,\n        -3.42884,\n        -1.4305729,\n        -1.9520689,\n        -3.5295646,\n        -2.5297673,\n        -2.0775554,\n        -1.0084162,\n        -1.5056899,\n        -1.9147689,\n        -1.6083453,\n        -1.1871455,\n        -2.0726132,\n        -2.1473198,\n        -4.442302,\n        -4.670319,\n        -3.5520089,\n        -1.6042694,\n        -3.1528566,\n        -4.5269594,\n        -2.1927118,\n        -1.8097452,\n        -2.7539496,\n        -4.4474444,\n        -1.2035335,\n        -1.1168379,\n        -1.3293927,\n        -2.0163567,\n        -2.2053635,\n        -3.6734867,\n        -2.1019855,\n        -1.3774385,\n        -1.8108878,\n        -1.7328309,\n        -4.0557847,\n        -6.4212666,\n        -2.2095923,\n        -5.359125,\n        -1.4784194,\n        -2.1430738,\n        -3.644639,\n        -1.284663,\n        -3.362314,\n        -1.093306,\n        -2.4446,\n        -1.1570675,\n        -1.4576575,\n        -2.2837992,\n        -4.856039,\n        -1.7351286,\n        -2.236084,\n        -2.0651164,\n        -1.4864243,\n        -1.4940932,\n        -1.6566368,\n        -1.8886653,\n        -0.74667937,\n        -2.0818756,\n        -1.1767255,\n        -1.7952884,\n        -1.5501437,\n        -1.3899502,\n        -1.6304728,\n        -1.4943737,\n        -2.758686,\n        -2.8887265,\n        -2.2842896,\n        -1.0909864\n      ],\n      \"pointIndex\": [\n        4,\n        503,\n        223,\n        84054172,\n        109366651,\n        51339656,\n        88791098,\n        4072988,\n        54556737,\n        75121037,\n        86904619,\n        102956360,\n        91321010,\n        
118585526,\n        78577257,\n        63311712,\n        67181236,\n        52778,\n        82908703,\n        94184616,\n        106563399,\n        78881456,\n        66679141,\n        41331958,\n        7050563,\n        48465170,\n        96787920,\n        37268129,\n        82110402,\n        59620455,\n        119453249,\n        71640439,\n        75552290,\n        77771305,\n        46659868,\n        28981394,\n        62183364,\n        12446746,\n        13219385,\n        115972942,\n        52801009,\n        61792140,\n        572087,\n        39266840,\n        2980561,\n        32084687,\n        45569065,\n        39983677,\n        47592681,\n        3347835,\n        123538200,\n        52604221,\n        20106550,\n        54042751,\n        22117839,\n        58149627,\n        116452996,\n        61076830,\n        66799983,\n        69136378,\n        40642821,\n        22894287,\n        123149293,\n        33336803,\n        77739806,\n        81458609,\n        81692046,\n        85920934,\n        87924055,\n        30059320,\n        92925472,\n        95823877,\n        99698152,\n        121454403,\n        59960944,\n        116220425,\n        117475741,\n        499\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 223,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -7058296641388226677\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.54927164,\n        -0.55799776,\n        -0.56177443,\n        -0.5784826,\n        -0.5607427,\n        -0.5943863,\n        -0.56446606,\n        -0.63234544,\n        -0.6444895,\n        -0.6149962,\n        -0.5726348,\n        -0.6144211,\n        -0.5976512,\n        -0.9134983,\n        -0.5969545,\n        -0.6978777,\n        -0.662781,\n        
-0.6462334,\n        -0.82320976,\n        -0.62632024,\n        -0.78237295,\n        -0.6009164,\n        -0.71237713,\n        -0.63300943,\n        -0.68879,\n        -0.8736334,\n        -0.6628779,\n        -1.0856007,\n        -0.92329025,\n        -1.0283417,\n        -0.772051,\n        -0.78350765,\n        -0.8919709,\n        -0.7718917,\n        -1.0579466,\n        -0.7923724,\n        -0.69287163,\n        -0.8952094,\n        -0.85189104,\n        -0.65595937,\n        -0.7328316,\n        -1.1698253,\n        -0.95717037,\n        -0.8587923,\n        -0.64600074,\n        -0.73141485,\n        -0.87741977,\n        -1.103363,\n        -0.63726646,\n        -0.8150458,\n        -0.74368244,\n        -0.8759372,\n        -1.081512,\n        -0.71728975,\n        -0.688069,\n        -1.2152156,\n        -1.1525712,\n        -1.2191999,\n        -1.0493709,\n        -1.5191622,\n        -1.5661151,\n        -1.6331143,\n        -1.2799627,\n        -1.7562692,\n        -1.150978,\n        -1.178991,\n        -0.9250156,\n        -0.86997193,\n        -1.4348046,\n        -1.4207041,\n        -1.1504325,\n        -0.81934685,\n        -1.3765908,\n        -1.1598345,\n        -0.76282966,\n        -1.2485176,\n        -0.9627283,\n        -1.0175,\n        -1.3302476,\n        -1.4262129,\n        -0.73058355,\n        -0.7768302,\n        -1.0742236,\n        -1.2367499,\n        -1.4979956,\n        -1.3816236,\n        -1.5363247,\n        -1.4642798,\n        -1.3516219,\n        -0.72886014,\n        -0.84243894,\n        -1.1391242,\n        -0.816415,\n        -1.2249358,\n        -1.1653829,\n        -1.1568464,\n        -1.409465,\n        -0.7573363,\n        -1.8553492,\n        -0.9267509,\n        -1.277529,\n        -0.84015083,\n        -1.4367083,\n        -1.1704016,\n        -1.0822623,\n        -1.9117612,\n        -1.4373354,\n        -1.1935893,\n        -0.72218204,\n        -0.9773071,\n        -2.307135,\n        -1.6119989,\n   
     -1.4311005,\n        -1.6152966,\n        -3.8505075,\n        -2.5684729,\n        -1.4745631,\n        -2.283275,\n        -1.9480804,\n        -1.5828394,\n        -2.2038364,\n        -1.8381954,\n        -2.7674043,\n        -3.1706169,\n        -1.6801208,\n        -1.4045823,\n        -3.458629,\n        -5.5322,\n        -2.0492415,\n        -1.3397441,\n        -1.2684066,\n        -1.728175,\n        -4.826077,\n        -1.3611041,\n        -1.0132012,\n        -2.3263867,\n        -1.668477,\n        -5.789353,\n        -1.8627526,\n        -3.0538514,\n        -4.4253926,\n        -1.650529,\n        -2.3139496,\n        -2.480115,\n        -3.4141963,\n        -2.1213675,\n        -1.5043126,\n        -1.4033012,\n        -3.1311145,\n        -0.9516954,\n        -2.1204515,\n        -3.598058,\n        -1.4977916,\n        -1.0788697,\n        -1.3113519,\n        -3.0442638,\n        -4.627087,\n        -2.0146139,\n        -1.3869731,\n        -2.342794,\n        -1.5375688,\n        -1.2935922,\n        -4.4602623,\n        -2.0731196,\n        -4.6459804,\n        -3.7177625,\n        -3.0546892,\n        -2.069629,\n        -1.2756584,\n        -3.0119967,\n        -1.6468534,\n        -1.5275029,\n        -2.2072217,\n        -2.840689,\n        -1.5935591,\n        -1.7980634,\n        -2.1122496,\n        -3.8701308,\n        -2.2193303,\n        -3.620466,\n        -1.0340712,\n        -1.5489097,\n        -2.2800333,\n        -1.4681567,\n        -4.295331,\n        -0.97169226,\n        -1.1701701,\n        -1.4693524,\n        -1.9215053,\n        -1.4616832,\n        -2.4838266,\n        -1.2926298,\n        -1.2791344,\n        -1.4447894,\n        -2.3319173,\n        -2.0730073,\n        -2.3841429,\n        -1.9727724,\n        -1.9029855,\n        -3.8191602,\n        -3.4143884,\n        -1.6520147,\n        -3.1560063,\n        -1.1492236,\n        -1.8619269,\n        -2.8565183,\n        -1.4807147,\n        -2.1151505,\n    
    -2.7476447,\n        -1.7275034,\n        -2.7608826,\n        -1.9752641,\n        -2.1960561,\n        -1.7852536,\n        -1.7566513,\n        -1.6960467,\n        -1.3483127,\n        -1.7708883,\n        -2.5635688,\n        -1.6335524,\n        -1.4366835,\n        -3.7009428,\n        -2.626339,\n        -2.0646389,\n        -2.1659148,\n        -1.484712,\n        -3.0297914,\n        -2.1841908\n      ],\n      \"pointIndex\": [\n        0,\n        504,\n        228,\n        97198270,\n        59470988,\n        65381995,\n        79952849,\n        36792402,\n        51276755,\n        81752643,\n        80230109,\n        118281718,\n        109930419,\n        80413679,\n        71166889,\n        21318653,\n        24855703,\n        67265095,\n        120795767,\n        29750424,\n        113188732,\n        33589531,\n        31694451,\n        112099069,\n        13232518,\n        45225703,\n        49060933,\n        51105009,\n        20662083,\n        57749492,\n        63758790,\n        24384450,\n        110219280,\n        70322865,\n        77580408,\n        53260142,\n        91836161,\n        96841947,\n        6839269,\n        120096698,\n        105828287,\n        108808445,\n        19274606,\n        119428017,\n        44259364,\n        8363462,\n        41650447,\n        68138301,\n        125558726,\n        47495101,\n        4398592,\n        49768486,\n        11787755,\n        53595347,\n        76192272,\n        56214662,\n        31031249,\n        58698945,\n        22561727,\n        61418247,\n        62348906,\n        64188278,\n        66035284,\n        86677956,\n        26292697,\n        28510053,\n        6018892,\n        126800998,\n        83629329,\n        90448353,\n        29246911,\n        99134894,\n        119796088,\n        45558469,\n        108328045,\n        60162587,\n        121171674,\n        124007771,\n        85941694\n      ],\n      \"storeSequenceIndicesEnabled\": 
false,\n      \"size\": 228,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 1617599988107384674\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.49086484,\n        -0.49165273,\n        -0.49508175,\n        -0.6431985,\n        -0.5937614,\n        -0.50219905,\n        -0.573946,\n        -0.6606333,\n        -0.67695975,\n        -0.66005003,\n        -0.63093185,\n        -0.5548935,\n        -0.5702439,\n        -0.71829444,\n        -0.60590047,\n        -0.7012481,\n        -0.7132402,\n        -0.729588,\n        -0.697213,\n        -0.69351006,\n        -0.72366875,\n        -0.6569782,\n        -0.7441715,\n        -0.58781534,\n        -0.5558926,\n        -0.67732096,\n        -0.5800482,\n        -0.7528965,\n        -0.81201863,\n        -0.7412216,\n        -0.6288245,\n        -1.2486026,\n        -0.75286245,\n        -0.72401667,\n        -0.71564144,\n        -0.76534486,\n        -0.9099728,\n        -1.0183343,\n        -0.70542717,\n        -0.70135754,\n        -0.91260487,\n        -0.8092986,\n        -0.7336066,\n        -0.7567574,\n        -0.7708158,\n        -0.84383863,\n        -0.81836164,\n        -0.68041724,\n        -0.6287525,\n        -0.6440457,\n        -0.7410818,\n        -0.7212312,\n        -0.71242625,\n        -0.67231476,\n        -0.63468635,\n        -0.7622313,\n        -0.8208822,\n        -1.1445706,\n        -0.94941014,\n        -0.8612036,\n        -1.707661,\n        -0.9243854,\n        -1.9626482,\n        -1.5958457,\n        -1.3077794,\n        -1.1861492,\n        -1.1635512,\n        -2.1751227,\n        -0.8469007,\n        -1.4496542,\n        -1.5342058,\n        -0.96538514,\n        -1.3460073,\n        -1.419018,\n        -1.0653356,\n        -1.2852613,\n        -1.2012281,\n     
   -0.8473538,\n        -0.8943203,\n        -0.8048798,\n        -1.167146,\n        -1.3002725,\n        -1.0605211,\n        -0.92645526,\n        -1.69568,\n        -0.7741866,\n        -1.0942045,\n        -1.6122143,\n        -0.9987477,\n        -1.2992978,\n        -1.3553501,\n        -1.4368896,\n        -1.2315027,\n        -1.1274419,\n        -1.8796052,\n        -0.9360587,\n        -1.1173197,\n        -1.2018306,\n        -1.2060379,\n        -0.7550845,\n        -1.3723761,\n        -0.92328745,\n        -0.9710938,\n        -1.2305354,\n        -1.0390061,\n        -1.0124133,\n        -0.734787,\n        -0.6868354,\n        -1.0925033,\n        -1.7397844,\n        -0.75704724,\n        -0.93412423,\n        -1.0038755,\n        -0.8340533,\n        -1.0903175,\n        -1.6320931,\n        -5.0747957,\n        -1.2733006,\n        -2.6070294,\n        -2.010462,\n        -1.7844871,\n        -3.348369,\n        -2.3121226,\n        -1.5482043,\n        -0.97831064,\n        -2.7482018,\n        -2.2871487,\n        -2.119465,\n        -2.905391,\n        -2.1168177,\n        -3.059811,\n        -1.3497925,\n        -2.189309,\n        -1.8456689,\n        -2.1114528,\n        -2.5084443,\n        -2.2272942,\n        -1.8464441,\n        -2.7921028,\n        -4.0985055,\n        -1.874381,\n        -4.627585,\n        -2.7565491,\n        -1.4172932,\n        -1.0251176,\n        -2.5863686,\n        -2.255447,\n        -5.3265395,\n        -1.7419072,\n        -1.784064,\n        -1.4473236,\n        -2.1382859,\n        -1.6768821,\n        -2.059615,\n        -2.2160065,\n        -0.94110626,\n        -2.160445,\n        -1.3646688,\n        -1.2020136,\n        -1.2484066,\n        -2.6778018,\n        -2.6019099,\n        -2.7626283,\n        -1.5040901,\n        -2.4336689,\n        -1.3975806,\n        -5.2075863,\n        -1.9458374,\n        -1.9337393,\n        -2.611955,\n        -2.93051,\n        -2.350819,\n        -1.2660667,\n   
     -1.3496511,\n        -1.3058685,\n        -3.110164,\n        -3.809094,\n        -3.3404458,\n        -1.8760198,\n        -1.8080118,\n        -1.6777174,\n        -2.7298841,\n        -1.9431881,\n        -2.405735,\n        -1.4490008,\n        -1.3650403,\n        -1.4648993,\n        -1.7826031,\n        -1.1778947,\n        -1.9751103,\n        -2.531089,\n        -2.5696335,\n        -6.8766155,\n        -3.561331,\n        -1.7344441,\n        -2.2454484,\n        -1.7390339,\n        -1.3827872,\n        -1.3778417,\n        -1.3080233,\n        -1.2370479,\n        -4.110881,\n        -2.2114515,\n        -2.4566734,\n        -1.7254595,\n        -1.7614281,\n        -0.99026966,\n        -2.1424553,\n        -1.7575904,\n        -4.0985904,\n        -1.8833361,\n        -1.3063549,\n        -1.2607809,\n        -0.82764417,\n        -0.8102777,\n        -2.1919854,\n        -1.403665,\n        -1.3009878,\n        -2.792962,\n        -3.3374386,\n        -2.8087707,\n        -2.2060149,\n        -0.84837985,\n        -2.9999259,\n        -1.3997679,\n        -2.5588124,\n        -2.0231915,\n        -1.9132456,\n        -0.84803754,\n        -1.3362615\n      ],\n      \"pointIndex\": [\n        0,\n        504,\n        230,\n        110086351,\n        101424723,\n        52573038,\n        95008443,\n        15560022,\n        19982898,\n        64057438,\n        79464555,\n        107052269,\n        93002383,\n        42159278,\n        49496892,\n        119779632,\n        111113351,\n        26064893,\n        12176857,\n        6657738,\n        108593992,\n        126506494,\n        72496706,\n        42621811,\n        18129767,\n        4097229,\n        1824239,\n        9341177,\n        109152575,\n        22442466,\n        82928928,\n        91678956,\n        79276268,\n        118732690,\n        78958311,\n        6067895,\n        97094175,\n        103002712,\n        107319471,\n        120864622,\n        2795147,\n        
34753592,\n        3642607,\n        37322672,\n        25049365,\n        75391554,\n        12835239,\n        106173089,\n        43755132,\n        117583954,\n        46761671,\n        8965966,\n        50974275,\n        95736136,\n        97367112,\n        59029487,\n        86855671,\n        126257878,\n        123292235,\n        65843424,\n        64392139,\n        65590301,\n        72222837,\n        118300295,\n        76401247,\n        77680057,\n        27451418,\n        81917883,\n        28498094,\n        33457347,\n        99015206,\n        98379937,\n        100477882,\n        105021239,\n        106810149,\n        116519442,\n        74534426,\n        122909395,\n        125209175,\n        255019\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 230,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -8928687457762026561\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.56832755,\n        -0.5909885,\n        -0.5748976,\n        -0.60423553,\n        -0.60178524,\n        -0.60157627,\n        -0.587748,\n        -0.7172052,\n        -0.6241009,\n        -0.60629946,\n        -0.6160531,\n        -0.63146806,\n        -0.608597,\n        -0.59321743,\n        -0.6879986,\n        -0.783219,\n        -0.8522558,\n        -0.6727227,\n        -0.8268062,\n        -0.62091374,\n        -0.61043733,\n        -0.74049455,\n        -0.7057798,\n        -0.796559,\n        -0.87044466,\n        -0.70782137,\n        -0.7786673,\n        -0.6604127,\n        -0.8732423,\n        -1.4029797,\n        -0.7945969,\n        -0.8747885,\n        -0.8368457,\n        -0.95190823,\n        -0.9993142,\n        -0.8970017,\n        -0.93625695,\n        -0.84799105,\n        -0.8354849,\n        -0.79125404,\n     
   -0.799949,\n        -0.67009306,\n        -0.7891741,\n        -1.4105603,\n        -0.8896512,\n        -1.0135485,\n        -0.929001,\n        -0.886674,\n        -0.8006716,\n        -1.4901229,\n        -1.2618842,\n        -0.90933096,\n        -0.8073693,\n        -0.8621052,\n        -1.0471725,\n        -0.7722495,\n        -1.3036199,\n        -0.87458843,\n        -1.4554368,\n        -1.5855098,\n        -1.9130875,\n        -0.94120586,\n        -1.0607948,\n        -0.9088619,\n        -1.4801517,\n        -0.99234706,\n        -1.3816286,\n        -1.2490814,\n        -1.1207249,\n        -1.8138934,\n        -1.0425998,\n        -1.4596838,\n        -1.6276789,\n        -1.2308221,\n        -1.0301083,\n        -1.1402522,\n        -1.4639021,\n        -0.9087255,\n        -1.0546715,\n        -0.9879512,\n        -1.1541631,\n        -0.82525426,\n        -1.0790806,\n        -0.9471458,\n        -0.85101986,\n        -1.233984,\n        -2.3894157,\n        -2.9223077,\n        -1.5428509,\n        -1.5711381,\n        -1.0467322,\n        -1.8167585,\n        -1.3438088,\n        -1.2582407,\n        -1.2467229,\n        -1.6579001,\n        -1.990238,\n        -1.5339531,\n        -1.0503933,\n        -1.6122134,\n        -1.8513618,\n        -1.6831448,\n        -1.2689091,\n        -1.0429959,\n        -1.3003299,\n        -0.8789561,\n        -0.94471043,\n        -1.4198842,\n        -2.455136,\n        -1.2004322,\n        -1.2269163,\n        -1.12566,\n        -0.7759718,\n        -1.4541564,\n        -4.4826355,\n        -1.3858997,\n        -1.4449863,\n        -1.9620763,\n        -1.6177365,\n        -1.7349641,\n        -1.9133793,\n        -3.974498,\n        -2.1262734,\n        -1.7611657,\n        -1.681921,\n        -2.7635946,\n        -3.0031888,\n        -5.914061,\n        -1.1342934,\n        -2.911582,\n        -1.8979477,\n        -1.1300302,\n        -2.3564131,\n        -2.2195525,\n        -1.4680218,\n        
-1.3823087,\n        -3.8461316,\n        -2.432869,\n        -1.3012831,\n        -3.8942091,\n        -2.4783866,\n        -1.3676889,\n        -1.2583317,\n        -2.443607,\n        -3.2685487,\n        -2.3278441,\n        -4.7823668,\n        -1.8398348,\n        -2.6126237,\n        -2.504085,\n        -1.9899489,\n        -4.835528,\n        -2.721473,\n        -2.3454478,\n        -3.7778504,\n        -1.8538367,\n        -1.1873996,\n        -1.3453918,\n        -1.1090251,\n        -5.224681,\n        -1.4397637,\n        -1.180208,\n        -1.9746033,\n        -1.0095749,\n        -1.1477605,\n        -4.054302,\n        -1.5409443,\n        -1.0316113,\n        -5.2108626,\n        -3.1235392,\n        -1.0136346,\n        -4.280433,\n        -1.3601183,\n        -3.8717458,\n        -2.4440918,\n        -4.286065,\n        -5.50645,\n        -2.420915,\n        -1.6485515,\n        -2.066975,\n        -3.7196562,\n        -2.0036898,\n        -1.4285997,\n        -3.475406,\n        -3.0394547,\n        -2.7931194,\n        -1.4155692,\n        -3.79178,\n        -5.0270667,\n        -2.0082972,\n        -1.690726,\n        -2.5187201,\n        -1.95259,\n        -3.773377,\n        -2.530886,\n        -2.3352246,\n        -2.2920215,\n        -2.7772734,\n        -1.3019248,\n        -2.7455304,\n        -3.8654735,\n        -4.183722,\n        -1.9184648,\n        -2.168456,\n        -2.1282635,\n        -3.9138813,\n        -1.7724588,\n        -1.7226524,\n        -1.1068506,\n        -1.533566,\n        -2.2625072,\n        -1.0837358,\n        -2.4536111,\n        -1.4014043,\n        -1.1351576,\n        -1.6732528,\n        -1.6587623,\n        -3.43352,\n        -3.4383984,\n        -3.6564918,\n        -1.3637918,\n        -2.2445352,\n        -5.1128774,\n        -3.1354551,\n        -2.1848104,\n        -1.7619399,\n        -1.5470188\n      ],\n      \"pointIndex\": [\n        0,\n        503,\n        227,\n        90623523,\n        
26277715,\n        80852668,\n        96154707,\n        16002266,\n        21880427,\n        115108,\n        28454336,\n        115537322,\n        58754191,\n        101519352,\n        45299020,\n        20168772,\n        63804658,\n        71557155,\n        80167543,\n        935797,\n        115024005,\n        32761132,\n        34830558,\n        16272046,\n        19323507,\n        94634636,\n        3442190,\n        49227875,\n        52614915,\n        87997601,\n        47361516,\n        71328471,\n        11621999,\n        21571484,\n        126664324,\n        29477886,\n        91369324,\n        97478496,\n        112493209,\n        118859279,\n        14477878,\n        103491542,\n        100150288,\n        121802827,\n        1599154,\n        37090009,\n        39107810,\n        40467407,\n        17810789,\n        42202527,\n        43379152,\n        74173352,\n        22488157,\n        60973194,\n        77940776,\n        75755120,\n        54860923,\n        72404857,\n        52348407,\n        69212122,\n        65407855,\n        100722638,\n        116220869,\n        24021941,\n        75225950,\n        105001139,\n        68756504,\n        82152011,\n        6260319,\n        93904377,\n        27998054,\n        112965178,\n        106109504,\n        105443757,\n        48993265,\n        116075177,\n        119873334,\n        126231308,\n        253639\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 227,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -1388995391054092274\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.31804442,\n        -0.43489292,\n        -0.42907506,\n        -0.4363616,\n        -0.45597282,\n        -0.46680713,\n        -0.46190825,\n        
-0.5677014,\n        -0.4563058,\n        -0.6166242,\n        -0.5039146,\n        -0.6246354,\n        -0.4707326,\n        -0.47779357,\n        -0.49204805,\n        -0.64231163,\n        -0.6764985,\n        -0.46582282,\n        -0.51274574,\n        -0.6255991,\n        -0.6194547,\n        -0.5232377,\n        -0.63157463,\n        -0.6706966,\n        -0.63934845,\n        -0.5277276,\n        -0.48863217,\n        -0.49394917,\n        -0.4862815,\n        -1.0154939,\n        -0.64515084,\n        -0.7793933,\n        -0.6908473,\n        -0.7567321,\n        -0.7171721,\n        -0.46691778,\n        -0.8643776,\n        -0.51696473,\n        -0.84084165,\n        -0.6723087,\n        -1.7153779,\n        -0.62704635,\n        -0.9834292,\n        -0.6648248,\n        -0.6621618,\n        -0.82280827,\n        -0.82331115,\n        -0.679924,\n        -1.0636495,\n        -0.6468792,\n        -0.8472379,\n        -0.5683926,\n        -0.61848426,\n        -0.9037221,\n        -0.49113053,\n        -0.638287,\n        -0.5134129,\n        -0.88270605,\n        -0.5657373,\n        -1.0913377,\n        -1.5473504,\n        -0.8016271,\n        -0.75376856,\n        -1.2727239,\n        -1.094562,\n        -0.7468163,\n        -0.72751355,\n        -0.81397045,\n        -0.93953127,\n        -0.94911826,\n        -1.8311458,\n        -1.4125476,\n        -0.5176985,\n        -0.87438446,\n        -1.5910777,\n        -0.9631172,\n        -0.62825584,\n        -0.92554176,\n        -1.268599,\n        -1.4888319,\n        -1.1603714,\n        -1.9172816,\n        -1.8976915,\n        -0.7566482,\n        -0.62849605,\n        -1.0634012,\n        -1.5461874,\n        -2.2122335,\n        -0.96318877,\n        -0.95302933,\n        -0.94853485,\n        -0.9550196,\n        -1.105756,\n        -0.8768027,\n        -1.3013245,\n        -0.70743793,\n        -1.454751,\n        -1.20427,\n        -1.0981193,\n        -0.9563045,\n        -0.75259453,\n        
-1.0261632,\n        -0.9617375,\n        -0.5910247,\n        -0.9938949,\n        -0.99083114,\n        -0.6719092,\n        -0.9257738,\n        -1.8758082,\n        -1.348569,\n        -0.65144736,\n        -0.71926266,\n        -0.78749126,\n        -1.4659787,\n        -0.6161229,\n        -0.9727178,\n        -0.8877861,\n        -1.0888059,\n        -3.7083657,\n        -1.3748552,\n        -2.400187,\n        -2.0805056,\n        -3.7978256,\n        -0.9706547,\n        -1.2985879,\n        -1.1572261,\n        -1.219806,\n        -1.541234,\n        -1.8615499,\n        -1.7287426,\n        -1.1255622,\n        -2.70784,\n        -1.9170482,\n        -1.4877199,\n        -0.9863343,\n        -2.0110765,\n        -4.1267276,\n        -1.8231001,\n        -1.3878483,\n        -2.3702366,\n        -1.6555223,\n        -1.9235884,\n        -1.8364646,\n        -1.6715032,\n        -2.3337338,\n        -1.5348499,\n        -1.0097706,\n        -2.8215172,\n        -3.0130804,\n        -1.6238242,\n        -2.696633,\n        -2.2997658,\n        -4.003807,\n        -1.4826674,\n        -2.679388,\n        -1.0749161,\n        -1.9095988,\n        -1.6551454,\n        -4.2207875,\n        -3.1003954,\n        -1.5809857,\n        -1.9965923,\n        -4.100097,\n        -3.2477396,\n        -3.553958,\n        -2.510146,\n        -2.1376574,\n        -4.343759,\n        -2.1196232,\n        -1.479722,\n        -1.9746994,\n        -1.1078998,\n        -1.1593446,\n        -2.383345,\n        -2.3589444,\n        -3.51402,\n        -2.245927,\n        -3.295619,\n        -1.8285325,\n        -1.9398905,\n        -1.5956552,\n        -3.3970203,\n        -1.0234499,\n        -2.2899134,\n        -3.5824833,\n        -7.324093,\n        -1.2620987,\n        -1.6717616,\n        -1.4615635,\n        -1.7714953,\n        -4.0805354,\n        -1.5767369,\n        -0.87090784,\n        -3.132244,\n        -3.5286536,\n        -1.3311883,\n        -1.6633921,\n        
-1.9951661,\n        -1.3551797,\n        -1.4291936,\n        -1.3036424,\n        -1.4725003,\n        -1.4442756,\n        -2.8375487,\n        -3.914979,\n        -1.1315118,\n        -3.6979558,\n        -1.0114822,\n        -0.91020817,\n        -2.0747359,\n        -2.1834383,\n        -4.4955087,\n        -1.40393,\n        -1.6697918,\n        -0.9387991,\n        -1.2710674,\n        -1.8251337,\n        -2.156713,\n        -3.0485265,\n        -1.4951202,\n        -2.7489276,\n        -3.3636158,\n        -1.2925293,\n        -1.7190237,\n        -1.1842874,\n        -1.8158754,\n        -1.7622224,\n        -2.1716104,\n        -2.079569,\n        -3.835428,\n        -1.0511314,\n        -2.020286,\n        -1.0010629,\n        -1.6464177,\n        -2.954303,\n        -3.444575\n      ],\n      \"pointIndex\": [\n        0,\n        504,\n        236,\n        93894141,\n        57235403,\n        51292171,\n        29464013,\n        37198160,\n        20519697,\n        54427788,\n        84068462,\n        106255160,\n        1774023,\n        40074331,\n        4427759,\n        3079778,\n        85663388,\n        63804879,\n        12519718,\n        114932833,\n        98378907,\n        15180335,\n        36213183,\n        37758740,\n        119174445,\n        105153785,\n        46290997,\n        62143965,\n        41946504,\n        52128933,\n        23058516,\n        59278899,\n        2045518,\n        69088865,\n        72568272,\n        81874749,\n        123341018,\n        96074422,\n        31051790,\n        105784766,\n        60160276,\n        127761926,\n        2368005,\n        117854012,\n        38401570,\n        73269329,\n        24952653,\n        63326851,\n        106430149,\n        47568139,\n        19470288,\n        1875865,\n        48137184,\n        47189058,\n        109022512,\n        54731519,\n        11246436,\n        124472607,\n        101606598,\n        56372736,\n        57755438,\n        
58921712,\n        105448807,\n        64277816,\n        86411819,\n        26917341,\n        652065,\n        115585707,\n        86867912,\n        28333852,\n        115424301,\n        91242285,\n        6613002,\n        96842457,\n        2992981,\n        116744267,\n        32198787,\n        112944186,\n        117543238,\n        123927971,\n        18612777,\n        207553\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 236,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -1996687561581955494\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5846756,\n        -0.58689344,\n        -0.60667974,\n        -0.6030738,\n        -0.5918051,\n        -0.6205509,\n        -0.61719275,\n        -0.6469364,\n        -0.6475224,\n        -0.65886086,\n        -0.618924,\n        -0.7007284,\n        -0.6607422,\n        -0.6618274,\n        -0.82475907,\n        -0.73422694,\n        -0.6774076,\n        -0.75282896,\n        -1.0639298,\n        -0.8307625,\n        -0.88619107,\n        -0.65084374,\n        -0.7746913,\n        -0.8276962,\n        -0.73104393,\n        -0.7153365,\n        -0.6641185,\n        -0.69109845,\n        -0.78577524,\n        -0.8427952,\n        -1.0567325,\n        -1.8381795,\n        -0.73889744,\n        -0.92306626,\n        -0.73201686,\n        -0.80058616,\n        -0.9315472,\n        -1.1340989,\n        -1.1253422,\n        -1.049055,\n        -1.1059141,\n        -1.3064846,\n        -1.0086405,\n        -0.92371744,\n        -0.6526815,\n        -0.7937004,\n        -0.8038962,\n        -1.2402302,\n        -0.8927977,\n        -0.7664857,\n        -0.8206795,\n        -1.4141115,\n        -0.7470262,\n        -0.717384,\n        -0.7282268,\n        -0.7670583,\n        
-0.71265644,\n        -1.056092,\n        -0.94361395,\n        -0.95633894,\n        -1.4659557,\n        -1.0672771,\n        -3.3476443,\n        -3.6351054,\n        -1.8766991,\n        -1.0499516,\n        -0.8640289,\n        -1.3418787,\n        -1.6710471,\n        -1.0518789,\n        -1.3284807,\n        -1.0672903,\n        -1.2984877,\n        -1.4317946,\n        -1.2819399,\n        -1.1776006,\n        -2.0117435,\n        -1.1909988,\n        -1.6325643,\n        -1.3755174,\n        -1.8467388,\n        -1.1327515,\n        -1.5188427,\n        -1.3899642,\n        -1.6420921,\n        -1.043407,\n        -1.6026639,\n        -1.138417,\n        -0.97101223,\n        -1.271242,\n        -0.7115588,\n        -1.3566486,\n        -0.9234316,\n        -0.85723275,\n        -0.8918241,\n        -1.5019754,\n        -1.6031859,\n        -1.1208738,\n        -1.133076,\n        -1.1482856,\n        -0.82539636,\n        -0.97691697,\n        -0.90151674,\n        -1.9680749,\n        -1.5732255,\n        -0.97369736,\n        -1.610725,\n        -1.5830237,\n        -1.2785949,\n        -1.0099097,\n        -0.8909851,\n        -0.77351177,\n        -1.0141395,\n        -0.7269804,\n        -1.4642861,\n        -2.7214887,\n        -1.4997181,\n        -14.602708,\n        -1.403811,\n        -1.4172581,\n        -1.310325,\n        -1.5547988,\n        -2.1956336,\n        -3.7234614,\n        -1.7775134,\n        -3.4160905,\n        -4.9316273,\n        -4.792233,\n        -4.167746,\n        -2.1372902,\n        -2.0618155,\n        -1.0708857,\n        -2.642648,\n        -1.2613289,\n        -1.8870069,\n        -1.3915552,\n        -4.009344,\n        -1.8726304,\n        -1.9056902,\n        -2.2972603,\n        -2.2165756,\n        -2.9336991,\n        -2.7637823,\n        -1.2901676,\n        -1.5869136,\n        -1.9698185,\n        -2.1646268,\n        -2.960697,\n        -2.566998,\n        -1.4355865,\n        -2.5717368,\n        
-2.0413668,\n        -1.290169,\n        -2.516158,\n        -2.759129,\n        -2.0104089,\n        -3.2819536,\n        -1.6685455,\n        -2.3512256,\n        -2.0406234,\n        -1.4104943,\n        -3.3107772,\n        -5.470009,\n        -3.6898227,\n        -2.2685525,\n        -2.7050624,\n        -3.1616924,\n        -2.3806925,\n        -2.7841287,\n        -1.9129984,\n        -3.2932487,\n        -1.6767087,\n        -1.1507784,\n        -2.3390803,\n        -4.5349946,\n        -3.1582544,\n        -1.7180965,\n        -5.6086135,\n        -2.3687649,\n        -2.3038986,\n        -1.8158605,\n        -3.0817351,\n        -2.2010934,\n        -1.9180917,\n        -1.4120785,\n        -1.3320959,\n        -3.7883961,\n        -1.5549222,\n        -0.9354667,\n        -1.0546124,\n        -1.2253877,\n        -2.1193779,\n        -1.5984664,\n        -4.7134676,\n        -1.7268164,\n        -2.1174202,\n        -1.297949,\n        -1.8061957,\n        -1.2423681,\n        -2.1758392,\n        -1.4506269,\n        -2.4918299,\n        -0.90987223,\n        -2.2017498,\n        -1.0510653,\n        -1.9787481,\n        -2.1033976,\n        -2.4857621,\n        -3.798583,\n        -2.7556937,\n        -2.8681157,\n        -3.3335586,\n        -1.1036271,\n        -2.1468067,\n        -1.7182089,\n        -9.625127,\n        -1.7497753,\n        -1.3092595,\n        -1.6229994,\n        -1.8266094,\n        -1.1855419,\n        -1.1022376,\n        -2.5157192,\n        -1.8676955,\n        -3.8111587,\n        -2.3706863,\n        -1.3407114,\n        -1.2293477\n      ],\n      \"pointIndex\": [\n        0,\n        502,\n        228,\n        85417915,\n        87177656,\n        52405554,\n        24939584,\n        81211067,\n        63171332,\n        68876814,\n        88857761,\n        107085546,\n        50678915,\n        43230496,\n        51144720,\n        9727721,\n        73492326,\n        27971076,\n        84051766,\n        
100252284,\n        33351039,\n        122947737,\n        10429343,\n        109197430,\n        8155808,\n        46643364,\n        2120434,\n        112945328,\n        84785397,\n        88789216,\n        64828394,\n        23919421,\n        11277540,\n        77949708,\n        26985267,\n        94800518,\n        99115340,\n        102070153,\n        6282878,\n        50070055,\n        37438927,\n        35424417,\n        62523043,\n        68161840,\n        4127142,\n        5313773,\n        43325589,\n        38238130,\n        47910376,\n        66853413,\n        28640695,\n        22449973,\n        65688760,\n        55518842,\n        10740481,\n        58487096,\n        96556615,\n        23345688,\n        5188696,\n        11772972,\n        95503364,\n        115673175,\n        125130528,\n        14571204,\n        79344496,\n        82642712,\n        54565943,\n        98714084,\n        22037788,\n        92278340,\n        97731258,\n        85795585,\n        98369318,\n        103507242,\n        107488494,\n        110471801,\n        116862439,\n        123458410,\n        127258485\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 228,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -128508364556705108\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.630898,\n        -0.6477811,\n        -0.63280094,\n        -0.6948641,\n        -0.6490363,\n        -0.6381175,\n        -0.70697945,\n        -0.7485131,\n        -0.7632588,\n        -0.8179374,\n        -0.6676431,\n        -0.6843537,\n        -0.6571992,\n        -0.7652821,\n        -0.7276874,\n        -0.7827385,\n        -0.77456915,\n        -0.7726149,\n        -0.77507704,\n        -0.9687915,\n        -1.0675865,\n        
-0.7493418,\n        -0.88513535,\n        -0.6983723,\n        -0.8357457,\n        -0.7440659,\n        -0.69447714,\n        -0.77738136,\n        -1.0133171,\n        -0.87642634,\n        -0.857812,\n        -1.0985663,\n        -1.9370906,\n        -1.2371792,\n        -1.0567256,\n        -1.0878961,\n        -0.80589247,\n        -0.7802553,\n        -0.9365398,\n        -0.9947,\n        -1.1050628,\n        -1.1739084,\n        -1.0834794,\n        -1.295832,\n        -0.79162145,\n        -0.94784623,\n        -0.90933746,\n        -0.8757789,\n        -0.9412581,\n        -0.9697154,\n        -1.4111537,\n        -0.77407503,\n        -0.76541466,\n        -0.7894012,\n        -0.7536822,\n        -0.8625097,\n        -1.4588612,\n        -1.042716,\n        -1.2608161,\n        -1.3634186,\n        -1.0306897,\n        -1.1214379,\n        -0.93017733,\n        -1.5569626,\n        -1.2675877,\n        -2.3300815,\n        -2.037261,\n        -1.3787063,\n        -2.078333,\n        -1.4332216,\n        -2.1206706,\n        -1.2226236,\n        -1.1829647,\n        -0.8473785,\n        -1.4364246,\n        -1.0885451,\n        -1.7373513,\n        -1.0789704,\n        -1.4983333,\n        -1.2914145,\n        -1.1032754,\n        -1.6687708,\n        -2.7030573,\n        -1.1990012,\n        -1.7063648,\n        -2.1159866,\n        -2.342888,\n        -1.6727512,\n        -1.564358,\n        -1.372533,\n        -0.9340407,\n        -0.99187064,\n        -1.2056274,\n        -1.0800625,\n        -1.6576957,\n        -1.3551583,\n        -1.0228239,\n        -1.7431831,\n        -1.286001,\n        -1.1640606,\n        -1.6876549,\n        -1.7610623,\n        -1.6307021,\n        -0.8457055,\n        -0.8424614,\n        -1.2389152,\n        -1.2091107,\n        -1.1098007,\n        -1.0474402,\n        -0.86699677,\n        -1.1676563,\n        -0.87737286,\n        -3.3185194,\n        -1.7179166,\n        -1.6006385,\n        -2.3141143,\n        
-3.4334073,\n        -2.3130846,\n        -2.0194337,\n        -1.8243642,\n        -1.720521,\n        -1.9310031,\n        -1.4816298,\n        -2.4737897,\n        -1.7918673,\n        -1.5567455,\n        -2.0214505,\n        -2.3170557,\n        -1.7804987,\n        -2.2804224,\n        -3.51206,\n        -2.5557177,\n        -3.2283506,\n        -2.9984992,\n        -2.7410605,\n        -1.5036118,\n        -2.0997918,\n        -2.5839639,\n        -2.6720161,\n        -1.7679182,\n        -1.5598592,\n        -2.9722614,\n        -2.615845,\n        -2.8201096,\n        -2.10161,\n        -4.986848,\n        -2.4506004,\n        -1.3532511,\n        -1.1694721,\n        -1.619396,\n        -1.6742814,\n        -2.174175,\n        -4.5054655,\n        -2.4169295,\n        -2.1824694,\n        -1.1939554,\n        -3.3943212,\n        -2.303942,\n        -2.0812685,\n        -3.2173884,\n        -2.0529165,\n        -2.0515125,\n        -2.9678133,\n        -1.671063,\n        -1.9606991,\n        -2.8123155,\n        -5.5252957,\n        -2.4556065,\n        -1.4970556,\n        -2.6249,\n        -2.0755422,\n        -3.0332034,\n        -3.7715864,\n        -2.7577121,\n        -3.0361726,\n        -2.2537212,\n        -3.0970392,\n        -1.8873868,\n        -2.558867,\n        -3.7742202,\n        -2.937425,\n        -2.6971843,\n        -1.9580126,\n        -2.1035142,\n        -1.6116444,\n        -1.6787056,\n        -4.688275,\n        -2.2149107,\n        -1.08943,\n        -1.8120431,\n        -1.8196094,\n        -2.3866353,\n        -2.839746,\n        -2.0566387,\n        -1.9034048,\n        -4.365361,\n        -2.675564,\n        -1.5361451,\n        -4.6711106,\n        -3.8471017,\n        -1.9594274,\n        -2.5981467,\n        -3.6338663,\n        -4.5456915,\n        -2.0403075,\n        -2.2516203,\n        -2.0036118,\n        -3.7112646,\n        -0.85280895,\n        -4.2003527,\n        -1.4760265,\n        -2.7323143,\n        
-1.7102116,\n        -2.1479855,\n        -1.9858385,\n        -2.095076,\n        -3.0762653,\n        -2.08245,\n        -2.035378,\n        -1.4645298,\n        -2.5723915,\n        -2.4020085,\n        -2.0320628,\n        -3.3489451,\n        -0.9732658\n      ],\n      \"pointIndex\": [\n        0,\n        501,\n        225,\n        92365140,\n        12468747,\n        20753240,\n        92141865,\n        95005479,\n        68640148,\n        62615683,\n        103838480,\n        66742972,\n        116439537,\n        104268231,\n        8858295,\n        54224740,\n        65521261,\n        74614563,\n        105769197,\n        104643027,\n        115637626,\n        111636229,\n        1331364,\n        64087795,\n        96602030,\n        98980111,\n        65777520,\n        89080983,\n        58892339,\n        22644552,\n        94256497,\n        69757173,\n        2334587,\n        79822833,\n        50828536,\n        109548796,\n        101251341,\n        30702731,\n        114884835,\n        122206416,\n        86253129,\n        35635296,\n        36615887,\n        29815484,\n        17668545,\n        41848385,\n        33060378,\n        118682512,\n        94592436,\n        9165700,\n        48507835,\n        49743418,\n        114459716,\n        24705761,\n        53719230,\n        57642889,\n        72129372,\n        118372695,\n        23655649,\n        123107717,\n        69213579,\n        75295761,\n        6699983,\n        116981543,\n        26403286,\n        80827840,\n        126197891,\n        88762477,\n        91658553,\n        107618253,\n        103016808,\n        105082782,\n        103281533,\n        110847554,\n        113373220,\n        32790595,\n        121450923,\n        124577307\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 225,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      
\"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 7222263662094774283\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.4580384,\n        -0.46075743,\n        -0.46211502,\n        -0.4929041,\n        -0.4730218,\n        -0.4747222,\n        -0.46597216,\n        -0.66657495,\n        -0.56379455,\n        -0.47808835,\n        -0.5287002,\n        -0.50128,\n        -0.5075093,\n        -0.4686614,\n        -0.73368955,\n        -0.708327,\n        -0.673227,\n        -0.57508063,\n        -0.7542673,\n        -0.5368351,\n        -0.47845197,\n        -0.5562689,\n        -0.72655267,\n        -0.5083273,\n        -0.503371,\n        -0.5842113,\n        -0.6419362,\n        -0.5932558,\n        -0.67286736,\n        -0.7619549,\n        -0.8213134,\n        -0.90004736,\n        -0.7194303,\n        -0.87810415,\n        -0.9601538,\n        -0.6465579,\n        -0.9196468,\n        -0.89380914,\n        -0.8300188,\n        -1.2643598,\n        -0.76223713,\n        -0.858839,\n        -0.5496779,\n        -0.59502965,\n        -0.6356277,\n        -0.864928,\n        -0.9490388,\n        -0.8488594,\n        -0.65086263,\n        -0.7511573,\n        -0.6592746,\n        -0.79392016,\n        -0.7518688,\n        -0.646706,\n        -0.7839151,\n        -0.64437765,\n        -1.0888321,\n        -3.038991,\n        -1.1950477,\n        -0.9051793,\n        -1.2800106,\n        -0.8435553,\n        -1.2381986,\n        -1.7287918,\n        -1.0798482,\n        -0.764986,\n        -1.1250908,\n        -1.7044045,\n        -2.1042469,\n        -1.9712648,\n        -1.036632,\n        -2.0903056,\n        -1.3683456,\n        -1.0508043,\n        -1.4783676,\n        -1.0530821,\n        -0.934944,\n        -1.250601,\n        -2.1660426,\n        -1.6160846,\n        -2.042257,\n        -0.9940596,\n        -1.1671561,\n        -1.3778867,\n        
-1.1717991,\n        -0.7084441,\n        -0.69722295,\n        -1.2867434,\n        -0.65041363,\n        -1.3847234,\n        -0.75861377,\n        -0.8957585,\n        -1.3514508,\n        -0.9746915,\n        -1.086659,\n        -1.1088036,\n        -1.0807667,\n        -0.77314854,\n        -1.0591305,\n        -0.9778022,\n        -1.0392236,\n        -1.8265591,\n        -0.77029204,\n        -0.8249063,\n        -1.866333,\n        -0.9302765,\n        -1.1593188,\n        -0.84130716,\n        -0.7519833,\n        -1.070171,\n        -1.1213996,\n        -0.8770907,\n        -0.66014594,\n        -1.7014613,\n        -1.1222742,\n        -3.6753109,\n        -3.2820745,\n        -2.1890109,\n        -2.7490132,\n        -2.0823503,\n        -2.1744382,\n        -2.1111338,\n        -1.7354052,\n        -2.6427343,\n        -1.5783647,\n        -2.3191943,\n        -2.2987525,\n        -4.551017,\n        -2.6797552,\n        -1.1783116,\n        -1.1015216,\n        -2.0225565,\n        -1.0180598,\n        -1.4789,\n        -1.1881534,\n        -2.7123027,\n        -2.309295,\n        -2.6841495,\n        -2.2632222,\n        -4.495556,\n        -4.797015,\n        -2.9508445,\n        -2.761931,\n        -2.3967311,\n        -3.2577622,\n        -2.7722323,\n        -4.411268,\n        -1.7627251,\n        -1.7279713,\n        -2.6112216,\n        -4.2717066,\n        -2.8829312,\n        -1.2101817,\n        -3.985402,\n        -2.2695396,\n        -2.272051,\n        -1.633945,\n        -5.034854,\n        -3.8012376,\n        -3.3353143,\n        -2.1721678,\n        -2.0492704,\n        -2.2347207,\n        -2.3234985,\n        -1.1148325,\n        -2.0454772,\n        -2.0794945,\n        -2.824807,\n        -1.6930101,\n        -2.8448656,\n        -1.2729511,\n        -1.3998069,\n        -0.7758805,\n        -1.3747265,\n        -0.7600807,\n        -1.4784952,\n        -2.373021,\n        -4.0120835,\n        -1.2613412,\n        -2.7648678,\n   
     -4.0832334,\n        -2.46164,\n        -1.9434028,\n        -0.9132536,\n        -1.0332861,\n        -2.0537302,\n        -4.2940426,\n        -1.770343,\n        -0.9790258,\n        -4.511891,\n        -2.8161838,\n        -1.1884514,\n        -1.6424551,\n        -1.822277,\n        -2.8266153,\n        -1.1383984,\n        -0.80848265,\n        -2.7638896,\n        -1.6277307,\n        -1.055982,\n        -2.2933555,\n        -1.3826586,\n        -2.1459916,\n        -2.1754184,\n        -3.1542444,\n        -2.6393998,\n        -3.4686902,\n        -2.8534982,\n        -1.0647641,\n        -3.4518557,\n        -3.361874,\n        -1.1547184,\n        -2.4933126,\n        -2.2956147,\n        -1.3746315,\n        -0.8468485,\n        -1.3102388,\n        -2.1647186,\n        -2.7558634,\n        -1.21418,\n        -3.544629,\n        -2.6128135,\n        -1.7413558,\n        -2.2080815,\n        -1.8403822,\n        -4.518899,\n        -1.2705089,\n        -2.5194619\n      ],\n      \"pointIndex\": [\n        1,\n        503,\n        228,\n        109434413,\n        108902718,\n        45375688,\n        86832624,\n        61475015,\n        46389286,\n        22559253,\n        79620416,\n        99865252,\n        3332832,\n        76968655,\n        45642799,\n        26412718,\n        124991254,\n        75975371,\n        87432558,\n        84846355,\n        101770610,\n        14411874,\n        35292438,\n        117281552,\n        8002730,\n        17329378,\n        18437826,\n        18993252,\n        106181391,\n        54209581,\n        4957861,\n        108094925,\n        23961322,\n        86705572,\n        25729239,\n        115529408,\n        77332542,\n        103448472,\n        119617281,\n        115078757,\n        127247486,\n        46567368,\n        116666879,\n        52781221,\n        37853085,\n        73956241,\n        113961039,\n        41589895,\n        105665277,\n        75178451,\n        1978930,\n        
118698118,\n        2148348,\n        81224634,\n        20280151,\n        73821485,\n        21476629,\n        58807825,\n        9382088,\n        57233256,\n        72226598,\n        99300621,\n        111957991,\n        80990229,\n        69990459,\n        71743878,\n        2732081,\n        78838502,\n        27695186,\n        98383303,\n        62160363,\n        119084073,\n        93542763,\n        98105998,\n        99641668,\n        103936523,\n        110020651,\n        116195474,\n        110564406\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 228,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 7872595278026300545\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5318544,\n        -0.5526347,\n        -0.5349425,\n        -0.648295,\n        -0.5679767,\n        -0.53884137,\n        -0.61517274,\n        -0.69902545,\n        -0.6633728,\n        -0.57753766,\n        -0.64800483,\n        -0.548599,\n        -0.54938823,\n        -0.6605117,\n        -0.86284494,\n        -0.71828693,\n        -0.81339324,\n        -0.81776756,\n        -0.931348,\n        -0.6920306,\n        -0.6435287,\n        -0.71530217,\n        -0.6555444,\n        -0.98792934,\n        -0.5638099,\n        -0.6063291,\n        -0.71361756,\n        -0.8526455,\n        -0.94601935,\n        -0.9070212,\n        -1.2568408,\n        -1.6926986,\n        -0.94654936,\n        -1.230797,\n        -0.9508877,\n        -1.2342145,\n        -1.5927098,\n        -1.0119296,\n        -1.0349593,\n        -0.77720755,\n        -0.9708845,\n        -0.8177801,\n        -0.7369408,\n        -0.925699,\n        -1.3079426,\n        -0.6662252,\n        -0.6679798,\n        -1.0225967,\n        -1.0462278,\n        -1.093949,\n        
-0.61489826,\n        -0.7670343,\n        -0.6418647,\n        -0.8002868,\n        -0.7372775,\n        -0.8960303,\n        -1.1364943,\n        -0.9798754,\n        -1.1735563,\n        -1.3999515,\n        -1.0121135,\n        -1.5662241,\n        -1.4446868,\n        -1.7188863,\n        -1.9484583,\n        -1.6996074,\n        -1.1428131,\n        -1.5619653,\n        -1.2767718,\n        -1.7581125,\n        -2.8999724,\n        -1.5626452,\n        -1.7994468,\n        -2.3420005,\n        -1.8132316,\n        -1.3271576,\n        -1.1727203,\n        -1.6932975,\n        -1.093756,\n        -0.8861891,\n        -1.9243885,\n        -1.7915244,\n        -1.0730273,\n        -1.5481496,\n        -1.1812528,\n        -0.95917827,\n        -1.4939461,\n        -1.3916106,\n        -1.5121248,\n        -1.7567997,\n        -1.620493,\n        -0.7535965,\n        -0.9707958,\n        -1.8692814,\n        -0.7235233,\n        -1.0412691,\n        -1.3920215,\n        -1.1896292,\n        -1.3844485,\n        -1.7028971,\n        -1.2931386,\n        -0.7415553,\n        -1.7628093,\n        -0.8847007,\n        -0.9979066,\n        -0.6709767,\n        -1.7224689,\n        -0.8326914,\n        -0.8062148,\n        -0.87184334,\n        -0.7651507,\n        -0.99336547,\n        -1.0146102,\n        -2.5240374,\n        -1.830741,\n        -1.0107177,\n        -1.7176067,\n        -1.2612122,\n        -1.3724835,\n        -3.8971236,\n        -1.6958439,\n        -1.310176,\n        -1.26883,\n        -3.2852666,\n        -2.4117398,\n        -5.1487803,\n        -1.9918764,\n        -4.3635464,\n        -2.1841486,\n        -3.4227571,\n        -3.6512878,\n        -5.3473387,\n        -3.6426513,\n        -1.1749694,\n        -3.8084428,\n        -1.797575,\n        -3.917274,\n        -1.3197335,\n        -3.105423,\n        -1.9836197,\n        -3.3455346,\n        -3.5547278,\n        -2.9551923,\n        -2.1559243,\n        -3.201745,\n        
-3.1743422,\n        -1.8750439,\n        -2.8562143,\n        -4.809888,\n        -2.040142,\n        -2.576594,\n        -2.1615236,\n        -2.6725488,\n        -3.3076863,\n        -1.9927764,\n        -3.0091994,\n        -2.5479405,\n        -1.5206244,\n        -1.3348264,\n        -1.2347344,\n        -1.0797039,\n        -2.975576,\n        -1.9721434,\n        -2.4176564,\n        -2.7389612,\n        -2.7175198,\n        -1.5420492,\n        -1.8847944,\n        -1.6513671,\n        -2.5460448,\n        -2.1374488,\n        -2.0714803,\n        -3.0631979,\n        -2.056451,\n        -2.042578,\n        -1.50944,\n        -1.6670105,\n        -4.353,\n        -1.9820719,\n        -2.9284163,\n        -1.8892688,\n        -4.1990786,\n        -2.1729066,\n        -1.5507696,\n        -2.7340138,\n        -1.0775504,\n        -1.3502195,\n        -2.0504923,\n        -5.231181,\n        -1.5180875,\n        -1.3710163,\n        -1.2219802,\n        -1.7035313,\n        -1.8204783,\n        -2.402197,\n        -1.8848188,\n        -1.4722091,\n        -1.6576673,\n        -1.3964651,\n        -6.033809,\n        -3.7256346,\n        -1.6946881,\n        -2.1770961,\n        -1.2699507,\n        -1.0702896,\n        -2.541309,\n        -5.0751104,\n        -2.4699755,\n        -2.5016696,\n        -1.0325359,\n        -2.0575097,\n        -1.0528655,\n        -0.79915005,\n        -3.4902232,\n        -2.510019,\n        -2.2903266,\n        -1.1804458,\n        -0.82398766,\n        -6.061527,\n        -2.3614256,\n        -0.8843897,\n        -1.9906607,\n        -5.030266,\n        -5.0068526,\n        -1.2827667,\n        -1.1882246,\n        -4.0000052,\n        -2.9999995,\n        -2.5603006,\n        -2.2915864\n      ],\n      \"pointIndex\": [\n        3,\n        504,\n        230,\n        115347595,\n        114781716,\n        88385758,\n        82551603,\n        81082979,\n        43380974,\n        10194344,\n        46512063,\n        
110078896,\n        94415135,\n        17090150,\n        1091252,\n        8607563,\n        93121216,\n        66403817,\n        74732784,\n        88677344,\n        108310888,\n        81147254,\n        53245757,\n        37363390,\n        19078022,\n        95335853,\n        69450267,\n        47217789,\n        99847234,\n        96872649,\n        76836844,\n        105708382,\n        64047755,\n        68216567,\n        106743551,\n        79846973,\n        86907060,\n        96454124,\n        104808962,\n        116530184,\n        243950,\n        110187486,\n        8357889,\n        69643767,\n        47947051,\n        34809077,\n        25305060,\n        39642089,\n        97101121,\n        41094885,\n        42168413,\n        44000776,\n        19255895,\n        1606917,\n        100649211,\n        61085956,\n        53954022,\n        36935615,\n        4489663,\n        58836135,\n        23305165,\n        64421908,\n        96644721,\n        88863796,\n        45998859,\n        84197218,\n        74033379,\n        77317836,\n        20708815,\n        81693082,\n        85518041,\n        13072922,\n        109596508,\n        98983742,\n        6003317,\n        108464322,\n        115642690,\n        120441323,\n        73830126,\n        252001\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 230,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -7386051442361988795\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5108797,\n        -0.53291243,\n        -0.5210462,\n        -0.54066086,\n        -0.62747353,\n        -0.5228788,\n        -0.53056675,\n        -0.857135,\n        -0.5431626,\n        -0.6728934,\n        -0.83623904,\n        -0.5277067,\n        -0.54691744,\n        
-0.5597804,\n        -0.5415357,\n        -1.0465266,\n        -0.9172645,\n        -0.6498583,\n        -0.5838935,\n        -0.7679667,\n        -0.83578706,\n        -0.86347896,\n        -0.8489478,\n        -0.58355063,\n        -0.5913145,\n        -0.67941654,\n        -0.5532097,\n        -0.57800925,\n        -0.6950984,\n        -1.0079625,\n        -0.93897146,\n        -1.0548553,\n        -1.0986328,\n        -0.990683,\n        -0.95704466,\n        -0.927313,\n        -0.707812,\n        -0.8876795,\n        -0.58693117,\n        -0.87326914,\n        -0.78118306,\n        -1.0092329,\n        -0.90121645,\n        -0.96073735,\n        -1.1489044,\n        -0.85746175,\n        -1.0691592,\n        -0.9054018,\n        -0.94487154,\n        -0.5967019,\n        -0.7073035,\n        -1.0018818,\n        -1.0238538,\n        -0.6573265,\n        -0.65281594,\n        -0.60852957,\n        -0.8033723,\n        -1.239034,\n        -1.9295261,\n        -3.1623762,\n        -1.8403192,\n        -1.3301795,\n        -2.7431705,\n        -1.4577744,\n        -1.1961795,\n        -1.1853992,\n        -2.1535192,\n        -1.0238626,\n        -2.1560311,\n        -1.0290521,\n        -0.9725088,\n        -1.5267935,\n        -1.254052,\n        -0.9736978,\n        -1.2086835,\n        -1.1516668,\n        -1.6799083,\n        -0.89718825,\n        -1.7337049,\n        -1.0380706,\n        -1.2615322,\n        -1.9453869,\n        -0.9431378,\n        -1.3229971,\n        -1.113262,\n        -0.9120092,\n        -0.940582,\n        -1.1862113,\n        -1.2428428,\n        -1.4028096,\n        -1.1804905,\n        -1.2219838,\n        -1.0842413,\n        -1.2987602,\n        -1.3231869,\n        -1.1256894,\n        -1.4694376,\n        -1.778964,\n        -1.2206122,\n        -0.7018415,\n        -0.7977892,\n        -1.0083444,\n        -0.80514693,\n        -1.1089602,\n        -1.1285571,\n        -1.4524838,\n        -1.9886453,\n        -1.9686787,\n   
     -0.8539721,\n        -1.1108284,\n        -1.7369735,\n        -0.75138754,\n        -1.2929288,\n        -2.8029969,\n        -1.8970212,\n        -2.9346006,\n        -3.1239185,\n        -2.3111954,\n        -4.161659,\n        -4.740618,\n        -3.930361,\n        -3.1946766,\n        -2.6492808,\n        -3.8204699,\n        -1.512944,\n        -3.6797109,\n        -3.3561983,\n        -2.0975733,\n        -2.804475,\n        -1.4382597,\n        -1.8145785,\n        -1.6634225,\n        -4.7425866,\n        -3.160053,\n        -3.949805,\n        -1.6202605,\n        -1.3382463,\n        -5.6329947,\n        -3.1294072,\n        -1.9707998,\n        -1.1553085,\n        -3.8239818,\n        -4.8079767,\n        -2.4293022,\n        -1.5714104,\n        -3.515261,\n        -2.4722693,\n        -1.2639914,\n        -1.5367815,\n        -2.372899,\n        -2.0205681,\n        -3.5453362,\n        -1.3925744,\n        -4.381768,\n        -2.2124214,\n        -2.1457138,\n        -1.6154038,\n        -2.1108005,\n        -8.22557,\n        -4.411373,\n        -1.0580919,\n        -1.2821277,\n        -1.3998395,\n        -2.4272609,\n        -2.3948019,\n        -0.96729654,\n        -1.3134754,\n        -1.676334,\n        -1.6012802,\n        -1.3334838,\n        -1.2328417,\n        -3.944815,\n        -2.678497,\n        -3.3102927,\n        -1.9348183,\n        -2.8151054,\n        -1.7924582,\n        -2.1709836,\n        -2.1577468,\n        -2.7874053,\n        -1.9171277,\n        -1.4504448,\n        -2.6907806,\n        -2.2421236,\n        -1.2558291,\n        -1.3776932,\n        -1.1472483,\n        -1.5318475,\n        -5.8697314,\n        -1.6595546,\n        -2.01325,\n        -1.4753066,\n        -3.3051827,\n        -2.9482744,\n        -1.6691211,\n        -3.6826656,\n        -2.018957,\n        -2.9914162,\n        -2.479594,\n        -2.6773448,\n        -0.7891612,\n        -1.6135672,\n        -1.5358614,\n        -1.0630399,\n     
   -2.6844327,\n        -2.140832,\n        -1.4059803,\n        -1.8687615,\n        -1.5211347,\n        -1.1563559,\n        -1.8074015,\n        -1.585874,\n        -2.905799,\n        -2.0970418,\n        -2.6970625,\n        -3.638173,\n        -2.7245076,\n        -1.0986912,\n        -1.3670276,\n        -2.4939995,\n        -1.2994958,\n        -3.5241516,\n        -1.7636971,\n        -1.6964252,\n        -1.2399389,\n        -1.4650214\n      ],\n      \"pointIndex\": [\n        0,\n        502,\n        226,\n        32292610,\n        109869361,\n        49676440,\n        38860639,\n        62592793,\n        600790,\n        23911739,\n        83647733,\n        116832183,\n        95541343,\n        17576862,\n        29948653,\n        57543406,\n        23404328,\n        71784850,\n        83143492,\n        92528201,\n        110518113,\n        14668457,\n        92928200,\n        332742,\n        103153798,\n        18665463,\n        121031463,\n        56018456,\n        1132292,\n        61525538,\n        66163859,\n        5447769,\n        89783462,\n        118557716,\n        6231473,\n        87963967,\n        114288691,\n        31571420,\n        6534378,\n        14235721,\n        35168244,\n        21282808,\n        37519649,\n        4883690,\n        102255530,\n        39341605,\n        8200693,\n        50840927,\n        45610825,\n        45886354,\n        48352851,\n        6983081,\n        53828108,\n        24397969,\n        3149000,\n        60837572,\n        59532798,\n        87757933,\n        65933620,\n        66841196,\n        24895074,\n        69969079,\n        109326827,\n        74783313,\n        77159216,\n        61306945,\n        82896750,\n        117164763,\n        110986551,\n        30674296,\n        91982112,\n        96851510,\n        29073451,\n        103960445,\n        13886321,\n        111283137,\n        33622972,\n        34148650,\n        425\n      ],\n      
\"storeSequenceIndicesEnabled\": false,\n      \"size\": 226,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 8161128184677511835\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.40340668,\n        -0.529459,\n        -0.5161869,\n        -0.53543,\n        -0.5572265,\n        -0.559943,\n        -0.5261264,\n        -0.65634567,\n        -0.8289195,\n        -0.5882762,\n        -0.6718383,\n        -0.5602667,\n        -0.59636503,\n        -0.52978766,\n        -0.5676633,\n        -0.66157055,\n        -0.67849123,\n        -0.8783929,\n        -0.87770337,\n        -0.63287824,\n        -0.7088296,\n        -0.730397,\n        -0.6751855,\n        -0.67252594,\n        -0.61771494,\n        -0.62258834,\n        -0.8742302,\n        -0.54998684,\n        -0.53258646,\n        -0.5697075,\n        -0.6338611,\n        -0.7282499,\n        -0.71893036,\n        -0.69252646,\n        -0.6978233,\n        -1.083951,\n        -1.0749961,\n        -0.9492825,\n        -1.0267339,\n        -0.8309118,\n        -1.0968392,\n        -1.0346222,\n        -0.71960473,\n        -0.753241,\n        -0.82036847,\n        -0.68786734,\n        -0.95036894,\n        -0.7919302,\n        -0.7842412,\n        -0.78362364,\n        -0.69734085,\n        -0.7361724,\n        -0.63951087,\n        -0.9256614,\n        -1.3222324,\n        -0.557387,\n        -0.5765443,\n        -0.8541396,\n        -1.5424199,\n        -1.239235,\n        -0.6581758,\n        -0.6957259,\n        -0.71216434,\n        -1.2122377,\n        -1.3424256,\n        -0.8595487,\n        -0.93395984,\n        -0.91291904,\n        -0.99467903,\n        -1.0259969,\n        -0.83747417,\n        -1.1415584,\n        -1.844381,\n        -2.6260114,\n        -1.8964473,\n        
-1.1886573,\n        -0.95166403,\n        -1.3016888,\n        -2.1763248,\n        -0.8594845,\n        -1.7636291,\n        -1.5953441,\n        -1.2344376,\n        -2.9971352,\n        -1.2809831,\n        -0.7776844,\n        -1.5833745,\n        -1.1377445,\n        -0.81377697,\n        -1.4259038,\n        -1.5685266,\n        -1.2828376,\n        -1.2463381,\n        -1.035027,\n        -1.023178,\n        -0.9225322,\n        -1.334532,\n        -1.2539599,\n        -0.8611026,\n        -1.0418408,\n        -1.2660669,\n        -1.0434586,\n        -0.82252276,\n        -0.9484263,\n        -0.7822511,\n        -0.7963581,\n        -0.94546413,\n        -1.4416524,\n        -1.972239,\n        -1.3854128,\n        -1.7213302,\n        -0.88175184,\n        -0.660204,\n        -1.0088569,\n        -0.6626971,\n        -1.4806819,\n        -3.8850646,\n        -3.4880378,\n        -2.0370426,\n        -1.7129023,\n        -2.4480736,\n        -0.9842973,\n        -1.1321272,\n        -1.3175576,\n        -1.7688314,\n        -0.99098766,\n        -0.8212509,\n        -2.069529,\n        -2.7128298,\n        -2.104214,\n        -1.9494729,\n        -1.5712934,\n        -1.9657266,\n        -2.8847177,\n        -2.0214329,\n        -1.1168336,\n        -1.2752433,\n        -1.5474607,\n        -2.0712857,\n        -1.228839,\n        -2.3795173,\n        -2.8112884,\n        -0.84520334,\n        -1.9762503,\n        -1.7711192,\n        -2.0245588,\n        -2.4114013,\n        -4.225437,\n        -5.1449366,\n        -3.5052965,\n        -3.2005363,\n        -1.7473437,\n        -4.5288105,\n        -1.3647547,\n        -1.6030688,\n        -3.399047,\n        -3.9606524,\n        -2.4988217,\n        -2.2541208,\n        -1.6598576,\n        -2.0318375,\n        -3.440064,\n        -2.4472992,\n        -2.583677,\n        -1.9249152,\n        -2.901985,\n        -2.3829575,\n        -3.055308,\n        -3.5783951,\n        -1.7583847,\n        
-1.3392538,\n        -1.5853678,\n        -1.7856835,\n        -4.027946,\n        -2.5141406,\n        -2.4793181,\n        -1.6539936,\n        -1.8417726,\n        -1.4020761,\n        -1.6960694,\n        -1.6217222,\n        -1.7380404,\n        -1.8801169,\n        -2.5759354,\n        -2.688679,\n        -1.4906636,\n        -2.4896166,\n        -2.0299616,\n        -1.1732154,\n        -2.218882,\n        -1.5876522,\n        -2.8258562,\n        -2.2313547,\n        -1.8463587,\n        -3.1047606,\n        -1.3781078,\n        -1.4334736,\n        -2.1798456,\n        -2.3027706,\n        -2.6381938,\n        -2.3633974,\n        -1.6888646,\n        -2.6922355,\n        -3.9937582,\n        -1.8913554,\n        -3.3459861,\n        -1.9445951,\n        -2.7985828,\n        -1.6192544,\n        -2.2790425,\n        -1.6679081,\n        -2.0862288,\n        -1.6981969,\n        -2.1887875,\n        -1.3072184,\n        -1.5203125,\n        -2.3198156,\n        -4.1349745,\n        -3.2535048,\n        -4.6283674,\n        -2.5832548,\n        -3.1003091,\n        -1.7998267,\n        -1.9538168,\n        -0.9321133,\n        -1.3000219,\n        -0.6603427,\n        -1.417374,\n        -4.4221253,\n        -1.0785013,\n        -1.8408641,\n        -2.0598917\n      ],\n      \"pointIndex\": [\n        1,\n        504,\n        232,\n        93371543,\n        83193340,\n        51609995,\n        89399250,\n        58411525,\n        8723328,\n        20440944,\n        46893266,\n        122901962,\n        7356524,\n        43010919,\n        19179752,\n        70365646,\n        119511225,\n        76853214,\n        84102788,\n        95436273,\n        1216784,\n        124196634,\n        4094230,\n        39663861,\n        41923224,\n        21344652,\n        21685201,\n        43458670,\n        15386476,\n        60096538,\n        103046171,\n        23183276,\n        11319123,\n        45373015,\n        12355583,\n        77390034,\n        
94445921,\n        99768692,\n        30435166,\n        117326285,\n        123597904,\n        3120255,\n        36522499,\n        16413966,\n        23712342,\n        50831074,\n        41869953,\n        31582697,\n        18123249,\n        45993707,\n        98907237,\n        50027373,\n        52003281,\n        76222963,\n        97052839,\n        55995355,\n        59301019,\n        41017833,\n        63884255,\n        71897870,\n        24273408,\n        71362132,\n        35199141,\n        46381904,\n        97006198,\n        81449733,\n        2703585,\n        123730816,\n        88570270,\n        28538466,\n        94720610,\n        88963368,\n        73814843,\n        102316085,\n        88128342,\n        110050359,\n        116313747,\n        118351755,\n        65273013,\n        127514189,\n        27\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 232,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 5911960361261524437\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.39453644,\n        -0.4091319,\n        -0.40276408,\n        -0.42154148,\n        -0.43336,\n        -0.40930772,\n        -0.49056512,\n        -0.4553714,\n        -0.6343011,\n        -0.43776572,\n        -0.46448955,\n        -0.49443203,\n        -0.49125093,\n        -0.55794704,\n        -0.52942365,\n        -0.5564705,\n        -0.65847504,\n        -0.69913536,\n        -0.75695574,\n        -0.71217597,\n        -0.60467654,\n        -0.49103096,\n        -0.57176715,\n        -0.5623783,\n        -0.5702394,\n        -0.5680598,\n        -0.52803683,\n        -0.5789208,\n        -0.62406635,\n        -0.5716927,\n        -1.0776572,\n        -0.74342746,\n        -0.7486334,\n        -0.69127804,\n        
-0.6608637,\n        -0.9031974,\n        -0.72085214,\n        -0.769788,\n        -0.77804446,\n        -0.7316041,\n        -0.95129424,\n        -0.7048584,\n        -0.68752366,\n        -0.6274702,\n        -0.78758854,\n        -0.57621306,\n        -0.980954,\n        -0.7966236,\n        -0.70016164,\n        -0.9555895,\n        -0.79694474,\n        -0.8355656,\n        -0.6856815,\n        -0.567936,\n        -0.8354194,\n        -0.6369338,\n        -0.81904274,\n        -0.7119923,\n        -0.86386496,\n        -0.7636028,\n        -1.0274584,\n        -1.2511095,\n        -1.3548596,\n        -1.2078041,\n        -1.0715795,\n        -1.3628885,\n        -0.895939,\n        -0.79696196,\n        -1.0516008,\n        -0.7873496,\n        -0.7411404,\n        -1.0266874,\n        -1.6383885,\n        -0.7790249,\n        -0.7705543,\n        -1.0220681,\n        -1.0377313,\n        -0.95866215,\n        -1.0646464,\n        -1.2318766,\n        -0.8416069,\n        -1.0857205,\n        -1.3508493,\n        -1.0250691,\n        -0.7554236,\n        -1.2693433,\n        -0.7089957,\n        -1.5169703,\n        -0.68931425,\n        -0.92620236,\n        -0.8353271,\n        -0.82917625,\n        -0.62339437,\n        -0.99650085,\n        -1.5622008,\n        -1.2203263,\n        -1.1878941,\n        -1.4765059,\n        -2.4610686,\n        -1.033241,\n        -1.1396645,\n        -1.6011978,\n        -1.1930758,\n        -0.9267733,\n        -0.9729251,\n        -1.3341185,\n        -1.0385826,\n        -0.6454081,\n        -0.60673964,\n        -1.0060118,\n        -0.9427047,\n        -0.9003368,\n        -0.90673447,\n        -1.2340496,\n        -1.0492711,\n        -3.363109,\n        -2.0330606,\n        -1.0982486,\n        -2.7784495,\n        -1.5517746,\n        -2.2327852,\n        -1.1796428,\n        -2.6232302,\n        -2.162269,\n        -3.54792,\n        -1.6273329,\n        -1.846263,\n        -1.3899765,\n        -4.4950037,\n    
    -2.3274353,\n        -2.5995972,\n        -3.1184244,\n        -1.998079,\n        -2.848405,\n        -1.6703528,\n        -2.4962456,\n        -1.6481466,\n        -3.440863,\n        -2.6396058,\n        -2.9867055,\n        -2.670095,\n        -1.41553,\n        -1.5924307,\n        -1.4283478,\n        -1.1941426,\n        -2.6631145,\n        -1.681505,\n        -2.669729,\n        -1.8716806,\n        -1.4485921,\n        -3.8915946,\n        -1.7940518,\n        -1.3624225,\n        -2.0018167,\n        -3.7898927,\n        -1.0225416,\n        -1.9008695,\n        -2.9503803,\n        -1.1070347,\n        -1.7413036,\n        -1.7370498,\n        -1.1614032,\n        -1.8687525,\n        -1.8824614,\n        -1.1254486,\n        -1.7149575,\n        -1.7517456,\n        -1.09711,\n        -2.1374192,\n        -1.3370602,\n        -1.3006828,\n        -6.7224092,\n        -1.5339332,\n        -1.0457886,\n        -1.276754,\n        -3.4331303,\n        -1.6579875,\n        -2.1001787,\n        -1.690451,\n        -1.8255048,\n        -1.8420224,\n        -0.90034497,\n        -1.0836391,\n        -2.4854763,\n        -2.548339,\n        -1.9398812,\n        -1.4311638,\n        -6.7015624,\n        -1.0492077,\n        -2.898579,\n        -3.2530568,\n        -2.267042,\n        -2.1777713,\n        -2.7854774,\n        -1.7165158,\n        -2.1922739,\n        -1.6294314,\n        -3.1127849,\n        -3.2658153,\n        -3.1945748,\n        -2.37653,\n        -1.6131859,\n        -1.9520547,\n        -2.718332,\n        -1.8190503,\n        -2.4957001,\n        -3.5247574,\n        -1.804782,\n        -1.4233923,\n        -2.1619806,\n        -1.8960294,\n        -1.3675936,\n        -1.4663973,\n        -1.6324245,\n        -1.3808581,\n        -2.4436696,\n        -1.645096,\n        -0.7788095,\n        -0.6247076,\n        -1.2705113,\n        -1.5623835,\n        -2.6446254,\n        -1.5811759,\n        -1.0504855,\n        -6.6227217,\n       
 -5.9884167\n      ],\n      \"pointIndex\": [\n        7,\n        504,\n        226,\n        39212145,\n        24172071,\n        63328689,\n        64391118,\n        80415529,\n        79126737,\n        81548709,\n        77773432,\n        113781713,\n        45052060,\n        63749217,\n        111353902,\n        111742931,\n        59065567,\n        113400660,\n        106333244,\n        92186730,\n        112305367,\n        5020307,\n        19122326,\n        13956955,\n        37768826,\n        72182442,\n        44295021,\n        48397330,\n        19062050,\n        105440754,\n        106754985,\n        62469862,\n        67227185,\n        71449425,\n        108893739,\n        84983029,\n        89703916,\n        95292207,\n        57488522,\n        119775888,\n        110360335,\n        105944426,\n        5519420,\n        23569782,\n        103807625,\n        14868430,\n        94940360,\n        38835649,\n        41730968,\n        43929060,\n        43492514,\n        36987043,\n        7288799,\n        60860404,\n        50942648,\n        38573793,\n        115194212,\n        54955231,\n        9713218,\n        10423051,\n        62160467,\n        64660088,\n        67093319,\n        16165124,\n        72556718,\n        97362277,\n        110787379,\n        82336653,\n        84240227,\n        87221624,\n        10841983,\n        107072852,\n        95670596,\n        27229074,\n        104610343,\n        114059877,\n        95223577,\n        122325223,\n        497\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 226,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 443867406360206419\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.46440938,\n        -0.55194354,\n        
-0.5503501,\n        -0.55612004,\n        -0.57011396,\n        -0.5687599,\n        -0.5504094,\n        -0.6050909,\n        -0.5642461,\n        -0.6262973,\n        -0.57370365,\n        -0.63136375,\n        -0.59516186,\n        -0.55939776,\n        -0.59046006,\n        -0.6264104,\n        -1.2096254,\n        -0.60545015,\n        -1.0312783,\n        -0.66549975,\n        -0.7972616,\n        -0.5882695,\n        -0.77537113,\n        -0.6631247,\n        -0.97287655,\n        -0.6211128,\n        -0.6168714,\n        -0.5831981,\n        -0.5755868,\n        -0.9301831,\n        -0.68822616,\n        -0.9176543,\n        -0.89416593,\n        -1.2748886,\n        -1.299621,\n        -0.8893791,\n        -0.6247632,\n        -1.1938516,\n        -1.4159937,\n        -0.79601777,\n        -0.81408983,\n        -0.8329688,\n        -0.8892465,\n        -0.660977,\n        -0.9649444,\n        -0.8232206,\n        -1.1116574,\n        -0.7901643,\n        -1.1673359,\n        -0.9826653,\n        -0.9795712,\n        -0.65056336,\n        -0.6972535,\n        -0.8124611,\n        -0.8433622,\n        -0.6048559,\n        -0.5874916,\n        -0.7510074,\n        -0.9282149,\n        -1.5678449,\n        -1.0508904,\n        -1.1219891,\n        -0.89590734,\n        -1.1569976,\n        -1.2163044,\n        -0.9758314,\n        -1.1721662,\n        -1.6305572,\n        -1.423851,\n        -2.877465,\n        -1.3352257,\n        -0.94389266,\n        -1.0725486,\n        -1.0377557,\n        -0.6702163,\n        -2.1633449,\n        -1.6787019,\n        -1.5197566,\n        -1.464606,\n        -0.84717834,\n        -0.8617492,\n        -1.1352758,\n        -0.8588444,\n        -1.3158804,\n        -0.93582493,\n        -1.3900374,\n        -1.312389,\n        -0.8095709,\n        -1.1310865,\n        -1.4500014,\n        -1.0954881,\n        -1.4290438,\n        -1.9153944,\n        -1.1448199,\n        -1.3486769,\n        -0.848019,\n        -1.28376,\n  
      -1.5707656,\n        -2.1327546,\n        -1.2360355,\n        -1.8001504,\n        -2.4112065,\n        -1.1716948,\n        -0.7562583,\n        -0.7529761,\n        -0.97947717,\n        -1.4603944,\n        -0.85283864,\n        -0.9359614,\n        -0.87537324,\n        -1.0744178,\n        -0.9669737,\n        -0.65526927,\n        -0.76069856,\n        -0.6492787,\n        -1.1887507,\n        -1.4968824,\n        -1.8659223,\n        -1.6839557,\n        -2.8952484,\n        -2.0116107,\n        -1.9978883,\n        -3.5571594,\n        -3.4723294,\n        -1.6517309,\n        -1.8540668,\n        -2.5876698,\n        -3.1918757,\n        -1.4802592,\n        -3.1979494,\n        -1.2262383,\n        -1.3654613,\n        -1.4973582,\n        -3.4507692,\n        -3.1694086,\n        -1.9254063,\n        -5.152074,\n        -1.553401,\n        -1.8380904,\n        -3.0006897,\n        -3.0137157,\n        -2.1162019,\n        -1.655828,\n        -1.031361,\n        -1.5780444,\n        -1.4910339,\n        -2.0867536,\n        -1.985997,\n        -1.4112707,\n        -1.3098238,\n        -0.8342073,\n        -2.2895734,\n        -2.759206,\n        -2.8878415,\n        -2.9131627,\n        -2.8809798,\n        -2.5492322,\n        -3.8534236,\n        -1.8139232,\n        -1.133239,\n        -3.4292166,\n        -6.1477556,\n        -2.3670046,\n        -3.3983068,\n        -1.2114067,\n        -3.470149,\n        -2.30582,\n        -5.7869906,\n        -2.0662296,\n        -1.5458041,\n        -2.3490396,\n        -3.2671423,\n        -1.6294608,\n        -2.9943771,\n        -2.3128247,\n        -2.9752448,\n        -3.2546005,\n        -1.285794,\n        -1.847425,\n        -2.8550897,\n        -1.7487998,\n        -1.1289887,\n        -1.5940181,\n        -2.1344016,\n        -2.2462327,\n        -2.1513886,\n        -3.020991,\n        -3.056209,\n        -2.1476986,\n        -1.449248,\n        -1.5552889,\n        -1.5682355,\n        
-1.3316531,\n        -3.897052,\n        -1.3796781,\n        -2.231988,\n        -1.6785682,\n        -2.6739948,\n        -3.0805538,\n        -3.0601883,\n        -3.3602982,\n        -3.3161924,\n        -2.250506,\n        -2.8370016,\n        -3.9633605,\n        -5.9724317,\n        -1.6216393,\n        -1.413277,\n        -0.8392533,\n        -5.599172,\n        -2.7745616,\n        -2.7916412,\n        -1.7649142,\n        -2.2872913,\n        -1.9738077,\n        -2.6074562,\n        -1.1445656,\n        -1.2890749,\n        -2.647072,\n        -2.0012999,\n        -1.7970166,\n        -2.9343374,\n        -2.818287,\n        -3.303542,\n        -0.97759485,\n        -2.3368385,\n        -2.182434,\n        -3.0812023,\n        -1.9679145,\n        -0.9874132,\n        -0.72272855,\n        -2.6502638,\n        -1.4007757,\n        -1.6466194\n      ],\n      \"pointIndex\": [\n        0,\n        504,\n        234,\n        69537992,\n        91445418,\n        19390374,\n        28720100,\n        24477262,\n        57926693,\n        19248800,\n        6294096,\n        107055478,\n        1769480,\n        89281685,\n        49819340,\n        14918544,\n        66508569,\n        19778600,\n        85497112,\n        34258159,\n        116497451,\n        124776842,\n        60003625,\n        7548891,\n        59501782,\n        103625696,\n        125092715,\n        52468681,\n        618331,\n        61076944,\n        105684622,\n        91178763,\n        11364797,\n        107376926,\n        31656572,\n        83561758,\n        120265579,\n        97611816,\n        14491424,\n        114217289,\n        123716365,\n        128784073,\n        15728235,\n        32719135,\n        183378,\n        42413085,\n        89850175,\n        71310800,\n        18559145,\n        60809821,\n        110463371,\n        106184965,\n        51872121,\n        102370607,\n        9800332,\n        57747978,\n        22822474,\n        103522097,\n       
 9064348,\n        11101780,\n        99851390,\n        117196965,\n        73336535,\n        75285846,\n        27693273,\n        28463006,\n        80378635,\n        80900038,\n        30152149,\n        96680559,\n        90712670,\n        96075623,\n        99012189,\n        102751726,\n        56593235,\n        110128911,\n        112942886,\n        34690435,\n        35691352,\n        35441887,\n        36498903\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 234,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 2055649794410205326\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.44288203,\n        -0.4733615,\n        -0.45990443,\n        -0.5049441,\n        -0.48364964,\n        -0.46929833,\n        -0.5951013,\n        -0.51562756,\n        -0.50766015,\n        -0.51539856,\n        -0.5068618,\n        -0.5359355,\n        -0.51409596,\n        -0.6012245,\n        -0.672219,\n        -0.5989203,\n        -0.53809875,\n        -0.5351609,\n        -0.53932685,\n        -0.5565911,\n        -0.52460414,\n        -0.774303,\n        -0.566129,\n        -0.54035103,\n        -0.7583755,\n        -0.5659491,\n        -0.5352195,\n        -0.68098015,\n        -0.6640686,\n        -0.7816395,\n        -0.77675265,\n        -0.8260454,\n        -0.6643803,\n        -0.8596022,\n        -0.6064849,\n        -0.6059143,\n        -0.68557113,\n        -0.58883345,\n        -0.6485081,\n        -0.6736849,\n        -0.7512264,\n        -0.5525414,\n        -0.7294364,\n        -0.79328084,\n        -1.1318378,\n        -0.6086408,\n        -0.73085433,\n        -0.620078,\n        -0.6123451,\n        -0.8122214,\n        -1.075476,\n        -0.62589544,\n        -0.6219365,\n        -0.8617798,\n        
-0.60795546,\n        -1.1461984,\n        -0.89067245,\n        -0.70753044,\n        -0.97933424,\n        -1.4291743,\n        -1.1285313,\n        -1.6885188,\n        -1.0900038,\n        -1.0275611,\n        -1.0758282,\n        -1.024274,\n        -1.3263488,\n        -2.243545,\n        -0.8787395,\n        -0.97567546,\n        -0.79980785,\n        -0.6283274,\n        -0.71997434,\n        -0.72288865,\n        -0.7504799,\n        -0.92405844,\n        -0.68834955,\n        -0.8254108,\n        -1.2518294,\n        -1.4317335,\n        -0.81669384,\n        -1.2866132,\n        -1.3988098,\n        -0.9238192,\n        -0.82263625,\n        -0.8600533,\n        -1.1017289,\n        -0.929503,\n        -0.8326041,\n        -1.9470408,\n        -1.1576751,\n        -1.2693574,\n        -0.828232,\n        -1.6521869,\n        -1.1554543,\n        -0.7606298,\n        -1.7863603,\n        -1.0608599,\n        -0.7879148,\n        -0.8942874,\n        -1.3608972,\n        -1.4805124,\n        -1.3453927,\n        -1.2434686,\n        -0.62647575,\n        -0.9385256,\n        -0.76255625,\n        -1.2914335,\n        -1.3336471,\n        -0.7012181,\n        -1.076786,\n        -1.3590081,\n        -1.7965043,\n        -1.2802029,\n        -1.2463863,\n        -0.9444799,\n        -0.8902152,\n        -5.1300464,\n        -1.9487566,\n        -2.5945091,\n        -3.9016354,\n        -1.3708203,\n        -1.7007787,\n        -5.8792505,\n        -2.924918,\n        -2.0084155,\n        -1.5516672,\n        -1.9452276,\n        -2.7344933,\n        -2.4761894,\n        -2.3559952,\n        -1.6918682,\n        -4.0521593,\n        -2.8841708,\n        -4.250247,\n        -2.6824002,\n        -2.351825,\n        -7.0725884,\n        -3.8799038,\n        -1.0702031,\n        -1.7500367,\n        -1.7154711,\n        -2.2885256,\n        -1.1017034,\n        -1.0053222,\n        -3.9765737,\n        -0.8603018,\n        -1.6386056,\n        -1.4199564,\n       
 -1.0428835,\n        -2.1775246,\n        -1.817723,\n        -1.5003532,\n        -4.018854,\n        -4.3493705,\n        -0.86794126,\n        -1.9145643,\n        -1.7242973,\n        -2.7774487,\n        -2.593774,\n        -2.8310463,\n        -1.1839824,\n        -0.99695164,\n        -1.3106467,\n        -2.2921367,\n        -2.6983166,\n        -1.6927524,\n        -2.2582834,\n        -1.862477,\n        -1.604147,\n        -1.1071069,\n        -1.5620903,\n        -1.6573658,\n        -3.5879345,\n        -1.8574893,\n        -5.450627,\n        -2.953317,\n        -1.379029,\n        -3.140232,\n        -2.237678,\n        -2.3773768,\n        -1.4420193,\n        -1.2076032,\n        -1.6367044,\n        -1.5738162,\n        -2.370365,\n        -2.294873,\n        -2.259818,\n        -3.3089173,\n        -2.3828115,\n        -1.4231418,\n        -2.0798078,\n        -3.65892,\n        -2.782997,\n        -2.4264176,\n        -3.1848116,\n        -2.5167139,\n        -2.035897,\n        -0.9078463,\n        -1.9608375,\n        -1.0440569,\n        -2.198517,\n        -3.9061434,\n        -2.4758701,\n        -2.5298085,\n        -2.2807584,\n        -1.8718891,\n        -1.7454989,\n        -1.5738424,\n        -1.5848371,\n        -1.9713155,\n        -1.3583566,\n        -1.9085646,\n        -0.78653663,\n        -0.91740644,\n        -1.4229441,\n        -4.3172007,\n        -2.259679,\n        -1.9394193,\n        -2.3271387,\n        -2.017966,\n        -1.5892746,\n        -1.1223392,\n        -3.2935107,\n        -1.9982659,\n        -2.696077,\n        -4.7974358,\n        -4.0544963,\n        -1.7201512,\n        -4.663512,\n        -2.5073416,\n        -2.0538328,\n        -1.1117074,\n        -1.6175225,\n        -1.0261378\n      ],\n      \"pointIndex\": [\n        0,\n        504,\n        235,\n        99588220,\n        76013627,\n        19457571,\n        85439945,\n        34908503,\n        53846803,\n        63291537,\n        
28617007,\n        56293943,\n        38760239,\n        45478888,\n        49224639,\n        20952121,\n        10945812,\n        70524642,\n        113401068,\n        89973135,\n        32545594,\n        121351398,\n        30164645,\n        41959089,\n        16114615,\n        34039935,\n        15397635,\n        65264377,\n        56470190,\n        60045792,\n        120239339,\n        65801832,\n        121529019,\n        72480908,\n        107004937,\n        81560839,\n        12924994,\n        92743100,\n        63497,\n        107566904,\n        116513017,\n        126651450,\n        37815476,\n        100255780,\n        41648880,\n        41052862,\n        44501779,\n        45860742,\n        23535023,\n        22280250,\n        48943274,\n        49573865,\n        51773427,\n        101774133,\n        20817324,\n        57752019,\n        59792965,\n        82798362,\n        10579199,\n        66130504,\n        112340598,\n        69028211,\n        40608197,\n        103680196,\n        73078149,\n        67610248,\n        28456353,\n        77633952,\n        83048960,\n        84438350,\n        89740547,\n        98945000,\n        93624260,\n        102291671,\n        124136951,\n        100718338,\n        50707467,\n        109734397,\n        118807444,\n        126147953,\n        128014075,\n        504\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 235,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 5184381769321948633\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5680312,\n        -0.57754153,\n        -0.5803335,\n        -0.58679545,\n        -0.5828035,\n        -0.58446527,\n        -0.59277546,\n        -0.6110242,\n        -0.6347451,\n        -0.650318,\n        
-0.5963477,\n        -0.589384,\n        -0.608581,\n        -0.6600299,\n        -0.65432173,\n        -0.618048,\n        -0.89475805,\n        -0.69706285,\n        -0.65788984,\n        -0.7013913,\n        -0.66922814,\n        -0.82567894,\n        -0.7393027,\n        -0.6538014,\n        -0.62519866,\n        -0.68216234,\n        -0.6155415,\n        -0.68521684,\n        -0.68252844,\n        -0.8635453,\n        -0.6732785,\n        -0.95870876,\n        -0.683223,\n        -1.0384604,\n        -1.0183295,\n        -1.1785916,\n        -0.7090885,\n        -0.89864945,\n        -1.1735743,\n        -0.99098986,\n        -1.2033477,\n        -0.8001452,\n        -1.0698832,\n        -1.0053589,\n        -1.1478246,\n        -0.82658064,\n        -0.7682891,\n        -1.2017244,\n        -0.71174425,\n        -0.6443983,\n        -0.67368025,\n        -0.7728953,\n        -0.8445695,\n        -0.74427897,\n        -0.75514364,\n        -0.7461769,\n        -0.74116397,\n        -0.82120323,\n        -1.0424049,\n        -1.4037803,\n        -1.8726203,\n        -0.74156517,\n        -1.0557835,\n        -0.96516573,\n        -1.1816691,\n        -1.1345637,\n        -0.8071123,\n        -1.1318005,\n        -1.5799372,\n        -1.0468409,\n        -1.3268329,\n        -1.1965047,\n        -1.5018703,\n        -1.9639885,\n        -0.9940779,\n        -1.2910422,\n        -1.6323619,\n        -1.2475739,\n        -1.5904983,\n        -2.8499577,\n        -1.687947,\n        -1.3974866,\n        -1.5153632,\n        -1.2589599,\n        -1.3481356,\n        -1.0943612,\n        -1.3469661,\n        -1.0530231,\n        -1.0110728,\n        -2.2912538,\n        -2.0522537,\n        -1.311908,\n        -1.1040617,\n        -1.49376,\n        -0.9379908,\n        -1.2910535,\n        -1.2647202,\n        -1.465746,\n        -1.0292729,\n        -0.903422,\n        -0.9057903,\n        -1.0987015,\n        -0.9196338,\n        -1.5492829,\n        -1.8007623,\n 
       -0.8688556,\n        -1.5213107,\n        -1.3652006,\n        -0.7674869,\n        -1.199262,\n        -1.0817316,\n        -0.7907398,\n        -0.8617374,\n        -1.2053959,\n        -0.7756612,\n        -0.8560713,\n        -0.8616877,\n        -1.3875729,\n        -1.4480209,\n        -7.3676667,\n        -1.7680417,\n        -1.9234877,\n        -2.000328,\n        -1.7633696,\n        -3.9967668,\n        -1.7751657,\n        -1.7586172,\n        -1.360004,\n        -2.5315728,\n        -1.2654521,\n        -2.6796257,\n        -2.3294647,\n        -2.5737264,\n        -3.0398452,\n        -1.5801363,\n        -1.390896,\n        -1.3436232,\n        -4.408353,\n        -2.7197933,\n        -5.5573773,\n        -1.6548759,\n        -1.8081745,\n        -4.581191,\n        -2.2083158,\n        -1.2077483,\n        -1.9272752,\n        -2.1264136,\n        -2.210442,\n        -2.9746046,\n        -1.8114252,\n        -1.5349617,\n        -5.21763,\n        -3.5900617,\n        -4.7764673,\n        -2.1318388,\n        -1.4026515,\n        -2.02728,\n        -4.707749,\n        -2.9943347,\n        -4.005021,\n        -2.996215,\n        -3.1028323,\n        -2.4414601,\n        -2.6545715,\n        -1.9498566,\n        -2.0314212,\n        -2.6523108,\n        -1.9968216,\n        -1.4356999,\n        -2.427331,\n        -1.6129284,\n        -1.7536944,\n        -1.2429045,\n        -2.0530457,\n        -6.1134977,\n        -3.2139742,\n        -1.5324388,\n        -1.9248765,\n        -2.0524924,\n        -3.6633136,\n        -2.5591948,\n        -3.6681902,\n        -3.4150784,\n        -4.0795193,\n        -3.0952582,\n        -1.2616416,\n        -3.3483067,\n        -3.7493348,\n        -1.9752245,\n        -1.8228238,\n        -2.7378254,\n        -2.3111594,\n        -1.3959323,\n        -3.6355662,\n        -1.5012057,\n        -1.8132061,\n        -1.6209233,\n        -1.9733783,\n        -2.1340463,\n        -1.2205851,\n        
-0.9665645,\n        -1.0465889,\n        -1.7400819,\n        -1.2885246,\n        -1.5000305,\n        -1.0893486,\n        -1.8212684,\n        -2.766409,\n        -2.371216,\n        -2.1498919,\n        -2.5711405,\n        -1.5781529,\n        -0.9767635,\n        -2.1542985,\n        -3.195587,\n        -1.8495193,\n        -1.589415,\n        -1.8028818,\n        -0.8964406,\n        -6.5694284,\n        -2.386041,\n        -1.7140924,\n        -2.8135862,\n        -3.413123,\n        -1.8383019,\n        -2.5924628,\n        -1.0788869,\n        -3.4721863,\n        -2.3472803,\n        -1.513369,\n        -2.005564,\n        -0.9147193\n      ],\n      \"pointIndex\": [\n        3,\n        504,\n        232,\n        38649302,\n        11990578,\n        81992637,\n        93291768,\n        65762442,\n        18485813,\n        61602633,\n        5179386,\n        102261781,\n        36540032,\n        41990296,\n        76448885,\n        54237680,\n        4518924,\n        67995138,\n        119087849,\n        95016431,\n        108065424,\n        18145236,\n        6621405,\n        82602739,\n        13689195,\n        34020832,\n        47915466,\n        60266361,\n        95866064,\n        22470217,\n        68911772,\n        23959189,\n        87592514,\n        123638180,\n        84840840,\n        88626199,\n        33952373,\n        109338161,\n        77079998,\n        113625126,\n        122206333,\n        33585290,\n        66678691,\n        40366891,\n        8381952,\n        86515696,\n        7458778,\n        74425191,\n        16869744,\n        109422026,\n        47664082,\n        73178736,\n        19053106,\n        51977784,\n        53968219,\n        12275988,\n        79571602,\n        103617253,\n        9721202,\n        60610567,\n        43523358,\n        62506069,\n        66909823,\n        68497670,\n        79771835,\n        71962373,\n        26767973,\n        85851873,\n        78799049,\n        
28153276,\n        48373087,\n        96705395,\n        82853479,\n        103905866,\n        12809656,\n        108756209,\n        112969720,\n        117158318,\n        121449827,\n        125021075,\n        501\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 232,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 9169886588161009142\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.53035605,\n        -0.5324966,\n        -0.55486125,\n        -0.5330728,\n        -0.5471217,\n        -0.58455336,\n        -0.5933521,\n        -0.5599782,\n        -0.6342549,\n        -0.6387794,\n        -0.55667377,\n        -0.6493626,\n        -0.74867827,\n        -0.6670967,\n        -0.6307774,\n        -0.6196332,\n        -0.5918868,\n        -0.65830445,\n        -0.6425075,\n        -0.8268777,\n        -0.6852247,\n        -0.56706643,\n        -0.6376095,\n        -0.70883864,\n        -0.9732487,\n        -0.7535966,\n        -0.76869935,\n        -0.7472471,\n        -0.923651,\n        -0.7573382,\n        -0.93107444,\n        -0.6483894,\n        -0.70469373,\n        -0.6645228,\n        -0.7695643,\n        -0.80381304,\n        -0.66245395,\n        -0.82613385,\n        -0.695198,\n        -0.9515421,\n        -1.010398,\n        -0.7056208,\n        -0.76953393,\n        -0.9928844,\n        -0.59132975,\n        -0.99380237,\n        -0.8515312,\n        -0.82218546,\n        -1.1088846,\n        -1.0138144,\n        -1.0554961,\n        -1.2125427,\n        -0.861188,\n        -0.77022,\n        -0.9146566,\n        -0.8259621,\n        -0.76882243,\n        -1.2034107,\n        -1.0809807,\n        -1.5401542,\n        -0.91514635,\n        -1.3669915,\n        -1.4481707,\n        -1.409995,\n        
-0.85730594,\n        -1.5939871,\n        -0.73895526,\n        -0.66953665,\n        -0.77782303,\n        -0.88488233,\n        -1.0499003,\n        -1.2570091,\n        -1.1976534,\n        -0.92151576,\n        -1.182133,\n        -0.833123,\n        -0.9230005,\n        -0.8967325,\n        -1.0337293,\n        -1.2166122,\n        -0.99116784,\n        -1.1490904,\n        -1.1511246,\n        -1.397479,\n        -0.9786536,\n        -1.402299,\n        -0.94586,\n        -1.2771218,\n        -1.0545266,\n        -0.59421927,\n        -1.1618001,\n        -1.8738767,\n        -1.1558522,\n        -1.8159966,\n        -0.89378375,\n        -0.8380161,\n        -1.3560284,\n        -2.3792744,\n        -1.461425,\n        -1.2379498,\n        -1.2818031,\n        -1.9932383,\n        -1.3166027,\n        -1.5687723,\n        -1.9454088,\n        -1.445117,\n        -0.9366784,\n        -0.92885256,\n        -1.3723916,\n        -1.2701558,\n        -1.0020772,\n        -1.0913596,\n        -1.3008364,\n        -1.4996603,\n        -0.7830359,\n        -1.7696719,\n        -1.9255613,\n        -3.2565742,\n        -1.7884704,\n        -6.904876,\n        -2.9134037,\n        -1.9650456,\n        -2.221382,\n        -2.0634215,\n        -1.5779783,\n        -2.0036862,\n        -2.2380946,\n        -2.1677172,\n        -4.0049834,\n        -1.6050944,\n        -3.2902942,\n        -4.6903667,\n        -3.661412,\n        -1.959162,\n        -2.357832,\n        -1.53947,\n        -2.2820132,\n        -2.1057246,\n        -2.0005455,\n        -3.5140362,\n        -4.0113697,\n        -2.8073394,\n        -1.3974141,\n        -3.510641,\n        -1.5597332,\n        -1.6886966,\n        -3.7408767,\n        -2.8858912,\n        -3.572505,\n        -1.8949933,\n        -1.8129742,\n        -4.6692724,\n        -1.0588484,\n        -2.3928235,\n        -2.532507,\n        -1.4463307,\n        -3.0277712,\n        -2.9297788,\n        -1.9894695,\n        -1.958042,\n 
       -1.8407285,\n        -1.2177694,\n        -1.1468236,\n        -1.2246511,\n        -1.1679093,\n        -1.5983938,\n        -2.6404345,\n        -1.760991,\n        -2.9862454,\n        -2.6671734,\n        -1.3666124,\n        -2.2798762,\n        -4.07843,\n        -1.3272638,\n        -1.1056348,\n        -1.3501083,\n        -2.066305,\n        -3.0409312,\n        -1.3775028,\n        -0.90984005,\n        -0.72415125,\n        -1.6538361,\n        -4.1680713,\n        -1.953995,\n        -5.421199,\n        -1.7286031,\n        -2.956821,\n        -1.9774354,\n        -2.0668619,\n        -1.656342,\n        -1.2232676,\n        -0.9585533,\n        -1.0880358,\n        -3.5506268,\n        -2.2162747,\n        -2.78535,\n        -2.4571283,\n        -2.4578626,\n        -1.8101616,\n        -1.5967966,\n        -1.3759114,\n        -1.5432861,\n        -2.9304342,\n        -2.0907807,\n        -2.9865646,\n        -1.413012,\n        -2.822878,\n        -2.1518707,\n        -1.6998991,\n        -2.7407525,\n        -2.152089,\n        -4.4406257,\n        -2.0288308,\n        -1.1662818,\n        -2.3149743,\n        -1.0508428,\n        -4.1577044,\n        -1.7287313,\n        -5.233291,\n        -2.2002838,\n        -2.4515307,\n        -3.343693,\n        -1.1925954,\n        -3.403616,\n        -1.1421292,\n        -2.327714,\n        -2.8356974,\n        -3.2215524,\n        -1.7986864,\n        -1.0376507,\n        -1.9582424,\n        -3.4708614\n      ],\n      \"pointIndex\": [\n        1,\n        504,\n        232,\n        95940876,\n        85658404,\n        48807258,\n        5729481,\n        37837239,\n        9161211,\n        22735646,\n        28106346,\n        68013783,\n        38134148,\n        7907680,\n        49992278,\n        54106613,\n        59102940,\n        69489239,\n        77880382,\n        49446833,\n        105355591,\n        118604499,\n        120370463,\n        40569547,\n        16849157,\n        
64352906,\n        47340019,\n        82652660,\n        20898060,\n        21545586,\n        22413604,\n        63804236,\n        66767259,\n        116482736,\n        31396222,\n        29627447,\n        101324628,\n        108731924,\n        119581943,\n        33754835,\n        30549495,\n        7112415,\n        44789791,\n        68914851,\n        16082884,\n        41912800,\n        98721568,\n        47027405,\n        41238752,\n        45725007,\n        18889598,\n        114310282,\n        105856109,\n        52174821,\n        9503028,\n        2075681,\n        91137023,\n        81857977,\n        58803769,\n        73782073,\n        61882364,\n        114571543,\n        41576065,\n        91505261,\n        72539750,\n        75591724,\n        77472155,\n        12287422,\n        83482431,\n        35221081,\n        2994602,\n        88829860,\n        95147664,\n        35610587,\n        32033118,\n        110962144,\n        115815305,\n        24617835,\n        122676595,\n        127514003,\n        142\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 232,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 7625825163713561172\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.68963563,\n        -0.71228534,\n        -0.69185126,\n        -0.72458655,\n        -0.73350304,\n        -0.70023793,\n        -0.6973863,\n        -0.7521709,\n        -0.74596405,\n        -0.75555176,\n        -0.7994502,\n        -0.70423424,\n        -0.71557814,\n        -0.80184543,\n        -0.7378255,\n        -0.90932745,\n        -0.8513464,\n        -0.945497,\n        -0.75618273,\n        -0.9176568,\n        -0.91030234,\n        -0.870406,\n        -0.8781088,\n        -0.7445866,\n        -0.7060583,\n   
     -0.8570121,\n        -0.71676576,\n        -0.9451282,\n        -0.98158306,\n        -1.0464047,\n        -1.2191277,\n        -1.2070265,\n        -1.1122887,\n        -0.91584504,\n        -0.9098384,\n        -1.0282311,\n        -1.0526453,\n        -1.0620815,\n        -0.9422277,\n        -0.98331505,\n        -1.0915213,\n        -0.9253102,\n        -0.972425,\n        -1.5254942,\n        -1.0299138,\n        -1.0033057,\n        -0.9877612,\n        -0.8424406,\n        -0.7661645,\n        -0.8719801,\n        -0.74254644,\n        -0.87919515,\n        -1.1128438,\n        -0.82937247,\n        -0.8437849,\n        -1.4884276,\n        -1.8097106,\n        -1.6915003,\n        -1.1752342,\n        -1.2706611,\n        -1.5071601,\n        -1.4045566,\n        -2.038409,\n        -2.151111,\n        -2.0421367,\n        -1.4022131,\n        -1.5389507,\n        -1.3170675,\n        -1.315749,\n        -0.93967783,\n        -1.2088668,\n        -1.3546461,\n        -1.1657493,\n        -1.3647672,\n        -1.3999888,\n        -1.6134369,\n        -1.1775569,\n        -1.5266851,\n        -1.5846884,\n        -1.1892252,\n        -2.184959,\n        -1.4332705,\n        -1.6650143,\n        -1.119504,\n        -1.1874212,\n        -1.0438701,\n        -1.1250359,\n        -1.9323465,\n        -2.3231916,\n        -1.4397689,\n        -1.2220843,\n        -1.2882428,\n        -1.0033833,\n        -1.0603511,\n        -0.98936176,\n        -1.4499718,\n        -0.9140737,\n        -1.0238861,\n        -1.1773385,\n        -0.9237867,\n        -1.4686022,\n        -0.97709936,\n        -0.75256306,\n        -1.2221575,\n        -1.3114357,\n        -1.1347244,\n        -1.7514151,\n        -1.123055,\n        -0.8995096,\n        -0.86474705,\n        -0.85333395,\n        -2.048599,\n        -1.9980121,\n        -2.3663135,\n        -1.9952893,\n        -2.4167542,\n        -1.9417686,\n        -2.6963432,\n        -1.3648587,\n        -3.931102,\n    
    -2.0329041,\n        -4.328194,\n        -2.0767705,\n        -1.6628162,\n        -5.6688666,\n        -3.5560164,\n        -2.79209,\n        -2.689386,\n        -3.5538812,\n        -3.9373045,\n        -2.5425057,\n        -1.4241658,\n        -2.6849244,\n        -2.719885,\n        -1.6209738,\n        -3.384303,\n        -2.4125133,\n        -2.8833256,\n        -1.3654975,\n        -1.449889,\n        -1.0091305,\n        -2.6887238,\n        -1.2313497,\n        -1.5814414,\n        -4.6353374,\n        -2.5800967,\n        -2.6386955,\n        -4.811471,\n        -2.0467079,\n        -1.5542631,\n        -4.6060424,\n        -1.8895761,\n        -1.7013047,\n        -1.4387131,\n        -1.9494822,\n        -2.21773,\n        -2.4674315,\n        -1.9776376,\n        -2.1961777,\n        -3.6641738,\n        -1.4983013,\n        -3.065605,\n        -2.5193455,\n        -2.4397542,\n        -1.6718928,\n        -1.8533777,\n        -3.6097572,\n        -2.178392,\n        -1.6004324,\n        -1.533436,\n        -1.2607406,\n        -1.5537279,\n        -3.823595,\n        -2.2444324,\n        -1.2052363,\n        -2.4344325,\n        -1.996442,\n        -2.5809674,\n        -2.397142,\n        -1.9402657,\n        -3.9277935,\n        -1.531284,\n        -1.3297614,\n        -1.4778779,\n        -1.8241446,\n        -2.1734576,\n        -1.2779739,\n        -1.3039699,\n        -1.2775682,\n        -3.7285767,\n        -1.7983359,\n        -2.3423522,\n        -3.0988562,\n        -5.8211617,\n        -1.0550694,\n        -5.147151,\n        -1.402445,\n        -1.5902166,\n        -4.214791,\n        -1.2044046,\n        -2.8841598,\n        -1.496543,\n        -1.6186411,\n        -2.2231202,\n        -3.8776445,\n        -1.3348498,\n        -3.221405,\n        -1.6098989,\n        -2.5724285,\n        -5.3377237,\n        -1.8548803,\n        -1.764812,\n        -1.3415471,\n        -5.20307,\n        -2.0851576,\n        -1.914553,\n        
-2.0306556,\n        -1.0157343,\n        -1.0076644,\n        -1.2212769,\n        -1.0830055,\n        -1.6060807,\n        -5.5612807\n      ],\n      \"pointIndex\": [\n        2,\n        503,\n        223,\n        39112803,\n        104990114,\n        81937957,\n        100213345,\n        38376824,\n        93871402,\n        106658479,\n        78636624,\n        124971297,\n        15024299,\n        17742985,\n        72679231,\n        55114284,\n        61605062,\n        80242238,\n        79029880,\n        97582134,\n        117911316,\n        39630890,\n        63755532,\n        15610385,\n        27111429,\n        44667628,\n        95898681,\n        55537610,\n        105882141,\n        57752310,\n        107102110,\n        71106035,\n        8477543,\n        75496573,\n        110934773,\n        88507968,\n        98972924,\n        112105035,\n        86434716,\n        120953345,\n        21224130,\n        34552766,\n        3241555,\n        15145986,\n        16709245,\n        4633963,\n        3355562,\n        43882594,\n        44439062,\n        46207160,\n        1501667,\n        48161858,\n        32358115,\n        23484559,\n        23372477,\n        56610622,\n        45983463,\n        92602974,\n        60857406,\n        63897868,\n        65522801,\n        68204117,\n        11338446,\n        80785638,\n        110850108,\n        77107000,\n        94909029,\n        86604853,\n        109040266,\n        106369672,\n        97466705,\n        103041026,\n        29702452,\n        114035833,\n        115899372,\n        118423772,\n        123722046,\n        501\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 223,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -1521360444406908161\n    },\n    {\n 
     \"version\": \"2.0\",\n      \"weight\": [\n        -0.448413,\n        -0.4563397,\n        -0.45389304,\n        -0.46477798,\n        -0.47320774,\n        -0.47462812,\n        -0.47083667,\n        -0.61239666,\n        -0.5819031,\n        -0.5328883,\n        -0.55435115,\n        -0.5672333,\n        -0.49760112,\n        -0.5436042,\n        -0.59967595,\n        -0.64196426,\n        -0.77108985,\n        -0.5990893,\n        -0.68164337,\n        -0.6658914,\n        -0.6779405,\n        -0.65547,\n        -0.57186633,\n        -0.7292096,\n        -0.6201713,\n        -0.5016417,\n        -0.5391519,\n        -0.5894515,\n        -0.67855585,\n        -0.793728,\n        -0.95858973,\n        -0.75544524,\n        -1.1236985,\n        -0.7778626,\n        -0.87012255,\n        -0.6094903,\n        -0.8075542,\n        -0.6905392,\n        -0.7119214,\n        -0.7561649,\n        -0.7637378,\n        -0.74645245,\n        -0.7054175,\n        -0.7976067,\n        -1.184815,\n        -0.679329,\n        -0.6759231,\n        -0.80786866,\n        -0.802765,\n        -0.73436123,\n        -0.7933314,\n        -0.7040496,\n        -0.5593558,\n        -0.58655614,\n        -0.60826343,\n        -0.6955069,\n        -0.79697454,\n        -2.0321658,\n        -2.3671505,\n        -1.2036948,\n        -0.91650933,\n        -1.2221347,\n        -1.4929575,\n        -0.82133824,\n        -0.99631953,\n        -1.2400419,\n        -2.2688756,\n        -0.8122746,\n        -1.5502583,\n        -0.95800006,\n        -0.9741957,\n        -0.91015464,\n        -1.0454623,\n        -1.6839831,\n        -0.8809304,\n        -1.0153738,\n        -0.81790936,\n        -1.3347377,\n        -0.85336673,\n        -1.419818,\n        -1.2422366,\n        -1.3848045,\n        -1.8259623,\n        -0.8008377,\n        -1.4634923,\n        -1.2378216,\n        -0.83597064,\n        -1.2431781,\n        -1.0476589,\n        -1.1947871,\n        -1.502235,\n        
-1.1875122,\n        -1.0802674,\n        -0.7131609,\n        -1.303804,\n        -0.93148845,\n        -0.868008,\n        -0.91230166,\n        -1.2311869,\n        -0.7778201,\n        -0.8125608,\n        -1.0481919,\n        -1.0332779,\n        -2.4180708,\n        -0.90919775,\n        -1.0201457,\n        -0.74089086,\n        -0.7694123,\n        -0.6068406,\n        -1.9599571,\n        -0.64965326,\n        -0.80325395,\n        -0.70115393,\n        -1.4920911,\n        -2.105182,\n        -2.593633,\n        -4.9075894,\n        -2.4500175,\n        -2.6866624,\n        -2.011585,\n        -1.2831956,\n        -1.6990656,\n        -5.3042397,\n        -4.074707,\n        -2.3969848,\n        -1.8422551,\n        -1.7336991,\n        -2.014161,\n        -1.1614549,\n        -2.387507,\n        -2.2531638,\n        -1.5358254,\n        -2.0135787,\n        -2.4276953,\n        -2.544838,\n        -0.8222214,\n        -1.783477,\n        -2.3671846,\n        -2.0942051,\n        -1.530744,\n        -3.109618,\n        -2.4571917,\n        -4.8672504,\n        -1.750401,\n        -1.1609975,\n        -1.5165246,\n        -2.134081,\n        -1.7671065,\n        -2.9051976,\n        -2.3550234,\n        -1.3915247,\n        -1.6891075,\n        -4.364541,\n        -1.8734646,\n        -1.9972665,\n        -1.9541153,\n        -1.9995427,\n        -1.3496068,\n        -0.99282694,\n        -2.723645,\n        -2.133502,\n        -1.6864717,\n        -1.82967,\n        -1.3975633,\n        -2.5430946,\n        -2.8813655,\n        -2.819163,\n        -2.3204596,\n        -1.1076525,\n        -1.9235603,\n        -1.9012809,\n        -1.8713926,\n        -1.4276859,\n        -5.034833,\n        -1.3538642,\n        -1.7771684,\n        -2.354052,\n        -3.5404406,\n        -1.6530912,\n        -2.0195994,\n        -2.6805522,\n        -3.3481433,\n        -2.3543854,\n        -1.8849224,\n        -1.7921944,\n        -1.9087023,\n        -1.5636692,\n      
  -4.559099,\n        -0.9331611,\n        -2.0087197,\n        -3.0887632,\n        -1.7429163,\n        -1.8492596,\n        -1.4279119,\n        -2.107058,\n        -1.9292601,\n        -1.2173969,\n        -1.7226368,\n        -1.288711,\n        -1.5266242,\n        -1.0676883,\n        -0.82271296,\n        -2.72016,\n        -1.95744,\n        -1.6681421,\n        -1.561834,\n        -1.139806,\n        -2.7807567,\n        -2.852466,\n        -1.5560211,\n        -2.8218806,\n        -2.6191962,\n        -3.2710078,\n        -1.9588827,\n        -0.9443455,\n        -1.0752059,\n        -2.6169057,\n        -1.3419697,\n        -3.6202824,\n        -2.6320822,\n        -2.3424454,\n        -3.1701627,\n        -1.705714,\n        -2.471587,\n        -1.706018,\n        -3.0725029,\n        -0.78821653,\n        -3.9174464,\n        -1.8377808\n      ],\n      \"pointIndex\": [\n        1,\n        502,\n        229,\n        124615864,\n        110763474,\n        66359894,\n        79907579,\n        35495826,\n        45985371,\n        23556059,\n        99951616,\n        111833388,\n        35827229,\n        64730104,\n        89025,\n        55394228,\n        87863787,\n        73136326,\n        80548010,\n        90110354,\n        31092250,\n        125987025,\n        27087925,\n        3716780,\n        20623733,\n        43597728,\n        45811303,\n        48424553,\n        4840676,\n        123094759,\n        4513018,\n        24238832,\n        31302989,\n        58364321,\n        27878880,\n        89627407,\n        54208096,\n        96030457,\n        6524879,\n        114400195,\n        49457760,\n        17319422,\n        15150999,\n        23999127,\n        111962595,\n        38632545,\n        64027745,\n        118741542,\n        17979860,\n        56032910,\n        45211702,\n        46460679,\n        20337475,\n        59822455,\n        95109119,\n        109122258,\n        21832209,\n        68020971,\n        
10293097,\n        86314633,\n        90878049,\n        87951177,\n        114630977,\n        75148188,\n        77180011,\n        78900649,\n        5601924,\n        93405592,\n        83673586,\n        10641235,\n        91171577,\n        91661234,\n        6995240,\n        30253916,\n        119912134,\n        109594041,\n        113369225,\n        120188293,\n        33002468,\n        501\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 229,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 4365738981533818552\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5989758,\n        -0.6030343,\n        -0.6106647,\n        -0.617829,\n        -0.6120543,\n        -0.6209751,\n        -0.6540577,\n        -0.7291006,\n        -0.6260681,\n        -0.6240438,\n        -0.63892895,\n        -0.6271658,\n        -0.6558473,\n        -0.67823005,\n        -0.6712329,\n        -0.7736877,\n        -0.8370473,\n        -0.8698825,\n        -0.63718224,\n        -0.66500527,\n        -0.76872224,\n        -0.7976186,\n        -0.64789164,\n        -0.75742877,\n        -0.7079197,\n        -0.66363484,\n        -0.7033005,\n        -0.6999278,\n        -0.7570301,\n        -0.8842918,\n        -0.86259925,\n        -1.0528697,\n        -0.9030894,\n        -1.0192664,\n        -0.87634534,\n        -0.97609395,\n        -0.870785,\n        -1.2165607,\n        -0.8950667,\n        -0.9456082,\n        -0.81416637,\n        -0.90334135,\n        -0.9257617,\n        -0.95053643,\n        -0.8269434,\n        -0.68671584,\n        -0.9238147,\n        -0.91530436,\n        -1.2250437,\n        -0.8648471,\n        -0.9400571,\n        -0.77502626,\n        -0.69389594,\n        -1.1458131,\n        -0.9694328,\n        
-0.7568269,\n        -1.0375473,\n        -0.8990032,\n        -1.1046243,\n        -1.6111062,\n        -1.2231795,\n        -1.7332932,\n        -1.2406415,\n        -1.0933986,\n        -1.4834079,\n        -1.3647826,\n        -1.162224,\n        -1.0776985,\n        -1.0275012,\n        -1.0436147,\n        -1.2653149,\n        -1.7329624,\n        -1.0180838,\n        -1.0981978,\n        -0.9358566,\n        -1.3720297,\n        -1.4376353,\n        -1.0709182,\n        -1.0828278,\n        -0.9824067,\n        -1.8678888,\n        -1.6504165,\n        -1.0274376,\n        -1.0305225,\n        -1.1328106,\n        -1.4048452,\n        -1.0147716,\n        -1.2578955,\n        -1.1658182,\n        -1.8569355,\n        -1.256529,\n        -0.8613851,\n        -0.73715734,\n        -1.231118,\n        -1.3888869,\n        -1.298621,\n        -1.3348144,\n        -1.2907434,\n        -1.3547943,\n        -0.91213906,\n        -2.81691,\n        -1.2689966,\n        -1.3123076,\n        -0.8410366,\n        -0.818469,\n        -0.95411783,\n        -0.85013014,\n        -1.292814,\n        -1.3674784,\n        -1.4650946,\n        -1.6974814,\n        -0.7731692,\n        -1.556783,\n        -1.9376053,\n        -2.6791103,\n        -2.8065603,\n        -3.4199686,\n        -2.632667,\n        -3.097106,\n        -2.398917,\n        -2.5683875,\n        -1.318025,\n        -3.004227,\n        -2.803569,\n        -1.7933978,\n        -2.2934864,\n        -2.2485514,\n        -1.731055,\n        -1.157685,\n        -1.9098945,\n        -2.593425,\n        -1.854552,\n        -3.8862267,\n        -1.2046897,\n        -1.6525576,\n        -1.6870998,\n        -1.3142855,\n        -3.2718213,\n        -2.055217,\n        -1.2397898,\n        -1.3642328,\n        -3.6667328,\n        -2.4019687,\n        -3.7105029,\n        -2.5529263,\n        -3.6775377,\n        -3.4969838,\n        -2.5784345,\n        -3.47127,\n        -2.0122714,\n        -1.2789313,\n        
-1.7605172,\n        -2.4059713,\n        -2.830982,\n        -1.5596879,\n        -1.1299579,\n        -2.445495,\n        -2.1350765,\n        -2.6100347,\n        -1.7369318,\n        -4.3140836,\n        -2.028551,\n        -1.9430747,\n        -2.796395,\n        -5.855211,\n        -5.55672,\n        -2.097489,\n        -2.3304756,\n        -2.0556016,\n        -2.3982108,\n        -1.8669431,\n        -2.6178248,\n        -2.3411436,\n        -1.0704573,\n        -1.0604006,\n        -2.2578304,\n        -2.3912299,\n        -1.3100142,\n        -1.2620088,\n        -1.9373451,\n        -2.2696261,\n        -1.309974,\n        -1.3542323,\n        -2.646746,\n        -0.89304143,\n        -2.8895712,\n        -1.4734464,\n        -4.183573,\n        -1.3531339,\n        -1.6352302,\n        -3.697477,\n        -2.8222086,\n        -3.847648,\n        -1.4322337,\n        -2.2488072,\n        -1.5600172,\n        -1.3510327,\n        -2.5411255,\n        -1.7665361,\n        -1.1097009,\n        -1.1876982,\n        -6.4440703,\n        -3.827908,\n        -1.6914281,\n        -2.3275604,\n        -4.6303954,\n        -2.2944593,\n        -1.0078675,\n        -0.9225348,\n        -1.9457649,\n        -2.338592,\n        -2.6975172,\n        -3.4240358,\n        -2.4394717,\n        -0.8670702,\n        -3.9153588,\n        -1.4039124,\n        -1.5496547,\n        -4.910925,\n        -1.6267688,\n        -2.5536485,\n        -2.1050537,\n        -2.210298,\n        -1.024319,\n        -1.2126342,\n        -1.8602599\n      ],\n      \"pointIndex\": [\n        1,\n        504,\n        226,\n        98429960,\n        112656297,\n        97879958,\n        97194275,\n        7373483,\n        50394127,\n        81304428,\n        11835750,\n        121966368,\n        35984071,\n        50958101,\n        84195903,\n        24589368,\n        64319713,\n        100139347,\n        87559209,\n        107136641,\n        117838144,\n        32701006,\n        
35765779,\n        25433494,\n        25147855,\n        46773108,\n        18929866,\n        66570036,\n        57057252,\n        60835908,\n        22833967,\n        70319399,\n        38498169,\n        93120334,\n        86293692,\n        21093512,\n        22041922,\n        106002956,\n        64746981,\n        122673071,\n        40642181,\n        37877243,\n        7182531,\n        36374056,\n        82994547,\n        90761342,\n        102959872,\n        45036745,\n        46067631,\n        48358483,\n        18563435,\n        29229391,\n        19184263,\n        113583996,\n        56503154,\n        59226275,\n        123570234,\n        22119290,\n        64825232,\n        127647664,\n        69875154,\n        98054970,\n        58565901,\n        74327999,\n        76273637,\n        104065828,\n        1185237,\n        88841019,\n        92894678,\n        16534084,\n        105157004,\n        44909320,\n        122969354,\n        112214521,\n        30448094,\n        33692792,\n        49009014,\n        125650218,\n        79\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 226,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -3752382720585822890\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.35257456,\n        -0.63316864,\n        -0.6249559,\n        -0.644586,\n        -0.70678765,\n        -0.64916795,\n        -0.6261788,\n        -0.7028251,\n        -0.68006915,\n        -0.71244913,\n        -0.77226007,\n        -0.6599282,\n        -0.72423315,\n        -0.64820457,\n        -0.86892164,\n        -0.80033886,\n        -0.9883338,\n        -0.7176925,\n        -0.8587724,\n        -0.7907359,\n        -0.7907442,\n        -0.8138974,\n        -0.8363504,\n        -0.8191511,\n     
   -0.7210162,\n        -0.8011215,\n        -0.7275551,\n        -0.6740003,\n        -0.76122135,\n        -0.9360815,\n        -1.143798,\n        -1.069601,\n        -0.89091927,\n        -1.4255719,\n        -1.0564026,\n        -0.7504338,\n        -0.74547905,\n        -0.8932367,\n        -0.9149731,\n        -0.89604896,\n        -0.89056855,\n        -1.0763606,\n        -1.2780377,\n        -0.814832,\n        -0.87155193,\n        -1.1978128,\n        -0.9658152,\n        -0.91909224,\n        -1.1020787,\n        -0.9806116,\n        -0.80837256,\n        -1.121417,\n        -0.959307,\n        -0.82256985,\n        -0.78294426,\n        -0.8547436,\n        -0.6911423,\n        -0.9919269,\n        -0.9217603,\n        -1.1567005,\n        -1.0467317,\n        -1.1764008,\n        -2.3090951,\n        -1.9561859,\n        -1.4364341,\n        -0.9376694,\n        -1.1808302,\n        -1.6580477,\n        -1.5421588,\n        -1.2061565,\n        -1.3012215,\n        -1.1677957,\n        -1.6640215,\n        -0.9539052,\n        -0.9304223,\n        -1.4524605,\n        -1.1548522,\n        -3.087163,\n        -0.9651231,\n        -1.1932969,\n        -1.7018605,\n        -1.1340724,\n        -1.1854857,\n        -1.1199478,\n        -1.6968377,\n        -1.7598923,\n        -1.5454631,\n        -0.83821553,\n        -1.3094827,\n        -0.9318102,\n        -1.0099014,\n        -1.4364564,\n        -1.6273265,\n        -1.3257332,\n        -1.6213363,\n        -1.0319413,\n        -1.9782693,\n        -1.5172966,\n        -1.8712493,\n        -1.3883024,\n        -1.1409416,\n        -1.5943706,\n        -1.3920141,\n        -1.2470638,\n        -1.3488907,\n        -1.2877753,\n        -2.3829832,\n        -1.0140915,\n        -0.86199796,\n        -1.969369,\n        -1.2389947,\n        -1.1190457,\n        -1.0083293,\n        -1.5217599,\n        -2.4827971,\n        -3.2721095,\n        -1.832073,\n        -1.0300539,\n        -1.3891287,\n      
  -4.1304526,\n        -2.2308176,\n        -1.1684897,\n        -1.3797816,\n        -1.6611247,\n        -3.9026396,\n        -2.3507211,\n        -2.3537116,\n        -4.3257017,\n        -3.9033318,\n        -3.0079002,\n        -2.1621022,\n        -3.0395641,\n        -1.1423059,\n        -2.8617375,\n        -3.5767941,\n        -2.289676,\n        -4.3020816,\n        -2.492416,\n        -1.7381042,\n        -1.8634133,\n        -2.2264454,\n        -2.779647,\n        -3.4513872,\n        -1.451247,\n        -1.6320184,\n        -5.162463,\n        -2.647772,\n        -1.6107192,\n        -1.6144214,\n        -1.3170866,\n        -2.3347747,\n        -1.5668651,\n        -1.5948402,\n        -2.1280186,\n        -2.3939443,\n        -4.045837,\n        -3.275819,\n        -1.6477028,\n        -1.5976466,\n        -2.7628117,\n        -2.4815483,\n        -2.3328712,\n        -3.8712041,\n        -2.2842953,\n        -1.4850677,\n        -2.7312644,\n        -2.5408404,\n        -2.7129276,\n        -2.680968,\n        -1.9831715,\n        -2.7402458,\n        -1.7652198,\n        -2.2835853,\n        -1.6363691,\n        -2.0129876,\n        -2.5755525,\n        -2.2679524,\n        -2.9405003,\n        -1.3456973,\n        -1.4536569,\n        -1.5227104,\n        -6.832543,\n        -1.6134499,\n        -2.1005044,\n        -1.5718675,\n        -2.4156106,\n        -1.9933846,\n        -1.3972045,\n        -1.3527149,\n        -1.6452624,\n        -1.8692507,\n        -5.578517,\n        -2.2867987,\n        -2.0242627,\n        -2.273815,\n        -1.6365331,\n        -1.7262317,\n        -4.741056,\n        -5.9371395,\n        -1.6308647,\n        -3.7583308,\n        -3.0203693,\n        -1.697553,\n        -3.1482482,\n        -1.7286578,\n        -1.5627481,\n        -2.9239838,\n        -5.917072,\n        -1.7130485,\n        -1.6678938,\n        -1.4424074,\n        -1.5029242,\n        -1.4068965,\n        -2.7383819,\n        -3.7690282,\n     
   -1.0258008,\n        -4.613188,\n        -1.9295435,\n        -2.5107205,\n        -4.164117,\n        -2.1205091,\n        -2.5135174,\n        -1.7840471,\n        -2.2793093,\n        -1.6349511,\n        -1.6482064,\n        -1.0658326,\n        -2.3246038,\n        -2.2873054\n      ],\n      \"pointIndex\": [\n        0,\n        503,\n        229,\n        47003035,\n        91960546,\n        53637672,\n        28600765,\n        38427429,\n        40526672,\n        26033828,\n        115488365,\n        112303677,\n        49858696,\n        42759117,\n        49676271,\n        21699959,\n        64819169,\n        74063572,\n        88056669,\n        97891273,\n        108932556,\n        12206832,\n        37949326,\n        61758732,\n        43266547,\n        46161538,\n        49251921,\n        20045725,\n        16366498,\n        61588018,\n        31034449,\n        69264521,\n        74375881,\n        51199646,\n        86387221,\n        29430627,\n        95950547,\n        104463550,\n        109041831,\n        115801990,\n        72897011,\n        32290592,\n        16076766,\n        66841640,\n        40804401,\n        41674410,\n        17596219,\n        10670356,\n        18382075,\n        26510078,\n        48722314,\n        51585602,\n        53045444,\n        2025613,\n        21195801,\n        56912908,\n        61279871,\n        125705476,\n        23011045,\n        66571601,\n        75174384,\n        71934978,\n        11471,\n        62130061,\n        98582289,\n        100499662,\n        86025852,\n        88381278,\n        39302271,\n        57339052,\n        95444371,\n        101036885,\n        103842701,\n        106390806,\n        38130557,\n        112724572,\n        120471251,\n        124260657,\n        35299134,\n        503\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 229,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 
1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": 3134929515649909125\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -0.5317006,\n        -0.5549443,\n        -0.55005306,\n        -0.58047175,\n        -0.5792419,\n        -0.5618388,\n        -0.55156845,\n        -0.59663844,\n        -0.6457649,\n        -0.61152357,\n        -0.6713033,\n        -0.85364294,\n        -0.6575595,\n        -0.6192772,\n        -0.7174029,\n        -0.68995684,\n        -0.6509831,\n        -0.6804296,\n        -0.8823458,\n        -0.6229239,\n        -0.63748795,\n        -0.6725953,\n        -0.77007604,\n        -0.86289036,\n        -0.87573314,\n        -0.9368412,\n        -0.6730294,\n        -0.72140056,\n        -0.70452213,\n        -0.85281587,\n        -0.778473,\n        -0.80166095,\n        -0.8534091,\n        -0.97973895,\n        -0.72902834,\n        -1.0171428,\n        -0.7279368,\n        -0.8837121,\n        -0.9821047,\n        -0.81776035,\n        -0.71419483,\n        -0.87666404,\n        -0.6967305,\n        -0.686794,\n        -0.83897287,\n        -1.0782491,\n        -0.79766566,\n        -1.0869834,\n        -0.9266605,\n        -0.96625125,\n        -1.3254075,\n        -0.990544,\n        -0.95162743,\n        -0.68006086,\n        -0.94362134,\n        -0.91446745,\n        -0.76033175,\n        -0.82099503,\n        -1.98926,\n        -0.9402286,\n        -1.1041036,\n        -1.4840512,\n        -1.3193669,\n        -1.1383134,\n        -1.0716103,\n        -1.353175,\n        -1.2287694,\n        -1.0996752,\n        -1.3116969,\n        -0.8533949,\n        -0.78780127,\n        -1.2678065,\n        -1.3051043,\n        -0.892428,\n        -1.0663462,\n        -1.2590696,\n        -1.0059689,\n        -1.5622548,\n        -1.3429893,\n        -0.9912661,\n        -0.91426957,\n        -0.89937264,\n        
-0.81522775,\n        -0.8956658,\n        -0.9297555,\n        -1.1325147,\n        -2.0525374,\n        -1.4441347,\n        -0.8413736,\n        -1.7260334,\n        -0.84645754,\n        -1.1753869,\n        -1.0894531,\n        -1.2714784,\n        -1.0054822,\n        -1.9432375,\n        -1.4703784,\n        -1.0627242,\n        -1.0848396,\n        -1.6313579,\n        -1.2841729,\n        -2.3314505,\n        -1.6599146,\n        -1.5145736,\n        -1.1292386,\n        -1.1420798,\n        -1.1882722,\n        -0.7650141,\n        -0.8330598,\n        -1.0279738,\n        -1.5063119,\n        -1.0402813,\n        -1.673066,\n        -0.855341,\n        -0.8344414,\n        -0.8772748,\n        -3.3096359,\n        -2.1580143,\n        -3.2406547,\n        -1.2543756,\n        -1.3132578,\n        -1.7253371,\n        -1.2502601,\n        -1.4913716,\n        -1.6956722,\n        -1.6008805,\n        -3.4310803,\n        -4.0316906,\n        -1.3139403,\n        -1.2990541,\n        -2.7985826,\n        -3.0076113,\n        -2.204242,\n        -2.1400445,\n        -1.8365418,\n        -1.5423521,\n        -2.2629874,\n        -1.9485801,\n        -2.8632088,\n        -1.4487959,\n        -1.7835891,\n        -1.3249984,\n        -0.8726291,\n        -6.6406846,\n        -2.2428896,\n        -3.2130282,\n        -1.9984459,\n        -2.1642005,\n        -2.2802176,\n        -2.3029919,\n        -1.7716421,\n        -1.2956859,\n        -2.674018,\n        -1.4789615,\n        -1.2592919,\n        -1.651695,\n        -2.7735283,\n        -2.754395,\n        -5.219823,\n        -1.8212689,\n        -1.5849502,\n        -1.5373638,\n        -2.1230855,\n        -2.006167,\n        -1.95454,\n        -1.5437725,\n        -3.7523682,\n        -1.6756934,\n        -1.1302894,\n        -2.3797457,\n        -1.5028995,\n        -1.2654449,\n        -3.4029913,\n        -2.4615457,\n        -2.1503222,\n        -2.4433656,\n        -3.428305,\n        -2.0373065,\n 
       -1.5127231,\n        -1.9826941,\n        -3.430179,\n        -2.1248314,\n        -1.0785834,\n        -1.3342285,\n        -3.162054,\n        -1.7133318,\n        -2.4264777,\n        -2.7424254,\n        -2.205474,\n        -2.1188297,\n        -1.0416778,\n        -3.8446963,\n        -3.152632,\n        -1.5539855,\n        -2.4188478,\n        -1.0664158,\n        -5.486928,\n        -1.2196403,\n        -1.6307973,\n        -2.1837428,\n        -4.4910727,\n        -1.627812,\n        -1.5182625,\n        -2.5001745,\n        -2.4735217,\n        -4.885121,\n        -2.1104615,\n        -1.761913,\n        -1.5156634,\n        -3.1595268,\n        -2.6720266,\n        -2.3196728,\n        -3.2704804,\n        -3.718801,\n        -1.2293067,\n        -1.98525,\n        -1.442857,\n        -2.399521,\n        -1.6830201,\n        -1.3301995,\n        -1.9729538,\n        -5.5065384,\n        -2.9774437,\n        -3.094193,\n        -1.7231919,\n        -2.0231786,\n        -1.9183023,\n        -1.1119088,\n        -1.0021992,\n        -1.0839311,\n        -1.6686866,\n        -2.8646417\n      ],\n      \"pointIndex\": [\n        1,\n        502,\n        232,\n        30125521,\n        104207869,\n        11501672,\n        117074017,\n        7291457,\n        63508587,\n        23048853,\n        117747004,\n        110069385,\n        49195970,\n        41410132,\n        1528660,\n        90821874,\n        10375556,\n        24571484,\n        119895067,\n        37777908,\n        102803209,\n        122503004,\n        84265154,\n        16301591,\n        74151424,\n        83856748,\n        24283422,\n        107239937,\n        52268400,\n        4644238,\n        60393661,\n        23482961,\n        65935747,\n        70951942,\n        73063505,\n        80420200,\n        28404641,\n        118557453,\n        30463815,\n        125460264,\n        121125580,\n        34340303,\n        86574979,\n        37513098,\n        24156390,\n 
       48742358,\n        77536476,\n        42673052,\n        118237420,\n        113489827,\n        46483872,\n        40120324,\n        21519222,\n        51163760,\n        22531470,\n        53478866,\n        55841307,\n        57247299,\n        60599982,\n        71139165,\n        113963557,\n        64030350,\n        111083161,\n        2400827,\n        10908098,\n        71356663,\n        105361320,\n        90624850,\n        124964176,\n        72753707,\n        92804731,\n        113839913,\n        97210899,\n        66840182,\n        103525693,\n        111846083,\n        108929408,\n        116239059,\n        69030844,\n        125742944,\n        27\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 232,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0E-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 504,\n      \"compressed\": true,\n      \"randomSeed\": -6734892575936596101\n    }\n  ],\n  \"compactRandomCutTreeStates\": [\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1035009826,\n          250599463,\n          603060643,\n          754644337,\n          891416226,\n          782146283,\n          378713454,\n          1056620465,\n          177986421,\n          928036085,\n          1071848943,\n          501577126,\n          898733163,\n          254781898,\n          53861341,\n          509206381,\n          333917169,\n          959170671,\n          437304017,\n          659733574,\n          389488302,\n          1021681109,\n          82354001,\n          762926541,\n      
    711943918,\n          735524973,\n          1005665057,\n          320935793,\n          1029885382,\n          573999574,\n          483525999,\n          72393826,\n          1050637815,\n          175233779,\n          1013619019,\n          69921914,\n          199951919,\n          23503,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          52,\n          -120,\n          38,\n          66,\n          -105,\n          91,\n          -66,\n          66,\n          -119,\n          -123,\n          -128,\n          66,\n          -112,\n          -25,\n          76,\n          64,\n          -49,\n          79,\n          79,\n          66,\n          -93,\n          20,\n          -11,\n          66,\n          90,\n          87,\n          -118,\n          66,\n          -83,\n          119,\n          41,\n          66,\n          -82,\n          -112,\n          -109,\n          66,\n          68,\n          -88,\n          -34,\n          66,\n          -61,\n          -42,\n          101,\n          66,\n          113,\n          3,\n          -70,\n          66,\n          127,\n          51,\n          -88,\n          66,\n          -104,\n          -22,\n          -59,\n          66,\n          -82,\n          -85,\n          -15,\n          66,\n          123,\n          -105,\n          34,\n          66,\n          115,\n          52,\n          98,\n          66,\n          -116,\n          -128,\n          -82,\n          66,\n          -95,\n          49,\n          -125,\n          65,\n          -73,\n          19,\n          -122,\n          66,\n          -113,\n          -121,\n          34,\n          66,\n          -110,\n          8,\n          1,\n          66,\n          101,\n          25,\n          -6,\n          66,\n          -88,\n          7,\n          -59,\n          66,\n          -77,\n          2,\n          27,\n          66,\n          
-109,\n          -34,\n          54,\n          66,\n          95,\n          -7,\n          71,\n          66,\n          -119,\n          30,\n          41,\n          66,\n          -74,\n          -53,\n          -123,\n          66,\n          -63,\n          102,\n          -15,\n          66,\n          84,\n          105,\n          70,\n          65,\n          4,\n          -25,\n          -22,\n          66,\n          -65,\n          -5,\n          -112,\n          66,\n          -116,\n          -47,\n          -119,\n          66,\n          -108,\n          9,\n          -92,\n          66,\n          91,\n          -44,\n          88,\n          66,\n          -65,\n          -21,\n          19,\n          66,\n          67,\n          38,\n          75,\n          66,\n          -114,\n          -113,\n          20,\n          65,\n          -38,\n          72,\n          72,\n          66,\n          -109,\n          3,\n          -88,\n          66,\n          86,\n          -27,\n          -95,\n          66,\n          -111,\n          -60,\n          -113,\n          66,\n          122,\n          -1,\n          -112,\n          66,\n          96,\n          -8,\n          33,\n          66,\n          -109,\n          -88,\n          -122,\n          66,\n          97,\n          -14,\n          89,\n          66,\n          115,\n          44,\n          65,\n          66,\n          -122,\n          -114,\n          -71,\n          66,\n          76,\n          -51,\n          3,\n          66,\n          -78,\n          -88,\n          -93,\n          66,\n          90,\n          31,\n          -105,\n          66,\n          -104,\n          -74,\n          37,\n          66,\n          -115,\n          -96,\n          73,\n          66,\n          -113,\n          111,\n          24,\n          66,\n          -126,\n          -58,\n          61,\n          66,\n          117,\n          -80,\n          27,\n          65,\n          
-38,\n          125,\n          52,\n          66,\n          -99,\n          117,\n          47,\n          66,\n          81,\n          49,\n          -33,\n          66,\n          93,\n          -100,\n          75,\n          66,\n          110,\n          62,\n          -54,\n          66,\n          82,\n          -51,\n          91,\n          66,\n          -71,\n          -93,\n          -22,\n          66,\n          -64,\n          51,\n          0,\n          66,\n          75,\n          78,\n          -62,\n          66,\n          -73,\n          64,\n          84,\n          66,\n          -95,\n          -53,\n          -83,\n          66,\n          -92,\n          53,\n          63,\n          66,\n          -65,\n          -15,\n          103,\n          66,\n          -76,\n          -88,\n          -113,\n          66,\n          -76,\n          -128,\n          -28,\n          66,\n          114,\n          113,\n          117,\n          66,\n          74,\n          68,\n          88,\n          66,\n          -111,\n          -73,\n          -124,\n          66,\n          72,\n          -3,\n          -80,\n          66,\n          -98,\n          -117,\n          86,\n          66,\n          -64,\n          80,\n          60,\n          66,\n          -95,\n          50,\n          121,\n          66,\n          -61,\n          -75,\n          -94,\n          66,\n          -78,\n          -20,\n          96,\n          66,\n          83,\n          -82,\n          -84,\n          66,\n          -70,\n          12,\n          -59,\n          66,\n          81,\n          -3,\n          56,\n          66,\n          -112,\n          22,\n          94,\n          66,\n          -64,\n          100,\n          -27,\n          66,\n          -83,\n          -17,\n          -68,\n          66,\n          116,\n          1,\n          44,\n          65,\n          -97,\n          93,\n          -34,\n          66,\n          -113,\n         
 -63,\n          -95,\n          66,\n          -116,\n          71,\n          -23,\n          66,\n          105,\n          -119,\n          -108,\n          66,\n          78,\n          -52,\n          122,\n          66,\n          75,\n          89,\n          69,\n          66,\n          -106,\n          88,\n          54,\n          66,\n          88,\n          81,\n          82,\n          65,\n          -15,\n          9,\n          -44,\n          65,\n          -2,\n          -118,\n          -32,\n          66,\n          -62,\n          71,\n          -79,\n          66,\n          -64,\n          84,\n          99,\n          66,\n          -62,\n          -72,\n          -101,\n          66,\n          -115,\n          118,\n          69,\n          66,\n          78,\n          93,\n          -70,\n          66,\n          80,\n          69,\n          60,\n          66,\n          -76,\n          49,\n          22,\n          66,\n          84,\n          -25,\n          48,\n          66,\n          -63,\n          104,\n          -70,\n          64,\n          -15,\n          116,\n          -28,\n          66,\n          -117,\n          21,\n          34,\n          66,\n          -60,\n          -12,\n          -7,\n          66,\n          -59,\n          -122,\n          96,\n          66,\n          -119,\n          111,\n          81,\n          66,\n          51,\n          106,\n          -48,\n          66,\n          -113,\n          -1,\n          19,\n          66,\n          -74,\n          -13,\n          -8,\n          66,\n          -63,\n          7,\n          -32,\n          66,\n          104,\n          85,\n          83,\n          66,\n          -110,\n          -25,\n          12,\n          66,\n          -126,\n          4,\n          25,\n          66,\n          86,\n          121,\n          109,\n          66,\n          -92,\n          47,\n          -114,\n          66,\n          -112,\n          -55,\n       
   4,\n          66,\n          127,\n          59,\n          -65,\n          66,\n          -62,\n          18,\n          80,\n          66,\n          76,\n          -59,\n          108,\n          66,\n          71,\n          -104,\n          95,\n          66,\n          -95,\n          -67,\n          97,\n          66,\n          -93,\n          -126,\n          -59,\n          66,\n          70,\n          -2,\n          -29,\n          66,\n          105,\n          -76,\n          11,\n          66,\n          -68,\n          -1,\n          108,\n          66,\n          -72,\n          62,\n          11,\n          66,\n          -103,\n          -56,\n          -72,\n          66,\n          -80,\n          67,\n          -43,\n          66,\n          99,\n          -36,\n          44,\n          66,\n          -118,\n          120,\n          -3,\n          66,\n          -85,\n          -97,\n          39,\n          66,\n          -79,\n          120,\n          -98,\n          66,\n          -99,\n          8,\n          16,\n          66,\n          -71,\n          -124,\n          -94,\n          66,\n          -106,\n          19,\n          -75,\n          66,\n          -71,\n          24,\n          -39,\n          66,\n          79,\n          43,\n          22,\n          66,\n          -61,\n          -84,\n          109,\n          66,\n          -88,\n          -46,\n          -86,\n          66,\n          72,\n          31,\n          61,\n          66,\n          -117,\n          23,\n          -121,\n          66,\n          -78,\n          -66,\n          1,\n          66,\n          -78,\n          -65,\n          16,\n          66,\n          -128,\n          -87,\n          127,\n          66,\n          -117,\n          -128,\n          6,\n          66,\n          71,\n          36,\n          67,\n          66,\n          -62,\n          -42,\n          -51,\n          66,\n          -71,\n          -104,\n          105,\n   
       66,\n          -112,\n          106,\n          -34,\n          66,\n          -113,\n          63,\n          -111,\n          66,\n          -120,\n          -69,\n          32,\n          66,\n          -124,\n          -64,\n          122,\n          66,\n          -60,\n          -43,\n          -95,\n          66,\n          -65,\n          -117,\n          -34,\n          66,\n          120,\n          -80,\n          62,\n          66,\n          -101,\n          37,\n          105,\n          66,\n          -103,\n          77,\n          -81,\n          66,\n          79,\n          -72,\n          -70,\n          66,\n          -113,\n          111,\n          -111,\n          66,\n          -66,\n          -128,\n          -1,\n          66,\n          -60,\n          -67,\n          59,\n          66,\n          -90,\n          126,\n          -15,\n          66,\n          -62,\n          -91,\n          109,\n          66,\n          -76,\n          -125,\n          80,\n          66,\n          73,\n          -106,\n          113,\n          66,\n          -96,\n          -79,\n          106,\n          66,\n          -73,\n          -29,\n          -110,\n          66,\n          -63,\n          -61,\n          63,\n          66,\n          -61,\n          7,\n          -61,\n          66,\n          -66,\n          94,\n          -105,\n          66,\n          -65,\n          118,\n          -110,\n          66,\n          -112,\n          43,\n          -74,\n          66,\n          84,\n          84,\n          -96,\n          66,\n          -101,\n          -54,\n          -110,\n          66,\n          88,\n          70,\n          -55,\n          66,\n          71,\n          -112,\n          54,\n          66,\n          -103,\n          -71,\n          -87,\n          64,\n          -34,\n          -46,\n          44,\n          66,\n          -114,\n          -1,\n          115,\n          66,\n          -69,\n          42,\n     
     85,\n          66,\n          -77,\n          -11,\n          -128,\n          66,\n          78,\n          20,\n          -12,\n          66,\n          -97,\n          18,\n          -41,\n          66,\n          -98,\n          24,\n          63,\n          66,\n          -99,\n          -108,\n          59,\n          66,\n          -69,\n          -109,\n          39,\n          66,\n          123,\n          20,\n          44,\n          66,\n          71,\n          107,\n          -23,\n          66,\n          -102,\n          -54,\n          -55,\n          66,\n          -59,\n          -1,\n          59,\n          66,\n          -106,\n          117,\n          62,\n          66,\n          89,\n          -75,\n          55,\n          66,\n          118,\n          -44,\n          66,\n          66,\n          82,\n          -77,\n          110,\n          66,\n          -64,\n          46,\n          -10,\n          66,\n          103,\n          -84,\n          -54,\n          66,\n          89,\n          67,\n          -103,\n          66,\n          -92,\n          -70,\n          -61,\n          66,\n          80,\n          -87,\n          -57,\n          66,\n          -66,\n          -49,\n          -88,\n          66,\n          -78,\n          -121,\n          -13,\n          66,\n          -108,\n          -9,\n          88,\n          66,\n          -64,\n          -81,\n          -11,\n          66,\n          -67,\n          26,\n          6,\n          66,\n          -67,\n          34,\n          -62,\n          66,\n          77,\n          -43,\n          122,\n          66,\n          -128,\n          8,\n          28,\n          66,\n          -113,\n          1,\n          70,\n          66,\n          -70,\n          67,\n          48,\n          66,\n          -61,\n          66,\n          125,\n          66,\n          85,\n          -105,\n          -82,\n          66,\n          -90,\n          65,\n          44,\n   
       66,\n          -108,\n          -28,\n          87,\n          66,\n          -66,\n          55,\n          -74,\n          66,\n          -58,\n          39,\n          107,\n          66,\n          -102,\n          79,\n          -124,\n          66,\n          86,\n          -119,\n          -25,\n          66,\n          -69,\n          37,\n          -54,\n          66,\n          -71,\n          5,\n          91,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          
0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 225,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162254824,\n          712052113,\n          754938941,\n          595716196,\n          726452833,\n          1028151670,\n          729943241,\n          644983369,\n          983697169,\n          1017110264,\n          725394985,\n          23845990,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          731785498,\n          1098282959,\n          1013783822,\n          1145606648,\n          582793942,\n          625851574,\n          731233652,\n          644972300,\n          1098282118,\n          597311243,\n          1099823740,\n          21523369,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 29,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 29,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -4554398593646845892,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          120031194,\n          258812407,\n          464848054,\n          333552074,\n          753138641,\n          900978230,\n          
595176269,\n          364882913,\n          899482826,\n          514410871,\n          376147622,\n          200264779,\n          497241677,\n          992958069,\n          536472545,\n          719968494,\n          378748489,\n          448611453,\n          757247405,\n          504404769,\n          249024510,\n          902268867,\n          849593554,\n          304122367,\n          188320945,\n          305055057,\n          309976559,\n          913659554,\n          634459733,\n          454921387,\n          737840465,\n          708523681,\n          637470893,\n          623547595,\n          458720634,\n          644593638,\n          709922266,\n          1022,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          18,\n          9,\n          -30,\n          68,\n          -53,\n          97,\n          81,\n          66,\n          -128,\n          -75,\n          -115,\n          66,\n          105,\n          -14,\n          88,\n          69,\n          4,\n          108,\n          -55,\n          66,\n          9,\n          -56,\n          46,\n          66,\n          49,\n          97,\n          -113,\n          66,\n          94,\n          -27,\n          115,\n          66,\n          -73,\n          111,\n          53,\n          66,\n          -99,\n          115,\n          -29,\n          66,\n          -102,\n          89,\n          120,\n          66,\n          118,\n          -22,\n          15,\n          66,\n          -60,\n          -63,\n          -58,\n          66,\n          -124,\n          -30,\n          42,\n          66,\n          126,\n          -62,\n          106,\n          66,\n          -87,\n          -3,\n          -127,\n          66,\n          114,\n          -96,\n          -70,\n          66,\n          -122,\n          -12,\n          -71,\n          66,\n          -75,\n          100,\n          6,\n          66,\n   
       -100,\n          -88,\n          -63,\n          66,\n          -76,\n          117,\n          5,\n          66,\n          77,\n          83,\n          64,\n          66,\n          -68,\n          -78,\n          4,\n          66,\n          -126,\n          -56,\n          79,\n          66,\n          -93,\n          45,\n          -50,\n          66,\n          -71,\n          75,\n          29,\n          66,\n          -80,\n          23,\n          -71,\n          66,\n          17,\n          -39,\n          -54,\n          66,\n          -61,\n          -19,\n          -116,\n          66,\n          -67,\n          39,\n          -45,\n          66,\n          -73,\n          -51,\n          36,\n          66,\n          -112,\n          -57,\n          -127,\n          66,\n          -119,\n          100,\n          81,\n          66,\n          76,\n          23,\n          -49,\n          66,\n          70,\n          -100,\n          27,\n          66,\n          -79,\n          -17,\n          38,\n          66,\n          -115,\n          -45,\n          -83,\n          66,\n          -87,\n          72,\n          45,\n          66,\n          -61,\n          104,\n          -89,\n          66,\n          122,\n          0,\n          -93,\n          66,\n          74,\n          36,\n          -66,\n          66,\n          -114,\n          57,\n          21,\n          66,\n          -96,\n          112,\n          62,\n          66,\n          95,\n          120,\n          -104,\n          66,\n          -72,\n          -53,\n          -112,\n          66,\n          95,\n          22,\n          -127,\n          66,\n          96,\n          102,\n          -77,\n          66,\n          -78,\n          -76,\n          80,\n          66,\n          -61,\n          72,\n          48,\n          66,\n          -79,\n          28,\n          37,\n          66,\n          -102,\n          -112,\n          52,\n          66,\n          
-68,\n          -10,\n          -114,\n          66,\n          -109,\n          -12,\n          70,\n          66,\n          -83,\n          -12,\n          -13,\n          66,\n          70,\n          -9,\n          -42,\n          66,\n          126,\n          -66,\n          52,\n          66,\n          -84,\n          -79,\n          49,\n          66,\n          -76,\n          20,\n          60,\n          66,\n          -77,\n          114,\n          51,\n          65,\n          -51,\n          -43,\n          -90,\n          66,\n          -70,\n          41,\n          51,\n          66,\n          -93,\n          67,\n          -16,\n          66,\n          115,\n          -11,\n          -17,\n          66,\n          78,\n          -46,\n          -34,\n          66,\n          -88,\n          -69,\n          -103,\n          66,\n          68,\n          75,\n          -82,\n          66,\n          101,\n          104,\n          -73,\n          66,\n          -63,\n          113,\n          0,\n          66,\n          119,\n          125,\n          -29,\n          65,\n          -43,\n          -85,\n          46,\n          66,\n          -59,\n          116,\n          66,\n          66,\n          -99,\n          41,\n          -87,\n          66,\n          -120,\n          -36,\n          -119,\n          66,\n          -82,\n          58,\n          99,\n          66,\n          96,\n          83,\n          97,\n          66,\n          -81,\n          -71,\n          17,\n          66,\n          -83,\n          99,\n          65,\n          66,\n          -64,\n          -78,\n          9,\n          66,\n          -94,\n          11,\n          -80,\n          66,\n          73,\n          44,\n          -111,\n          66,\n          -109,\n          58,\n          52,\n          66,\n          -66,\n          -62,\n          -119,\n          66,\n          -81,\n          101,\n          5,\n          66,\n          -97,\n      
    -12,\n          -43,\n          66,\n          -107,\n          20,\n          -13,\n          66,\n          82,\n          -102,\n          22,\n          66,\n          -68,\n          -53,\n          102,\n          66,\n          94,\n          39,\n          28,\n          66,\n          107,\n          -47,\n          31,\n          66,\n          72,\n          -93,\n          89,\n          66,\n          -72,\n          91,\n          -62,\n          66,\n          103,\n          -74,\n          53,\n          66,\n          -75,\n          86,\n          -126,\n          66,\n          113,\n          -103,\n          -50,\n          66,\n          -58,\n          64,\n          -36,\n          66,\n          -114,\n          -58,\n          77,\n          66,\n          -87,\n          -15,\n          -119,\n          66,\n          -60,\n          -79,\n          50,\n          66,\n          71,\n          38,\n          -15,\n          66,\n          -69,\n          -26,\n          65,\n          66,\n          -107,\n          -54,\n          69,\n          66,\n          81,\n          66,\n          23,\n          66,\n          -113,\n          -114,\n          24,\n          66,\n          72,\n          -5,\n          3,\n          66,\n          -96,\n          114,\n          -63,\n          66,\n          -60,\n          -121,\n          -41,\n          66,\n          69,\n          -109,\n          -21,\n          66,\n          -118,\n          50,\n          -94,\n          66,\n          -102,\n          -120,\n          -56,\n          66,\n          -104,\n          -10,\n          -95,\n          66,\n          -110,\n          106,\n          -125,\n          66,\n          -100,\n          68,\n          44,\n          66,\n          -65,\n          42,\n          16,\n          66,\n          -74,\n          11,\n          -76,\n          66,\n          -108,\n          -11,\n          -128,\n          66,\n          -106,\n   
       42,\n          -48,\n          66,\n          -83,\n          -49,\n          -4,\n          66,\n          -97,\n          -120,\n          3,\n          66,\n          -93,\n          -52,\n          57,\n          66,\n          80,\n          35,\n          1,\n          66,\n          -70,\n          -94,\n          -71,\n          66,\n          83,\n          -104,\n          -20,\n          66,\n          88,\n          -112,\n          -57,\n          66,\n          75,\n          -113,\n          56,\n          66,\n          -112,\n          47,\n          20,\n          66,\n          -125,\n          45,\n          -116,\n          66,\n          77,\n          37,\n          -9,\n          66,\n          -82,\n          55,\n          32,\n          66,\n          -125,\n          94,\n          86,\n          66,\n          80,\n          -31,\n          -76,\n          63,\n          -89,\n          29,\n          -121,\n          66,\n          -66,\n          -81,\n          -71,\n          66,\n          -68,\n          36,\n          30,\n          66,\n          -60,\n          -25,\n          25,\n          66,\n          -102,\n          -120,\n          111,\n          66,\n          70,\n          79,\n          85,\n          66,\n          -68,\n          95,\n          -68,\n          66,\n          -99,\n          28,\n          115,\n          66,\n          83,\n          -78,\n          106,\n          66,\n          122,\n          13,\n          -37,\n          66,\n          -62,\n          -7,\n          109,\n          66,\n          -89,\n          -79,\n          126,\n          66,\n          -80,\n          -109,\n          -31,\n          66,\n          -101,\n          -90,\n          13,\n          66,\n          -96,\n          -117,\n          87,\n          66,\n          -112,\n          125,\n          100,\n          66,\n          83,\n          127,\n          100,\n          66,\n          71,\n          
94,\n          94,\n          66,\n          99,\n          -55,\n          43,\n          66,\n          -113,\n          -21,\n          50,\n          66,\n          -128,\n          125,\n          67,\n          66,\n          -82,\n          56,\n          86,\n          66,\n          -105,\n          35,\n          -38,\n          66,\n          -103,\n          -34,\n          93,\n          66,\n          -70,\n          30,\n          76,\n          66,\n          -101,\n          -104,\n          47,\n          66,\n          69,\n          -122,\n          -20,\n          66,\n          112,\n          85,\n          -50,\n          66,\n          90,\n          -15,\n          -41,\n          66,\n          94,\n          -91,\n          124,\n          66,\n          -128,\n          14,\n          -107,\n          66,\n          -105,\n          -24,\n          -7,\n          66,\n          -60,\n          -115,\n          -6,\n          66,\n          -114,\n          -14,\n          -44,\n          66,\n          -111,\n          92,\n          -47,\n          66,\n          -81,\n          28,\n          97,\n          66,\n          95,\n          -71,\n          55,\n          66,\n          76,\n          88,\n          100,\n          66,\n          -116,\n          -126,\n          -79,\n          66,\n          -86,\n          79,\n          -113,\n          66,\n          -101,\n          -6,\n          -51,\n          66,\n          -69,\n          72,\n          28,\n          66,\n          -100,\n          122,\n          -2,\n          66,\n          -78,\n          33,\n          -42,\n          66,\n          88,\n          9,\n          11,\n          66,\n          -108,\n          -51,\n          48,\n          66,\n          107,\n          121,\n          -18,\n          66,\n          85,\n          5,\n          -18,\n          66,\n          -103,\n          -98,\n          65,\n          66,\n          -112,\n          
81,\n          -63,\n          66,\n          -105,\n          93,\n          -78,\n          66,\n          -63,\n          58,\n          -70,\n          66,\n          87,\n          -20,\n          59,\n          66,\n          -98,\n          -110,\n          48,\n          66,\n          75,\n          53,\n          20,\n          66,\n          -102,\n          18,\n          -113,\n          66,\n          -108,\n          92,\n          91,\n          66,\n          -105,\n          -117,\n          -11,\n          66,\n          -106,\n          -126,\n          -105,\n          66,\n          -82,\n          -125,\n          98,\n          66,\n          101,\n          -103,\n          -123,\n          66,\n          -108,\n          -95,\n          121,\n          66,\n          -98,\n          27,\n          -125,\n          66,\n          -105,\n          -39,\n          -79,\n          66,\n          -59,\n          -24,\n          -38,\n          66,\n          -70,\n          -37,\n          -125,\n          66,\n          100,\n          49,\n          57,\n          66,\n          -71,\n          -54,\n          -53,\n          66,\n          -127,\n          111,\n          -72,\n          66,\n          -60,\n          -7,\n          127,\n          66,\n          -108,\n          -89,\n          80,\n          66,\n          -105,\n          27,\n          -65,\n          66,\n          -62,\n          -4,\n          -89,\n          66,\n          -70,\n          -98,\n          -64,\n          66,\n          -73,\n          -30,\n          50,\n          66,\n          109,\n          -98,\n          100,\n          66,\n          -113,\n          17,\n          68,\n          66,\n          73,\n          -120,\n          26,\n          66,\n          -100,\n          18,\n          25,\n          66,\n          -97,\n          5,\n          -35,\n          66,\n          -62,\n          -4,\n          -102,\n          66,\n          91,\n 
         -111,\n          -117,\n          66,\n          -115,\n          74,\n          36,\n          66,\n          79,\n          27,\n          65,\n          66,\n          -73,\n          -125,\n          -122,\n          66,\n          79,\n          -61,\n          99,\n          66,\n          -66,\n          72,\n          -89,\n          66,\n          -60,\n          -119,\n          95,\n          66,\n          75,\n          72,\n          104,\n          66,\n          -125,\n          18,\n          -8,\n          66,\n          -98,\n          -116,\n          -82,\n          66,\n          -112,\n          102,\n          -6,\n          66,\n          -66,\n          -36,\n          -66,\n          66,\n          68,\n          -6,\n          6,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n    
      0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 224,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1028080268,\n          1117081687,\n          975190958,\n          716904877,\n          774831176,\n          639322549,\n          638555764,\n          595666588,\n          969288592,\n          1012189408,\n          989809142,\n          7201519,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1027607764,\n          1161729044,\n          1032527560,\n          1098284182,\n          645160292,\n          1012395092,\n          716716960,\n          726772144,\n          1114345717,\n          625838153,\n          588041896,\n          8827861,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 30,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 30,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 2247190326101601255,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      
\"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          668518854,\n          401940207,\n          640755434,\n          937474889,\n          261731785,\n          975152181,\n          514923243,\n          334452697,\n          653561195,\n          708155091,\n          525945414,\n          573290346,\n          782200699,\n          933399350,\n          438040617,\n          1066984925,\n          1071890361,\n          1029289941,\n          173397579,\n          192211567,\n          94032977,\n          313317207,\n          359728693,\n          866854081,\n          447974187,\n          842047015,\n          191737714,\n          644083570,\n          1002012399,\n          899747637,\n          730906451,\n          189980403,\n          258315510,\n          738027979,\n          93416667,\n          376759465,\n          1010,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -76,\n          97,\n          71,\n          68,\n          -52,\n          -40,\n          92,\n          69,\n          15,\n          -33,\n          86,\n          66,\n          -100,\n          -128,\n          -32,\n          66,\n          -95,\n          46,\n          -85,\n          66,\n          74,\n          27,\n          -34,\n          66,\n          101,\n          90,\n          120,\n          66,\n          116,\n          110,\n          119,\n          66,\n          102,\n          118,\n          -25,\n          66,\n          -90,\n          4,\n          -87,\n          65,\n          67,\n          8,\n          -51,\n          66,\n          106,\n          -59,\n          -118,\n          66,\n          -64,\n          50,\n 
         94,\n          66,\n          81,\n          125,\n          68,\n          66,\n          -104,\n          64,\n          114,\n          66,\n          -84,\n          6,\n          -74,\n          64,\n          1,\n          43,\n          23,\n          66,\n          114,\n          4,\n          4,\n          66,\n          -103,\n          47,\n          84,\n          66,\n          -66,\n          38,\n          -63,\n          66,\n          -68,\n          -32,\n          80,\n          66,\n          54,\n          -121,\n          124,\n          66,\n          -66,\n          109,\n          82,\n          66,\n          78,\n          -125,\n          46,\n          66,\n          -96,\n          -74,\n          62,\n          66,\n          -72,\n          -57,\n          -45,\n          66,\n          -109,\n          116,\n          -83,\n          66,\n          54,\n          93,\n          -27,\n          66,\n          -108,\n          -115,\n          -89,\n          66,\n          110,\n          105,\n          12,\n          66,\n          -104,\n          124,\n          102,\n          66,\n          -97,\n          97,\n          -116,\n          66,\n          -108,\n          104,\n          -18,\n          66,\n          -128,\n          15,\n          -103,\n          66,\n          -121,\n          93,\n          38,\n          66,\n          122,\n          -34,\n          -61,\n          66,\n          77,\n          77,\n          107,\n          66,\n          87,\n          -114,\n          -7,\n          66,\n          -96,\n          24,\n          28,\n          66,\n          -82,\n          -17,\n          66,\n          66,\n          100,\n          -79,\n          -24,\n          66,\n          83,\n          -93,\n          34,\n          66,\n          -113,\n          27,\n          -37,\n          66,\n          -61,\n          67,\n          0,\n          66,\n          -103,\n          62,\n          
17,\n          66,\n          -61,\n          -37,\n          90,\n          66,\n          -69,\n          109,\n          73,\n          66,\n          -103,\n          -12,\n          -24,\n          66,\n          23,\n          44,\n          32,\n          66,\n          79,\n          -79,\n          26,\n          66,\n          97,\n          114,\n          7,\n          66,\n          -95,\n          -128,\n          -39,\n          66,\n          95,\n          -75,\n          -94,\n          66,\n          119,\n          -16,\n          -38,\n          66,\n          -127,\n          -13,\n          18,\n          66,\n          -64,\n          -80,\n          -2,\n          66,\n          -106,\n          118,\n          -65,\n          66,\n          119,\n          4,\n          9,\n          66,\n          -123,\n          -34,\n          97,\n          66,\n          -119,\n          40,\n          103,\n          66,\n          -74,\n          74,\n          -98,\n          66,\n          -76,\n          127,\n          86,\n          66,\n          -69,\n          44,\n          9,\n          66,\n          -75,\n          85,\n          59,\n          66,\n          -98,\n          -29,\n          -1,\n          66,\n          110,\n          46,\n          17,\n          66,\n          -64,\n          11,\n          27,\n          66,\n          106,\n          59,\n          -41,\n          66,\n          -108,\n          86,\n          -11,\n          66,\n          80,\n          80,\n          17,\n          66,\n          -75,\n          120,\n          48,\n          66,\n          -111,\n          -72,\n          52,\n          66,\n          79,\n          12,\n          -109,\n          66,\n          108,\n          118,\n          30,\n          66,\n          85,\n          -13,\n          -12,\n          66,\n          -64,\n          77,\n          -58,\n          66,\n          -118,\n          -38,\n          -84,\n          
66,\n          71,\n          33,\n          -27,\n          66,\n          -72,\n          -30,\n          -87,\n          66,\n          -89,\n          79,\n          -39,\n          66,\n          -81,\n          -69,\n          -47,\n          66,\n          -120,\n          -127,\n          -89,\n          66,\n          -83,\n          -100,\n          -52,\n          66,\n          95,\n          -115,\n          -36,\n          66,\n          -101,\n          84,\n          -122,\n          66,\n          -108,\n          67,\n          100,\n          66,\n          -92,\n          -68,\n          51,\n          66,\n          103,\n          -3,\n          110,\n          66,\n          -127,\n          -67,\n          104,\n          66,\n          -123,\n          -100,\n          -69,\n          66,\n          -123,\n          123,\n          -84,\n          66,\n          -83,\n          4,\n          28,\n          66,\n          -107,\n          44,\n          -71,\n          66,\n          -116,\n          -60,\n          5,\n          66,\n          -113,\n          119,\n          87,\n          66,\n          102,\n          91,\n          -9,\n          66,\n          -95,\n          8,\n          -96,\n          66,\n          -114,\n          46,\n          2,\n          66,\n          113,\n          56,\n          -125,\n          66,\n          88,\n          3,\n          49,\n          66,\n          -76,\n          -35,\n          16,\n          66,\n          91,\n          -41,\n          -19,\n          66,\n          -118,\n          17,\n          -41,\n          66,\n          -67,\n          43,\n          -68,\n          66,\n          -112,\n          -128,\n          -13,\n          66,\n          75,\n          -62,\n          4,\n          66,\n          -109,\n          -3,\n          -74,\n          66,\n          -92,\n          -8,\n          -123,\n          66,\n          -126,\n          7,\n          -79,\n          
66,\n          -83,\n          -9,\n          -85,\n          66,\n          -103,\n          22,\n          105,\n          66,\n          103,\n          -26,\n          22,\n          66,\n          -117,\n          117,\n          118,\n          66,\n          -97,\n          -41,\n          -127,\n          66,\n          75,\n          -70,\n          -74,\n          66,\n          86,\n          -31,\n          97,\n          66,\n          -67,\n          -84,\n          85,\n          66,\n          -107,\n          -105,\n          94,\n          66,\n          92,\n          -48,\n          28,\n          66,\n          -121,\n          9,\n          -78,\n          66,\n          -126,\n          91,\n          63,\n          66,\n          -65,\n          -122,\n          -68,\n          66,\n          -115,\n          -95,\n          -73,\n          66,\n          -113,\n          48,\n          -93,\n          66,\n          -100,\n          -78,\n          101,\n          66,\n          -68,\n          116,\n          -46,\n          66,\n          77,\n          -41,\n          -58,\n          66,\n          -81,\n          -51,\n          44,\n          66,\n          -125,\n          -31,\n          -81,\n          66,\n          -91,\n          84,\n          -99,\n          66,\n          -77,\n          -108,\n          -34,\n          66,\n          -110,\n          19,\n          81,\n          66,\n          -98,\n          -123,\n          104,\n          66,\n          -111,\n          119,\n          44,\n          66,\n          -108,\n          -87,\n          3,\n          66,\n          -66,\n          124,\n          -116,\n          66,\n          80,\n          48,\n          28,\n          66,\n          -67,\n          88,\n          0,\n          66,\n          -105,\n          100,\n          46,\n          66,\n          -68,\n          119,\n          57,\n          66,\n          -105,\n          -113,\n          -67,\n    
      66,\n          -62,\n          109,\n          50,\n          66,\n          -85,\n          61,\n          87,\n          66,\n          -118,\n          78,\n          -76,\n          66,\n          70,\n          -110,\n          23,\n          66,\n          -103,\n          105,\n          20,\n          66,\n          -62,\n          -114,\n          28,\n          66,\n          102,\n          -20,\n          127,\n          66,\n          102,\n          -120,\n          -124,\n          66,\n          -107,\n          109,\n          -120,\n          66,\n          80,\n          4,\n          39,\n          66,\n          -115,\n          -110,\n          -59,\n          66,\n          100,\n          5,\n          -3,\n          66,\n          -94,\n          -123,\n          -72,\n          66,\n          74,\n          31,\n          50,\n          66,\n          -121,\n          -35,\n          117,\n          66,\n          -67,\n          118,\n          -112,\n          66,\n          79,\n          -119,\n          -56,\n          66,\n          96,\n          64,\n          -117,\n          66,\n          74,\n          124,\n          44,\n          66,\n          -64,\n          -35,\n          -81,\n          66,\n          -100,\n          -16,\n          71,\n          66,\n          -60,\n          8,\n          -82,\n          66,\n          76,\n          -33,\n          50,\n          66,\n          119,\n          -75,\n          -64,\n          66,\n          100,\n          96,\n          38,\n          66,\n          -84,\n          46,\n          32,\n          66,\n          81,\n          -34,\n          57,\n          66,\n          91,\n          -20,\n          65,\n          66,\n          80,\n          28,\n          107,\n          66,\n          91,\n          -107,\n          -87,\n          66,\n          -67,\n          104,\n          15,\n          66,\n          80,\n          -28,\n          70,\n          
66,\n          -106,\n          2,\n          -63,\n          66,\n          -97,\n          45,\n          108,\n          66,\n          -112,\n          123,\n          -47,\n          66,\n          -127,\n          122,\n          -64,\n          66,\n          -67,\n          28,\n          94,\n          66,\n          -68,\n          109,\n          127,\n          66,\n          -57,\n          103,\n          -33,\n          66,\n          71,\n          69,\n          1,\n          66,\n          -66,\n          30,\n          -77,\n          66,\n          73,\n          122,\n          92,\n          66,\n          -107,\n          -105,\n          34,\n          66,\n          -99,\n          -3,\n          27,\n          66,\n          -119,\n          -83,\n          34,\n          66,\n          94,\n          63,\n          -47,\n          66,\n          119,\n          -68,\n          -2,\n          66,\n          78,\n          0,\n          7,\n          66,\n          -110,\n          -6,\n          76,\n          66,\n          -108,\n          -12,\n          12,\n          66,\n          -104,\n          127,\n          -16,\n          66,\n          -77,\n          0,\n          -37,\n          66,\n          127,\n          31,\n          32,\n          66,\n          -110,\n          56,\n          16,\n          66,\n          94,\n          -43,\n          87,\n          66,\n          -64,\n          24,\n          -51,\n          66,\n          71,\n          32,\n          -44,\n          66,\n          91,\n          118,\n          0,\n          66,\n          -84,\n          105,\n          33,\n          66,\n          -61,\n          121,\n          -74,\n          66,\n          -65,\n          60,\n          -122,\n          66,\n          89,\n          117,\n          -79,\n          66,\n          -113,\n          7,\n          10,\n          66,\n          125,\n          -97,\n          89,\n          66,\n          
-69,\n          -107,\n          92,\n          66,\n          86,\n          14,\n          121,\n          66,\n          -63,\n          104,\n          96,\n          66,\n          -117,\n          -123,\n          -7,\n          66,\n          -75,\n          -117,\n          33,\n          66,\n          -103,\n          78,\n          64,\n          66,\n          -93,\n          -80,\n          118,\n          66,\n          -105,\n          55,\n          30,\n          66,\n          -98,\n          48,\n          105,\n          66,\n          90,\n          -44,\n          -13,\n          66,\n          82,\n          -67,\n          47,\n          66,\n          -60,\n          -9,\n          -36,\n          66,\n          90,\n          -53,\n          17,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          
0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 218,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          731715524,\n          990054538,\n          1026741703,\n          987921818,\n          1160427698,\n          1027744577,\n          1155172667,\n          753501596,\n          754980010,\n          970705661,\n          585921265,\n          12283,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          770055808,\n          1032510794,\n          1098485999,\n          987942229,\n          772988480,\n          643495504,\n          628963700,\n          710528990,\n          758159636,\n          973894064,\n          982928569,\n          10088,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 36,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 36,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      
\"partialTreeState\": true,\n      \"seed\": 5668109095303649572,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          905644061,\n          475760115,\n          343269078,\n          498908753,\n          380147165,\n          664402858,\n          221587067,\n          304048502,\n          922151586,\n          249281837,\n          790437806,\n          317134067,\n          853531946,\n          78737139,\n          116205674,\n          863303737,\n          488729431,\n          997545537,\n          728103473,\n          773499333,\n          481474850,\n          213716571,\n          455575535,\n          215173479,\n          1005317039,\n          904377537,\n          573929313,\n          534994493,\n          1029109043,\n          321695009,\n          1021381295,\n          584251965,\n          82291706,\n          182418734,\n          500729415,\n          174446697,\n          635208918,\n          524079082,\n          15,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          67,\n          25,\n          -29,\n          -114,\n          61,\n          -7,\n          21,\n          -38,\n          67,\n          -16,\n          62,\n          59,\n          68,\n          25,\n          115,\n          -42,\n          66,\n          102,\n          -95,\n          45,\n          69,\n          80,\n          95,\n          44,\n          66,\n          -127,\n          71,\n          31,\n          66,\n          
-114,\n          -40,\n          61,\n          66,\n          -98,\n          88,\n          -49,\n          64,\n          -68,\n          -115,\n          10,\n          66,\n          -125,\n          36,\n          -113,\n          66,\n          -63,\n          -85,\n          49,\n          66,\n          -79,\n          -87,\n          46,\n          66,\n          -60,\n          -26,\n          -8,\n          66,\n          75,\n          -56,\n          -113,\n          66,\n          74,\n          -7,\n          -3,\n          65,\n          -105,\n          -86,\n          43,\n          66,\n          -59,\n          114,\n          121,\n          66,\n          -125,\n          27,\n          16,\n          66,\n          -94,\n          40,\n          -112,\n          66,\n          93,\n          110,\n          -71,\n          66,\n          -118,\n          -127,\n          -85,\n          66,\n          68,\n          -37,\n          108,\n          66,\n          -100,\n          -82,\n          -94,\n          66,\n          -89,\n          112,\n          -29,\n          66,\n          -69,\n          51,\n          -115,\n          66,\n          -114,\n          -12,\n          -1,\n          66,\n          -109,\n          -18,\n          50,\n          66,\n          -70,\n          -18,\n          39,\n          66,\n          82,\n          51,\n          87,\n          66,\n          -79,\n          -31,\n          46,\n          66,\n          77,\n          -105,\n          -96,\n          66,\n          -71,\n          -20,\n          21,\n          66,\n          78,\n          102,\n          -113,\n          66,\n          -110,\n          -44,\n          17,\n          66,\n          73,\n          50,\n          -85,\n          66,\n          82,\n          -104,\n          0,\n          66,\n          106,\n          -118,\n          -7,\n          66,\n          -114,\n          -109,\n          121,\n          66,\n        
  -57,\n          -9,\n          57,\n          65,\n          -93,\n          -63,\n          -66,\n          66,\n          -59,\n          -70,\n          -89,\n          66,\n          -74,\n          -120,\n          2,\n          66,\n          106,\n          7,\n          -88,\n          66,\n          -59,\n          94,\n          -105,\n          66,\n          -60,\n          -83,\n          -49,\n          66,\n          -103,\n          0,\n          -60,\n          66,\n          -113,\n          126,\n          120,\n          66,\n          -74,\n          14,\n          8,\n          66,\n          125,\n          104,\n          -117,\n          66,\n          -62,\n          120,\n          80,\n          66,\n          -122,\n          -34,\n          112,\n          66,\n          71,\n          -3,\n          117,\n          66,\n          83,\n          104,\n          57,\n          66,\n          -98,\n          31,\n          23,\n          66,\n          -94,\n          -91,\n          104,\n          66,\n          84,\n          -87,\n          47,\n          66,\n          69,\n          117,\n          -14,\n          66,\n          -94,\n          -117,\n          -86,\n          66,\n          76,\n          94,\n          -43,\n          66,\n          -65,\n          -13,\n          -78,\n          66,\n          -102,\n          -53,\n          43,\n          66,\n          75,\n          92,\n          -121,\n          66,\n          -80,\n          69,\n          -70,\n          66,\n          -108,\n          -125,\n          -44,\n          66,\n          91,\n          -9,\n          -39,\n          66,\n          74,\n          -4,\n          46,\n          66,\n          93,\n          121,\n          55,\n          66,\n          -106,\n          -56,\n          -94,\n          66,\n          -60,\n          83,\n          69,\n          66,\n          -66,\n          34,\n          109,\n          66,\n          -110,\n 
         -60,\n          95,\n          66,\n          -66,\n          -80,\n          49,\n          66,\n          -109,\n          45,\n          107,\n          66,\n          79,\n          66,\n          -59,\n          66,\n          116,\n          108,\n          68,\n          66,\n          -99,\n          100,\n          -72,\n          66,\n          -98,\n          -28,\n          53,\n          66,\n          100,\n          18,\n          -48,\n          66,\n          76,\n          -26,\n          55,\n          66,\n          83,\n          40,\n          -58,\n          66,\n          -62,\n          3,\n          -62,\n          66,\n          76,\n          -27,\n          -44,\n          66,\n          -59,\n          113,\n          -105,\n          66,\n          -74,\n          117,\n          92,\n          66,\n          -124,\n          -53,\n          37,\n          66,\n          -79,\n          -98,\n          -71,\n          66,\n          -66,\n          32,\n          -113,\n          66,\n          -72,\n          105,\n          112,\n          66,\n          82,\n          -81,\n          79,\n          66,\n          -121,\n          101,\n          -20,\n          66,\n          -120,\n          -69,\n          108,\n          66,\n          -61,\n          70,\n          72,\n          66,\n          -117,\n          -40,\n          -101,\n          66,\n          68,\n          -41,\n          63,\n          66,\n          -110,\n          79,\n          8,\n          66,\n          82,\n          82,\n          125,\n          66,\n          -69,\n          78,\n          -71,\n          66,\n          -68,\n          101,\n          98,\n          66,\n          -63,\n          -77,\n          -88,\n          66,\n          -64,\n          97,\n          -108,\n          66,\n          -74,\n          107,\n          -60,\n          66,\n          -111,\n          -34,\n          -26,\n          66,\n          -66,\n      
    -84,\n          41,\n          66,\n          -92,\n          8,\n          -25,\n          66,\n          -69,\n          -4,\n          74,\n          66,\n          106,\n          -41,\n          -101,\n          66,\n          -119,\n          -126,\n          1,\n          66,\n          -96,\n          -63,\n          -74,\n          66,\n          -127,\n          -112,\n          84,\n          66,\n          -75,\n          -23,\n          91,\n          66,\n          79,\n          -23,\n          28,\n          66,\n          -68,\n          30,\n          0,\n          66,\n          -104,\n          -123,\n          -83,\n          66,\n          -99,\n          -119,\n          -127,\n          66,\n          -83,\n          63,\n          -67,\n          66,\n          -71,\n          -61,\n          -76,\n          66,\n          -105,\n          -73,\n          -124,\n          66,\n          -115,\n          -17,\n          81,\n          66,\n          -119,\n          -109,\n          -17,\n          66,\n          -69,\n          3,\n          -46,\n          66,\n          -107,\n          -9,\n          126,\n          66,\n          -63,\n          40,\n          39,\n          66,\n          -126,\n          88,\n          -121,\n          66,\n          68,\n          -49,\n          -34,\n          66,\n          -59,\n          -78,\n          43,\n          64,\n          -113,\n          -25,\n          -1,\n          66,\n          -75,\n          97,\n          113,\n          66,\n          122,\n          -51,\n          -96,\n          66,\n          -70,\n          -83,\n          -120,\n          66,\n          72,\n          -18,\n          -77,\n          66,\n          -70,\n          -66,\n          -34,\n          66,\n          94,\n          -72,\n          94,\n          66,\n          78,\n          29,\n          -114,\n          66,\n          -107,\n          21,\n          -29,\n          66,\n          77,\n  
        -6,\n          -127,\n          66,\n          -64,\n          -41,\n          83,\n          66,\n          -105,\n          102,\n          -45,\n          66,\n          99,\n          33,\n          -34,\n          66,\n          98,\n          -117,\n          117,\n          66,\n          -62,\n          27,\n          20,\n          66,\n          -65,\n          -69,\n          -73,\n          66,\n          -100,\n          -33,\n          60,\n          66,\n          -68,\n          64,\n          -41,\n          66,\n          85,\n          -59,\n          84,\n          66,\n          -114,\n          -53,\n          -76,\n          66,\n          -83,\n          37,\n          -25,\n          66,\n          102,\n          -59,\n          46,\n          66,\n          -88,\n          -126,\n          -38,\n          66,\n          123,\n          -23,\n          -103,\n          66,\n          -121,\n          41,\n          -57,\n          66,\n          -73,\n          -15,\n          -111,\n          66,\n          -109,\n          -93,\n          56,\n          66,\n          71,\n          -24,\n          92,\n          66,\n          -61,\n          89,\n          -51,\n          66,\n          -84,\n          97,\n          -5,\n          66,\n          -99,\n          85,\n          86,\n          66,\n          74,\n          -122,\n          86,\n          66,\n          -113,\n          71,\n          56,\n          66,\n          -65,\n          94,\n          25,\n          66,\n          105,\n          -90,\n          81,\n          66,\n          -114,\n          34,\n          -85,\n          66,\n          -111,\n          25,\n          -36,\n          66,\n          -103,\n          -124,\n          126,\n          66,\n          78,\n          21,\n          77,\n          66,\n          -64,\n          -113,\n          -73,\n          66,\n          -83,\n          -73,\n          -85,\n          66,\n          97,\n    
      72,\n          67,\n          66,\n          109,\n          -69,\n          42,\n          66,\n          -109,\n          102,\n          44,\n          66,\n          -108,\n          10,\n          28,\n          66,\n          -112,\n          -46,\n          36,\n          66,\n          -101,\n          -71,\n          8,\n          66,\n          -88,\n          63,\n          -16,\n          66,\n          -95,\n          -93,\n          -25,\n          66,\n          -104,\n          110,\n          -37,\n          66,\n          88,\n          43,\n          66,\n          66,\n          -104,\n          69,\n          -122,\n          66,\n          -75,\n          -62,\n          28,\n          66,\n          -117,\n          67,\n          47,\n          66,\n          73,\n          12,\n          -32,\n          66,\n          -99,\n          104,\n          6,\n          66,\n          -71,\n          127,\n          -57,\n          66,\n          -68,\n          13,\n          96,\n          66,\n          -74,\n          9,\n          -9,\n          66,\n          -61,\n          17,\n          101,\n          66,\n          -108,\n          -37,\n          10,\n          66,\n          -119,\n          109,\n          -5,\n          66,\n          -64,\n          -62,\n          12,\n          66,\n          -125,\n          125,\n          126,\n          66,\n          -113,\n          50,\n          -18,\n          66,\n          -121,\n          -88,\n          115,\n          66,\n          -71,\n          60,\n          64,\n          66,\n          71,\n          103,\n          54,\n          66,\n          -90,\n          107,\n          50,\n          66,\n          79,\n          87,\n          28,\n          66,\n          -60,\n          -7,\n          3,\n          66,\n          -83,\n          115,\n          7,\n          66,\n          -60,\n          -121,\n          83,\n          66,\n          -114,\n          -48,\n  
        -14,\n          66,\n          81,\n          -124,\n          -18,\n          66,\n          -73,\n          -109,\n          -128,\n          66,\n          -103,\n          109,\n          34,\n          66,\n          -103,\n          -110,\n          -88,\n          66,\n          -128,\n          58,\n          -33,\n          66,\n          -79,\n          -56,\n          -9,\n          66,\n          -105,\n          -66,\n          19,\n          66,\n          -103,\n          26,\n          -50,\n          66,\n          -101,\n          -108,\n          -10,\n          66,\n          -64,\n          -85,\n          89,\n          66,\n          -104,\n          -3,\n          -85,\n          66,\n          97,\n          16,\n          16,\n          66,\n          -61,\n          -39,\n          -22,\n          66,\n          70,\n          -53,\n          17,\n          66,\n          -76,\n          -65,\n          42,\n          66,\n          -108,\n          -117,\n          59,\n          66,\n          -67,\n          -11,\n          -32,\n          66,\n          -68,\n          40,\n          -117,\n          66,\n          -115,\n          38,\n          121,\n          66,\n          -113,\n          -114,\n          83,\n          66,\n          -110,\n          4,\n          -45,\n          66,\n          -69,\n          -100,\n          78,\n          66,\n          -61,\n          122,\n          45,\n          66,\n          83,\n          -110,\n          32,\n          66,\n          98,\n          -95,\n          92,\n          66,\n          -118,\n          79,\n          58,\n          66,\n          87,\n          -22,\n          110,\n          66,\n          74,\n          -98,\n          48,\n          66,\n          79,\n          30,\n          15,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n   
       0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 229,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          645503984,\n          754932688,\n          1112574895,\n          772528310,\n          727004726,\n          1103212466,\n          1142518334,\n          968737928,\n          1114349854,\n          726301852,\n          715612291,\n          600441944,\n          1,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1031346634,\n          769874273,\n          968790667,\n          643571855,\n          1100062132,\n          
730120472,\n          1028158001,\n          1017983984,\n          731079067,\n          984501022,\n          581840062,\n          624776773,\n          1,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 25,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 25,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -2564745619268294314,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          192256842,\n          175568857,\n          367496865,\n          1055719231,\n          378742481,\n          202427477,\n          773694774,\n          623950843,\n          231783221,\n          632383305,\n          620189034,\n          196558758,\n          349878587,\n          1056540149,\n          70600491,\n          312944081,\n          580429739,\n          907859757,\n          459868150,\n          82024246,\n          760411894,\n          746158818,\n          663805373,\n          73718511,\n          879564727,\n          705772005,\n          1005532225,\n          89722041,\n          723745745,\n          799472890,\n          1054250306,\n          841709619,\n          444237647,\n          571555557,\n          1001506038,\n          108188741,\n          849344946,\n          606686422,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          
-25,\n          64,\n          -27,\n          68,\n          17,\n          100,\n          117,\n          69,\n          50,\n          88,\n          30,\n          66,\n          121,\n          124,\n          -116,\n          66,\n          8,\n          25,\n          55,\n          66,\n          -125,\n          97,\n          53,\n          66,\n          -92,\n          -48,\n          0,\n          66,\n          -69,\n          31,\n          -50,\n          66,\n          -101,\n          16,\n          -76,\n          66,\n          58,\n          28,\n          51,\n          66,\n          91,\n          64,\n          -38,\n          66,\n          -97,\n          7,\n          40,\n          66,\n          -102,\n          63,\n          -116,\n          66,\n          -111,\n          -101,\n          7,\n          62,\n          20,\n          67,\n          110,\n          66,\n          126,\n          125,\n          100,\n          66,\n          -79,\n          50,\n          -45,\n          66,\n          -66,\n          -62,\n          82,\n          66,\n          109,\n          50,\n          43,\n          66,\n          -118,\n          -11,\n          -50,\n          65,\n          -96,\n          -74,\n          -16,\n          66,\n          -88,\n          96,\n          -101,\n          66,\n          -74,\n          36,\n          -94,\n          66,\n          3,\n          32,\n          -4,\n          66,\n          -106,\n          123,\n          -106,\n          66,\n          -59,\n          102,\n          -92,\n          66,\n          -110,\n          -98,\n          34,\n          66,\n          -82,\n          -55,\n          -120,\n          66,\n          -93,\n          80,\n          -37,\n          65,\n          -121,\n          83,\n          19,\n          66,\n          -123,\n          -108,\n          59,\n          66,\n          -70,\n          108,\n          119,\n          66,\n          72,\n      
    77,\n          -13,\n          66,\n          -86,\n          -66,\n          13,\n          66,\n          -128,\n          85,\n          8,\n          66,\n          -68,\n          -126,\n          25,\n          66,\n          -66,\n          89,\n          1,\n          66,\n          -119,\n          11,\n          -49,\n          66,\n          -96,\n          15,\n          10,\n          66,\n          78,\n          -68,\n          -32,\n          66,\n          -120,\n          -60,\n          53,\n          66,\n          73,\n          116,\n          -104,\n          66,\n          116,\n          52,\n          45,\n          66,\n          109,\n          111,\n          1,\n          66,\n          -72,\n          60,\n          -31,\n          66,\n          -111,\n          -84,\n          -99,\n          66,\n          125,\n          -72,\n          -98,\n          66,\n          -66,\n          16,\n          -105,\n          66,\n          -104,\n          127,\n          -47,\n          66,\n          -95,\n          -38,\n          -5,\n          66,\n          -84,\n          88,\n          41,\n          66,\n          -108,\n          -40,\n          -66,\n          66,\n          -116,\n          78,\n          -24,\n          66,\n          -92,\n          97,\n          11,\n          66,\n          -104,\n          96,\n          116,\n          66,\n          -63,\n          -121,\n          -24,\n          66,\n          -117,\n          -32,\n          -74,\n          66,\n          -67,\n          91,\n          107,\n          66,\n          101,\n          -91,\n          -59,\n          66,\n          -61,\n          71,\n          118,\n          66,\n          -86,\n          -89,\n          11,\n          66,\n          92,\n          -3,\n          -64,\n          66,\n          -114,\n          102,\n          74,\n          66,\n          -64,\n          32,\n          19,\n          66,\n          79,\n          
-56,\n          25,\n          66,\n          -59,\n          -114,\n          77,\n          66,\n          -97,\n          -122,\n          71,\n          66,\n          -119,\n          -112,\n          51,\n          66,\n          -128,\n          37,\n          17,\n          66,\n          -68,\n          41,\n          84,\n          65,\n          90,\n          91,\n          -107,\n          66,\n          -95,\n          -39,\n          -79,\n          66,\n          106,\n          64,\n          -61,\n          66,\n          -119,\n          -106,\n          25,\n          66,\n          -64,\n          -5,\n          -44,\n          66,\n          -121,\n          -116,\n          -77,\n          66,\n          -85,\n          -117,\n          -3,\n          66,\n          -103,\n          -56,\n          4,\n          66,\n          -102,\n          -24,\n          37,\n          66,\n          84,\n          119,\n          54,\n          66,\n          -113,\n          104,\n          -78,\n          66,\n          113,\n          -102,\n          -116,\n          66,\n          68,\n          4,\n          104,\n          66,\n          -122,\n          -2,\n          -49,\n          66,\n          98,\n          -60,\n          -88,\n          66,\n          -122,\n          14,\n          19,\n          66,\n          -113,\n          -111,\n          77,\n          66,\n          -73,\n          -15,\n          -59,\n          66,\n          86,\n          -74,\n          18,\n          66,\n          -77,\n          -90,\n          -41,\n          66,\n          -119,\n          -5,\n          21,\n          66,\n          -59,\n          50,\n          50,\n          66,\n          -103,\n          60,\n          21,\n          66,\n          -66,\n          121,\n          18,\n          66,\n          -70,\n          21,\n          24,\n          66,\n          -128,\n          -112,\n          -127,\n          66,\n          86,\n        
  48,\n          -89,\n          66,\n          -109,\n          -64,\n          -96,\n          66,\n          -116,\n          28,\n          68,\n          66,\n          -100,\n          127,\n          0,\n          66,\n          -112,\n          -63,\n          -67,\n          66,\n          -110,\n          -65,\n          76,\n          66,\n          -112,\n          -72,\n          43,\n          66,\n          9,\n          65,\n          65,\n          66,\n          -93,\n          -8,\n          -34,\n          66,\n          -106,\n          -20,\n          23,\n          66,\n          -100,\n          31,\n          -114,\n          66,\n          90,\n          120,\n          93,\n          66,\n          -62,\n          -18,\n          50,\n          66,\n          96,\n          -5,\n          -29,\n          66,\n          -94,\n          97,\n          2,\n          66,\n          -74,\n          -101,\n          88,\n          66,\n          -70,\n          28,\n          14,\n          66,\n          -106,\n          14,\n          33,\n          66,\n          -67,\n          75,\n          127,\n          66,\n          -98,\n          90,\n          -38,\n          66,\n          -119,\n          32,\n          -96,\n          66,\n          85,\n          99,\n          -60,\n          66,\n          -64,\n          -46,\n          72,\n          66,\n          -64,\n          -28,\n          -6,\n          66,\n          -81,\n          54,\n          94,\n          66,\n          86,\n          126,\n          97,\n          66,\n          -106,\n          -94,\n          -6,\n          66,\n          -111,\n          2,\n          -114,\n          66,\n          -95,\n          9,\n          -27,\n          66,\n          -68,\n          23,\n          62,\n          66,\n          -60,\n          -56,\n          -77,\n          66,\n          82,\n          -115,\n          -79,\n          66,\n          -62,\n          26,\n       
   -36,\n          66,\n          -59,\n          101,\n          70,\n          65,\n          95,\n          54,\n          62,\n          66,\n          -60,\n          7,\n          13,\n          66,\n          -100,\n          -94,\n          22,\n          66,\n          -110,\n          -103,\n          34,\n          66,\n          75,\n          77,\n          4,\n          66,\n          -99,\n          -126,\n          -41,\n          66,\n          -110,\n          -111,\n          101,\n          66,\n          100,\n          -23,\n          85,\n          66,\n          95,\n          73,\n          26,\n          66,\n          104,\n          -86,\n          92,\n          66,\n          -74,\n          -45,\n          29,\n          66,\n          -125,\n          -56,\n          106,\n          66,\n          -64,\n          112,\n          -125,\n          66,\n          -62,\n          115,\n          -101,\n          66,\n          89,\n          86,\n          40,\n          66,\n          -103,\n          97,\n          57,\n          66,\n          -109,\n          77,\n          -23,\n          66,\n          -74,\n          -14,\n          -104,\n          66,\n          -85,\n          21,\n          82,\n          66,\n          -81,\n          127,\n          5,\n          66,\n          -121,\n          60,\n          26,\n          66,\n          103,\n          -36,\n          -53,\n          66,\n          -61,\n          -76,\n          -45,\n          66,\n          -67,\n          112,\n          -87,\n          66,\n          -102,\n          86,\n          103,\n          66,\n          -110,\n          16,\n          69,\n          66,\n          -104,\n          -9,\n          122,\n          66,\n          -68,\n          115,\n          63,\n          66,\n          -86,\n          69,\n          88,\n          66,\n          -78,\n          21,\n          100,\n          66,\n          -73,\n          123,\n          
-29,\n          66,\n          -101,\n          60,\n          -90,\n          66,\n          -112,\n          -47,\n          -54,\n          66,\n          -102,\n          -124,\n          2,\n          66,\n          92,\n          -55,\n          46,\n          66,\n          -70,\n          88,\n          93,\n          66,\n          -115,\n          -23,\n          -56,\n          66,\n          -63,\n          -75,\n          -103,\n          66,\n          -100,\n          -62,\n          -24,\n          66,\n          -62,\n          86,\n          -116,\n          66,\n          -69,\n          -30,\n          -74,\n          66,\n          -57,\n          -123,\n          21,\n          66,\n          -61,\n          -26,\n          45,\n          66,\n          -108,\n          -83,\n          58,\n          66,\n          -67,\n          117,\n          -66,\n          66,\n          90,\n          -35,\n          -125,\n          66,\n          112,\n          -123,\n          -45,\n          66,\n          -92,\n          -97,\n          84,\n          66,\n          -80,\n          24,\n          116,\n          66,\n          74,\n          116,\n          75,\n          66,\n          -69,\n          -86,\n          -105,\n          66,\n          -74,\n          41,\n          49,\n          66,\n          -119,\n          -39,\n          -23,\n          66,\n          -127,\n          -39,\n          106,\n          66,\n          -115,\n          -74,\n          103,\n          66,\n          126,\n          113,\n          -89,\n          66,\n          85,\n          -46,\n          -119,\n          66,\n          -101,\n          5,\n          -44,\n          66,\n          -59,\n          84,\n          124,\n          66,\n          -77,\n          116,\n          -68,\n          66,\n          -62,\n          42,\n          -69,\n          66,\n          -120,\n          -42,\n          -64,\n          66,\n          -126,\n          
-57,\n          -15,\n          66,\n          -94,\n          72,\n          4,\n          66,\n          -123,\n          -62,\n          -81,\n          66,\n          -100,\n          76,\n          93,\n          66,\n          87,\n          46,\n          -8,\n          66,\n          -117,\n          -97,\n          -43,\n          66,\n          -101,\n          -44,\n          21,\n          66,\n          117,\n          -2,\n          43,\n          66,\n          113,\n          95,\n          36,\n          66,\n          -62,\n          -69,\n          10,\n          66,\n          -105,\n          107,\n          -51,\n          66,\n          -105,\n          -39,\n          89,\n          66,\n          -66,\n          33,\n          74,\n          66,\n          94,\n          -93,\n          -20,\n          66,\n          -99,\n          -44,\n          -10,\n          66,\n          79,\n          2,\n          -27,\n          66,\n          72,\n          -21,\n          1,\n          66,\n          -109,\n          -5,\n          98,\n          66,\n          -109,\n          25,\n          40,\n          66,\n          -72,\n          117,\n          -104,\n          66,\n          -106,\n          122,\n          -18,\n          66,\n          -111,\n          -114,\n          5,\n          66,\n          102,\n          55,\n          -127,\n          66,\n          81,\n          22,\n          -99,\n          66,\n          -77,\n          108,\n          51,\n          66,\n          -103,\n          32,\n          -74,\n          66,\n          -65,\n          66,\n          -116,\n          66,\n          87,\n          29,\n          -99,\n          66,\n          -118,\n          -73,\n          -109,\n          66,\n          -104,\n          72,\n          -27,\n          66,\n          -59,\n          -30,\n          118,\n          66,\n          -66,\n          105,\n          95,\n          66,\n          85,\n          -90,\n 
         -14,\n          66,\n          -83,\n          10,\n          -3,\n          66,\n          -61,\n          -28,\n          68,\n          66,\n          -65,\n          109,\n          6,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 228,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          983695022,\n          758358652,\n          1097956376,\n          
644027516,\n          730002335,\n          758809528,\n          581218456,\n          968814673,\n          1016468675,\n          1145541514,\n          581196598,\n          726234781,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1147319869,\n          772687915,\n          1157235496,\n          1162025270,\n          731174326,\n          1160646694,\n          581219185,\n          1147144588,\n          987751169,\n          754111534,\n          581316658,\n          582922615,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 26,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 26,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -6258575445422044724,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          196447062,\n          858087215,\n          488471342,\n          591964861,\n          866245582,\n          880327665,\n          454684506,\n          1005898102,\n          488539182,\n          880387785,\n          115723387,\n          363034074,\n          337086034,\n          465001035,\n          128511614,\n          204907702,\n          190798375,\n          212814947,\n          447661509,\n          903664767,\n          798058290,\n          1008183138,\n          783631675,\n          624912210,\n          
395759066,\n          70870983,\n          346394591,\n          50118327,\n          43067743,\n          1005576053,\n          500361595,\n          364677745,\n          386971634,\n          1038415566,\n          389105102,\n          439010385,\n          267610930,\n          18056933,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          7,\n          84,\n          -42,\n          68,\n          115,\n          -35,\n          -55,\n          69,\n          49,\n          51,\n          -54,\n          66,\n          52,\n          88,\n          -72,\n          66,\n          99,\n          -100,\n          -76,\n          66,\n          -94,\n          -88,\n          119,\n          65,\n          -52,\n          9,\n          -59,\n          66,\n          -108,\n          11,\n          -92,\n          66,\n          -113,\n          28,\n          -74,\n          66,\n          -67,\n          58,\n          107,\n          66,\n          -71,\n          52,\n          74,\n          66,\n          -118,\n          -22,\n          69,\n          66,\n          -71,\n          95,\n          7,\n          66,\n          19,\n          30,\n          -73,\n          66,\n          -62,\n          48,\n          -76,\n          66,\n          -85,\n          -14,\n          -76,\n          65,\n          -56,\n          -34,\n          3,\n          66,\n          -85,\n          47,\n          -59,\n          66,\n          -128,\n          46,\n          55,\n          66,\n          -110,\n          109,\n          -40,\n          66,\n          -81,\n          -43,\n          52,\n          66,\n          -85,\n          -32,\n          -123,\n          64,\n          -20,\n          -16,\n          76,\n          66,\n          -115,\n          122,\n          26,\n          66,\n          -61,\n          35,\n          23,\n          66,\n          -67,\n          
-54,\n          113,\n          66,\n          -68,\n          54,\n          123,\n          66,\n          35,\n          41,\n          -68,\n          66,\n          -65,\n          11,\n          117,\n          66,\n          -124,\n          -79,\n          -46,\n          66,\n          -89,\n          -14,\n          65,\n          66,\n          115,\n          -74,\n          -23,\n          66,\n          -71,\n          83,\n          26,\n          66,\n          122,\n          -53,\n          -85,\n          66,\n          90,\n          49,\n          -60,\n          66,\n          -77,\n          127,\n          4,\n          66,\n          -84,\n          -123,\n          122,\n          66,\n          -65,\n          31,\n          -122,\n          66,\n          5,\n          -52,\n          -7,\n          63,\n          -88,\n          64,\n          -50,\n          66,\n          -85,\n          -105,\n          -56,\n          66,\n          -126,\n          -10,\n          36,\n          66,\n          -88,\n          97,\n          -14,\n          66,\n          105,\n          20,\n          -17,\n          66,\n          -63,\n          97,\n          -9,\n          66,\n          -107,\n          -108,\n          6,\n          66,\n          102,\n          -15,\n          51,\n          66,\n          -104,\n          117,\n          -26,\n          66,\n          -68,\n          -57,\n          -14,\n          66,\n          -92,\n          6,\n          73,\n          66,\n          -109,\n          -61,\n          64,\n          66,\n          -95,\n          -25,\n          -55,\n          66,\n          -101,\n          82,\n          122,\n          66,\n          -78,\n          31,\n          -34,\n          66,\n          -117,\n          -49,\n          -47,\n          66,\n          -74,\n          -85,\n          -50,\n          66,\n          -108,\n          45,\n          -117,\n          66,\n          75,\n          
47,\n          87,\n          66,\n          102,\n          80,\n          -94,\n          66,\n          -90,\n          -111,\n          1,\n          66,\n          -124,\n          76,\n          -106,\n          66,\n          91,\n          -120,\n          28,\n          66,\n          79,\n          39,\n          -7,\n          66,\n          91,\n          -22,\n          -7,\n          66,\n          -59,\n          121,\n          87,\n          66,\n          115,\n          43,\n          -80,\n          66,\n          -69,\n          -38,\n          118,\n          66,\n          -61,\n          -128,\n          -112,\n          66,\n          -88,\n          -18,\n          17,\n          66,\n          -72,\n          66,\n          -66,\n          66,\n          -70,\n          -117,\n          33,\n          66,\n          -68,\n          43,\n          8,\n          66,\n          -85,\n          -48,\n          7,\n          66,\n          -76,\n          43,\n          37,\n          66,\n          -117,\n          -4,\n          -119,\n          66,\n          86,\n          84,\n          125,\n          66,\n          -98,\n          97,\n          59,\n          66,\n          -72,\n          102,\n          -36,\n          66,\n          103,\n          99,\n          -56,\n          66,\n          -63,\n          79,\n          -98,\n          66,\n          -73,\n          100,\n          2,\n          66,\n          -59,\n          -16,\n          -60,\n          66,\n          73,\n          34,\n          15,\n          66,\n          -100,\n          -94,\n          -30,\n          66,\n          -73,\n          -29,\n          75,\n          66,\n          73,\n          -83,\n          -37,\n          66,\n          76,\n          -34,\n          32,\n          66,\n          -108,\n          120,\n          107,\n          66,\n          -72,\n          62,\n          -105,\n          66,\n          -124,\n          19,\n        
  -82,\n          66,\n          -74,\n          -4,\n          2,\n          66,\n          -113,\n          -5,\n          -103,\n          66,\n          -87,\n          -112,\n          -74,\n          66,\n          127,\n          -45,\n          42,\n          66,\n          101,\n          92,\n          63,\n          66,\n          -88,\n          70,\n          -128,\n          66,\n          77,\n          91,\n          -93,\n          66,\n          -98,\n          -110,\n          -6,\n          66,\n          -75,\n          37,\n          -69,\n          66,\n          -73,\n          100,\n          98,\n          66,\n          -96,\n          -15,\n          -90,\n          66,\n          -103,\n          -61,\n          -37,\n          66,\n          95,\n          34,\n          54,\n          66,\n          96,\n          -79,\n          -21,\n          66,\n          98,\n          4,\n          99,\n          66,\n          -64,\n          -24,\n          -21,\n          66,\n          -76,\n          74,\n          -60,\n          66,\n          -76,\n          -112,\n          40,\n          66,\n          -96,\n          -48,\n          99,\n          66,\n          -66,\n          -93,\n          -30,\n          66,\n          -103,\n          -29,\n          -94,\n          66,\n          -104,\n          67,\n          -36,\n          66,\n          -66,\n          118,\n          117,\n          66,\n          -84,\n          124,\n          95,\n          66,\n          79,\n          -51,\n          102,\n          66,\n          122,\n          -84,\n          -56,\n          66,\n          -92,\n          -67,\n          63,\n          66,\n          -107,\n          -25,\n          -77,\n          66,\n          -127,\n          90,\n          -90,\n          66,\n          -73,\n          43,\n          73,\n          66,\n          -78,\n          -47,\n          -81,\n          66,\n          -109,\n          -75,\n          
13,\n          66,\n          -94,\n          0,\n          -76,\n          66,\n          -66,\n          -2,\n          -106,\n          66,\n          -110,\n          -71,\n          -69,\n          66,\n          70,\n          60,\n          -99,\n          66,\n          -64,\n          67,\n          -28,\n          66,\n          92,\n          -17,\n          -111,\n          66,\n          -106,\n          31,\n          -96,\n          66,\n          103,\n          47,\n          66,\n          66,\n          -111,\n          20,\n          77,\n          66,\n          -67,\n          77,\n          16,\n          66,\n          84,\n          -41,\n          86,\n          66,\n          -101,\n          32,\n          54,\n          66,\n          -111,\n          103,\n          -8,\n          66,\n          -62,\n          23,\n          -103,\n          66,\n          110,\n          -108,\n          126,\n          66,\n          -128,\n          77,\n          125,\n          66,\n          -82,\n          -76,\n          117,\n          66,\n          -63,\n          38,\n          -94,\n          66,\n          -109,\n          81,\n          -33,\n          66,\n          -68,\n          -37,\n          29,\n          66,\n          118,\n          -45,\n          102,\n          66,\n          -63,\n          -114,\n          -83,\n          66,\n          -67,\n          92,\n          48,\n          66,\n          -74,\n          -86,\n          -106,\n          66,\n          -125,\n          8,\n          -49,\n          66,\n          -119,\n          57,\n          34,\n          66,\n          -99,\n          -88,\n          -56,\n          66,\n          103,\n          -71,\n          -124,\n          66,\n          70,\n          -15,\n          29,\n          66,\n          -64,\n          -106,\n          -25,\n          66,\n          -100,\n          -6,\n          -79,\n          66,\n          -68,\n          80,\n          
17,\n          66,\n          109,\n          -85,\n          -3,\n          66,\n          -65,\n          30,\n          14,\n          66,\n          75,\n          42,\n          25,\n          66,\n          -89,\n          -109,\n          91,\n          66,\n          70,\n          -77,\n          99,\n          66,\n          79,\n          51,\n          -14,\n          66,\n          -66,\n          70,\n          -30,\n          66,\n          -69,\n          74,\n          48,\n          66,\n          76,\n          -50,\n          53,\n          66,\n          -102,\n          121,\n          -20,\n          66,\n          99,\n          103,\n          -12,\n          66,\n          -90,\n          66,\n          -1,\n          66,\n          -123,\n          13,\n          119,\n          66,\n          -94,\n          110,\n          102,\n          66,\n          -123,\n          -125,\n          125,\n          66,\n          -84,\n          18,\n          -108,\n          66,\n          -96,\n          116,\n          -15,\n          66,\n          -88,\n          -5,\n          85,\n          66,\n          -107,\n          18,\n          37,\n          66,\n          -124,\n          -72,\n          14,\n          66,\n          -117,\n          -36,\n          -64,\n          66,\n          123,\n          -68,\n          65,\n          66,\n          114,\n          96,\n          -108,\n          66,\n          112,\n          35,\n          -112,\n          66,\n          -63,\n          -98,\n          -30,\n          66,\n          -112,\n          -103,\n          -34,\n          66,\n          79,\n          -28,\n          96,\n          66,\n          73,\n          -76,\n          49,\n          66,\n          -62,\n          -54,\n          52,\n          66,\n          -106,\n          109,\n          -29,\n          66,\n          -101,\n          -61,\n          -47,\n          66,\n          -78,\n          97,\n          
-128,\n          66,\n          -119,\n          -84,\n          -110,\n          66,\n          82,\n          -102,\n          -25,\n          66,\n          -82,\n          -1,\n          72,\n          66,\n          -109,\n          -80,\n          106,\n          66,\n          77,\n          14,\n          -41,\n          66,\n          -64,\n          -75,\n          -108,\n          66,\n          -62,\n          63,\n          -45,\n          66,\n          97,\n          -109,\n          102,\n          66,\n          123,\n          -95,\n          -37,\n          66,\n          -120,\n          -114,\n          -105,\n          66,\n          -114,\n          -110,\n          102,\n          66,\n          85,\n          -23,\n          23,\n          66,\n          -62,\n          34,\n          7,\n          66,\n          -69,\n          -122,\n          -82,\n          66,\n          -127,\n          21,\n          -1,\n          66,\n          -116,\n          -80,\n          -109,\n          66,\n          -60,\n          32,\n          118,\n          66,\n          -67,\n          70,\n          44,\n          66,\n          -66,\n          -115,\n          4,\n          66,\n          -59,\n          41,\n          80,\n          66,\n          -115,\n          -116,\n          -108,\n          66,\n          -65,\n          -10,\n          -49,\n          66,\n          111,\n          64,\n          99,\n          66,\n          118,\n          121,\n          -62,\n          66,\n          -120,\n          76,\n          -126,\n          66,\n          -65,\n          32,\n          -30,\n          66,\n          -106,\n          -8,\n          26,\n          66,\n          -111,\n          29,\n          43,\n          66,\n          -74,\n          -12,\n          10,\n          66,\n          -113,\n          11,\n          -118,\n          66,\n          -61,\n          40,\n          28,\n          66,\n          -102,\n          
-106,\n          83,\n          66,\n          -62,\n          109,\n          38,\n          66,\n          -80,\n          -36,\n          81,\n          66,\n          90,\n          -66,\n          -50,\n          66,\n          86,\n          101,\n          44,\n          66,\n          -111,\n          125,\n          105,\n          66,\n          84,\n          108,\n          -86,\n          66,\n          -126,\n          74,\n          -105,\n          66,\n          81,\n          -109,\n          -9,\n          66,\n          -105,\n          65,\n          -47,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n  
        0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 227,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          600468470,\n          1033035683,\n          753521989,\n          974069864,\n          985034669,\n          1162077727,\n          983616262,\n          1013448983,\n          753378865,\n          1013785996,\n          726931733,\n          198512897,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          774289759,\n          774584818,\n          624434183,\n          715231364,\n          1117383866,\n          712636127,\n          1104797758,\n          1103183918,\n          1098232306,\n          1146114728,\n          769447103,\n          209712766,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 27,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 27,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7118534165006407185,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          907630914,\n          43452073,\n          52886071,\n          483960655,\n          463058385,\n     
     133350823,\n          888458967,\n          853219053,\n          1000381230,\n          643782055,\n          368746353,\n          752585267,\n          576031587,\n          756983211,\n          1028849529,\n          179697501,\n          1031214894,\n          919921769,\n          783399599,\n          863552871,\n          711703893,\n          267228721,\n          52370723,\n          610856437,\n          606020794,\n          786210742,\n          357788135,\n          996140795,\n          354089839,\n          57068493,\n          739572467,\n          388188502,\n          261918058,\n          219784354,\n          72454101,\n          190183473,\n          857675045,\n          829,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          32,\n          -53,\n          67,\n          69,\n          65,\n          -110,\n          -22,\n          69,\n          37,\n          -95,\n          45,\n          69,\n          35,\n          6,\n          -57,\n          65,\n          -25,\n          65,\n          -90,\n          65,\n          -84,\n          101,\n          7,\n          66,\n          -125,\n          -125,\n          29,\n          66,\n          -100,\n          -38,\n          104,\n          66,\n          -93,\n          10,\n          -37,\n          66,\n          -79,\n          73,\n          29,\n          66,\n          84,\n          44,\n          26,\n          66,\n          -122,\n          -68,\n          18,\n          66,\n          98,\n          126,\n          23,\n          66,\n          -95,\n          -116,\n          -88,\n          66,\n          -82,\n          -9,\n          45,\n          66,\n          59,\n          12,\n          125,\n          66,\n          -66,\n          -9,\n          119,\n          66,\n          -89,\n          -36,\n          112,\n          66,\n          77,\n          42,\n          
-75,\n          66,\n          -61,\n          123,\n          -109,\n          66,\n          -115,\n          -78,\n          -16,\n          66,\n          -116,\n          51,\n          43,\n          66,\n          126,\n          -50,\n          -78,\n          66,\n          -67,\n          -2,\n          82,\n          66,\n          -116,\n          21,\n          -60,\n          66,\n          -64,\n          12,\n          118,\n          66,\n          -107,\n          -57,\n          29,\n          66,\n          106,\n          -13,\n          -127,\n          66,\n          -114,\n          -67,\n          90,\n          66,\n          -107,\n          81,\n          38,\n          66,\n          68,\n          115,\n          -6,\n          66,\n          126,\n          -108,\n          -114,\n          66,\n          -88,\n          -51,\n          -117,\n          66,\n          -113,\n          -110,\n          -57,\n          66,\n          89,\n          105,\n          4,\n          66,\n          80,\n          125,\n          -126,\n          66,\n          85,\n          89,\n          103,\n          66,\n          -82,\n          13,\n          -28,\n          66,\n          75,\n          -9,\n          89,\n          66,\n          -109,\n          33,\n          -43,\n          66,\n          75,\n          -51,\n          87,\n          66,\n          -71,\n          -98,\n          88,\n          66,\n          -114,\n          93,\n          -11,\n          66,\n          70,\n          115,\n          121,\n          66,\n          -107,\n          83,\n          16,\n          66,\n          -94,\n          -44,\n          -40,\n          66,\n          -99,\n          -28,\n          92,\n          66,\n          -110,\n          -60,\n          109,\n          66,\n          -78,\n          55,\n          -71,\n          66,\n          -110,\n          -47,\n          -52,\n          66,\n          -63,\n          22,\n        
  48,\n          66,\n          -101,\n          -4,\n          90,\n          66,\n          -59,\n          -128,\n          -91,\n          66,\n          -86,\n          66,\n          65,\n          66,\n          6,\n          121,\n          -109,\n          66,\n          -116,\n          -83,\n          32,\n          66,\n          -119,\n          25,\n          -98,\n          66,\n          -76,\n          -24,\n          71,\n          66,\n          -110,\n          -8,\n          30,\n          66,\n          108,\n          106,\n          -23,\n          66,\n          -102,\n          51,\n          -75,\n          66,\n          125,\n          -19,\n          13,\n          66,\n          119,\n          21,\n          -67,\n          66,\n          -105,\n          -30,\n          1,\n          66,\n          -128,\n          9,\n          -101,\n          66,\n          -65,\n          122,\n          -37,\n          66,\n          115,\n          -124,\n          69,\n          66,\n          -110,\n          121,\n          -6,\n          66,\n          -75,\n          61,\n          111,\n          66,\n          69,\n          -30,\n          35,\n          66,\n          -112,\n          108,\n          -103,\n          66,\n          -68,\n          7,\n          -85,\n          66,\n          79,\n          -14,\n          -38,\n          66,\n          -128,\n          93,\n          -48,\n          66,\n          -62,\n          -109,\n          70,\n          66,\n          81,\n          -28,\n          64,\n          66,\n          -86,\n          96,\n          -102,\n          66,\n          -98,\n          -66,\n          -65,\n          66,\n          101,\n          55,\n          -120,\n          66,\n          -125,\n          78,\n          -125,\n          66,\n          -114,\n          -31,\n          90,\n          66,\n          -121,\n          -37,\n          -59,\n          66,\n          -117,\n          -100,\n   
       58,\n          66,\n          -62,\n          5,\n          13,\n          66,\n          -128,\n          23,\n          114,\n          66,\n          71,\n          -108,\n          -98,\n          66,\n          76,\n          -106,\n          2,\n          66,\n          -110,\n          -70,\n          105,\n          66,\n          -105,\n          -36,\n          125,\n          66,\n          -71,\n          -122,\n          5,\n          66,\n          -111,\n          110,\n          91,\n          66,\n          -68,\n          -84,\n          26,\n          66,\n          -120,\n          -47,\n          20,\n          66,\n          70,\n          -20,\n          101,\n          66,\n          124,\n          -123,\n          88,\n          66,\n          119,\n          -53,\n          -126,\n          66,\n          -59,\n          -24,\n          122,\n          66,\n          -92,\n          18,\n          -11,\n          66,\n          -118,\n          74,\n          -11,\n          66,\n          -89,\n          50,\n          103,\n          66,\n          70,\n          -94,\n          -104,\n          66,\n          -67,\n          -67,\n          -120,\n          66,\n          -99,\n          -58,\n          39,\n          66,\n          101,\n          86,\n          -33,\n          66,\n          -111,\n          20,\n          -109,\n          66,\n          -119,\n          38,\n          16,\n          66,\n          -120,\n          -111,\n          -85,\n          66,\n          71,\n          -11,\n          3,\n          66,\n          80,\n          -123,\n          124,\n          66,\n          -98,\n          -123,\n          -27,\n          66,\n          -76,\n          68,\n          -76,\n          66,\n          72,\n          93,\n          118,\n          66,\n          97,\n          -102,\n          -18,\n          66,\n          105,\n          24,\n          -34,\n          66,\n          107,\n          
-27,\n          -55,\n          66,\n          122,\n          -89,\n          -50,\n          66,\n          89,\n          29,\n          -45,\n          66,\n          -104,\n          -107,\n          -70,\n          66,\n          123,\n          123,\n          103,\n          66,\n          -115,\n          -29,\n          -127,\n          66,\n          -110,\n          40,\n          -46,\n          66,\n          -71,\n          -52,\n          -100,\n          66,\n          77,\n          -104,\n          -4,\n          66,\n          68,\n          90,\n          -68,\n          66,\n          -61,\n          61,\n          -73,\n          66,\n          -118,\n          30,\n          27,\n          66,\n          -104,\n          -26,\n          -96,\n          66,\n          -122,\n          -89,\n          -84,\n          66,\n          -104,\n          72,\n          104,\n          66,\n          88,\n          97,\n          -12,\n          66,\n          -92,\n          47,\n          39,\n          66,\n          95,\n          114,\n          47,\n          66,\n          91,\n          -22,\n          -23,\n          66,\n          -113,\n          -29,\n          28,\n          66,\n          103,\n          -44,\n          106,\n          66,\n          -65,\n          52,\n          -31,\n          66,\n          -114,\n          117,\n          -54,\n          66,\n          -105,\n          -84,\n          57,\n          66,\n          -93,\n          -38,\n          -50,\n          66,\n          85,\n          -100,\n          122,\n          66,\n          107,\n          62,\n          116,\n          66,\n          -109,\n          29,\n          -89,\n          66,\n          -59,\n          69,\n          -39,\n          66,\n          -64,\n          29,\n          -10,\n          66,\n          -70,\n          -75,\n          22,\n          66,\n          -123,\n          58,\n          2,\n          66,\n          -98,\n       
   7,\n          55,\n          66,\n          -71,\n          70,\n          -108,\n          66,\n          -105,\n          88,\n          92,\n          66,\n          -88,\n          113,\n          -126,\n          66,\n          -64,\n          -108,\n          -108,\n          66,\n          -101,\n          -119,\n          -62,\n          66,\n          91,\n          84,\n          0,\n          66,\n          -110,\n          67,\n          40,\n          66,\n          -107,\n          105,\n          -102,\n          66,\n          83,\n          16,\n          -96,\n          66,\n          125,\n          52,\n          55,\n          66,\n          109,\n          -96,\n          -83,\n          66,\n          -66,\n          -82,\n          56,\n          66,\n          -67,\n          -48,\n          112,\n          66,\n          -118,\n          101,\n          -1,\n          66,\n          -59,\n          -14,\n          -56,\n          66,\n          84,\n          -85,\n          73,\n          66,\n          95,\n          33,\n          -27,\n          66,\n          -94,\n          65,\n          -12,\n          66,\n          77,\n          116,\n          -45,\n          66,\n          -114,\n          62,\n          105,\n          66,\n          -106,\n          65,\n          -60,\n          66,\n          101,\n          27,\n          36,\n          66,\n          83,\n          -94,\n          -15,\n          66,\n          -65,\n          -28,\n          -71,\n          66,\n          -115,\n          70,\n          108,\n          66,\n          122,\n          102,\n          74,\n          66,\n          -74,\n          12,\n          11,\n          66,\n          -101,\n          -125,\n          67,\n          66,\n          -81,\n          -76,\n          -49,\n          66,\n          -62,\n          -26,\n          -32,\n          66,\n          -99,\n          69,\n          -63,\n          66,\n          -75,\n          
73,\n          77,\n          66,\n          -113,\n          30,\n          -100,\n          66,\n          71,\n          89,\n          -31,\n          66,\n          97,\n          52,\n          53,\n          66,\n          -68,\n          -18,\n          -114,\n          66,\n          -106,\n          44,\n          -107,\n          66,\n          -112,\n          -71,\n          93,\n          66,\n          -59,\n          -17,\n          -75,\n          66,\n          -62,\n          -46,\n          101,\n          66,\n          -69,\n          14,\n          96,\n          66,\n          -77,\n          -127,\n          82,\n          66,\n          -77,\n          -117,\n          18,\n          66,\n          -68,\n          -59,\n          53,\n          66,\n          69,\n          121,\n          -12,\n          66,\n          -60,\n          -2,\n          -71,\n          66,\n          104,\n          -102,\n          -85,\n          66,\n          70,\n          -38,\n          37,\n          66,\n          -92,\n          -55,\n          69,\n          66,\n          -112,\n          -38,\n          2,\n          66,\n          83,\n          59,\n          -122,\n          66,\n          -66,\n          -23,\n          121,\n          66,\n          -113,\n          -27,\n          -35,\n          66,\n          -108,\n          -33,\n          -119,\n          66,\n          86,\n          -49,\n          22,\n          66,\n          -113,\n          -47,\n          112,\n          66,\n          -65,\n          -11,\n          74,\n          66,\n          -119,\n          -51,\n          -82,\n          66,\n          -61,\n          -79,\n          -50,\n          66,\n          84,\n          -98,\n          -103,\n          66,\n          79,\n          27,\n          -33,\n          66,\n          -109,\n          -15,\n          126,\n          66,\n          -68,\n          -72,\n          -5,\n          66,\n          -104,\n      
    95,\n          100,\n          66,\n          -110,\n          -74,\n          -75,\n          66,\n          -62,\n          54,\n          74,\n          66,\n          83,\n          -80,\n          45,\n          66,\n          -103,\n          -31,\n          89,\n          66,\n          -108,\n          -60,\n          27,\n          66,\n          -111,\n          -82,\n          -128,\n          66,\n          -115,\n          101,\n          24,\n          66,\n          -104,\n          56,\n          -108,\n          66,\n          -71,\n          -126,\n          105,\n          66,\n          -106,\n          35,\n          -68,\n          66,\n          -108,\n          14,\n          38,\n          66,\n          -107,\n          -111,\n          68,\n          66,\n          -98,\n          17,\n          -116,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n       
   0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 224,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1033042247,\n          774818287,\n          643847449,\n          1156860994,\n          645640709,\n          759900490,\n          1118662559,\n          581140570,\n          639257174,\n          602040974,\n          583492814,\n          7371529,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1160608054,\n          968787773,\n          754052569,\n          1018686262,\n          629206123,\n          774660554,\n          970351891,\n          710457638,\n          600439768,\n          726417854,\n          583436672,\n          8775581,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 30,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 30,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -383864129972597491,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 
256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          657791446,\n          899217370,\n          582649659,\n          1021403074,\n          578610617,\n          492013155,\n          363629919,\n          636173671,\n          517688865,\n          913290846,\n          451351891,\n          509017666,\n          65459558,\n          213757125,\n          463693029,\n          1071109586,\n          123065565,\n          208727925,\n          788362713,\n          752153770,\n          309124941,\n          1070811597,\n          619885657,\n          854383865,\n          513408289,\n          496286787,\n          60538338,\n          572868917,\n          489369269,\n          43364083,\n          884256585,\n          510639963,\n          783796679,\n          761377953,\n          641164927,\n          607741022,\n          1005700346,\n          589597670,\n          18217,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -119,\n          111,\n          -123,\n          69,\n          30,\n          5,\n          62,\n          68,\n          1,\n          -122,\n          -50,\n          66,\n          -64,\n          92,\n          1,\n          66,\n          -119,\n          -56,\n          24,\n          66,\n          -118,\n          -44,\n          32,\n          68,\n          -77,\n          -109,\n          119,\n          66,\n          -96,\n          -43,\n          -92,\n          66,\n          -111,\n          -106,\n          -11,\n          66,\n          -113,\n          -110,\n          -89,\n          66,\n          -83,\n          101,\n          -43,\n          66,\n          -88,\n          
-41,\n          115,\n          66,\n          80,\n          -94,\n          -96,\n          66,\n          124,\n          109,\n          -52,\n          66,\n          -106,\n          -20,\n          -50,\n          66,\n          31,\n          78,\n          120,\n          66,\n          14,\n          -24,\n          -46,\n          66,\n          -96,\n          59,\n          67,\n          66,\n          -77,\n          -126,\n          87,\n          66,\n          -70,\n          -105,\n          126,\n          66,\n          -114,\n          -62,\n          10,\n          66,\n          -69,\n          98,\n          -68,\n          66,\n          -86,\n          -33,\n          -119,\n          66,\n          -64,\n          -110,\n          -4,\n          66,\n          -116,\n          -71,\n          114,\n          66,\n          -118,\n          1,\n          30,\n          66,\n          -117,\n          -68,\n          -67,\n          66,\n          -100,\n          87,\n          -71,\n          66,\n          7,\n          -10,\n          -122,\n          66,\n          -110,\n          -82,\n          -26,\n          66,\n          81,\n          104,\n          36,\n          65,\n          28,\n          -36,\n          46,\n          66,\n          -109,\n          78,\n          33,\n          66,\n          88,\n          -30,\n          6,\n          66,\n          -110,\n          85,\n          -126,\n          66,\n          -72,\n          -85,\n          -13,\n          66,\n          87,\n          104,\n          -21,\n          66,\n          -76,\n          13,\n          -119,\n          66,\n          83,\n          7,\n          47,\n          66,\n          -119,\n          112,\n          -32,\n          66,\n          -71,\n          110,\n          122,\n          66,\n          -71,\n          38,\n          -35,\n          66,\n          103,\n          104,\n          17,\n          65,\n          -30,\n          
52,\n          20,\n          66,\n          71,\n          -45,\n          78,\n          66,\n          -71,\n          -4,\n          36,\n          66,\n          -85,\n          -76,\n          -109,\n          66,\n          -77,\n          2,\n          25,\n          66,\n          -110,\n          126,\n          4,\n          66,\n          -123,\n          -127,\n          -3,\n          66,\n          69,\n          78,\n          115,\n          66,\n          -67,\n          47,\n          110,\n          66,\n          -122,\n          -87,\n          60,\n          66,\n          76,\n          -96,\n          4,\n          66,\n          -74,\n          -66,\n          -72,\n          66,\n          -77,\n          66,\n          45,\n          66,\n          -101,\n          32,\n          60,\n          66,\n          91,\n          126,\n          -82,\n          66,\n          -62,\n          -70,\n          27,\n          66,\n          73,\n          11,\n          36,\n          66,\n          86,\n          -103,\n          -123,\n          66,\n          -67,\n          -121,\n          -57,\n          66,\n          -101,\n          24,\n          102,\n          66,\n          -91,\n          -19,\n          -123,\n          66,\n          -61,\n          -4,\n          116,\n          66,\n          -100,\n          1,\n          -41,\n          66,\n          -69,\n          93,\n          -44,\n          66,\n          -72,\n          -27,\n          82,\n          66,\n          -125,\n          -115,\n          -36,\n          66,\n          -98,\n          112,\n          118,\n          66,\n          -125,\n          39,\n          52,\n          66,\n          4,\n          25,\n          108,\n          66,\n          -92,\n          -72,\n          -6,\n          66,\n          109,\n          -43,\n          33,\n          66,\n          -118,\n          16,\n          -68,\n          66,\n          -103,\n          -95,\n    
      -77,\n          66,\n          -69,\n          -112,\n          88,\n          66,\n          -110,\n          55,\n          16,\n          66,\n          -86,\n          -52,\n          108,\n          66,\n          -68,\n          -110,\n          -95,\n          66,\n          94,\n          126,\n          -49,\n          66,\n          73,\n          122,\n          -83,\n          66,\n          106,\n          7,\n          -5,\n          66,\n          -65,\n          115,\n          -12,\n          66,\n          -102,\n          -87,\n          124,\n          66,\n          89,\n          110,\n          -49,\n          66,\n          -103,\n          -15,\n          -107,\n          66,\n          -63,\n          54,\n          42,\n          66,\n          -64,\n          0,\n          5,\n          66,\n          -106,\n          -11,\n          7,\n          66,\n          -69,\n          -54,\n          -34,\n          66,\n          -65,\n          -88,\n          36,\n          66,\n          -102,\n          -51,\n          -32,\n          66,\n          -122,\n          -89,\n          5,\n          66,\n          26,\n          109,\n          -118,\n          66,\n          79,\n          -38,\n          -83,\n          66,\n          -116,\n          82,\n          -62,\n          66,\n          -65,\n          5,\n          26,\n          66,\n          -98,\n          113,\n          -60,\n          66,\n          109,\n          -37,\n          -32,\n          66,\n          -109,\n          -46,\n          50,\n          66,\n          81,\n          119,\n          -24,\n          66,\n          -121,\n          26,\n          123,\n          66,\n          82,\n          -111,\n          -48,\n          66,\n          80,\n          59,\n          118,\n          66,\n          -122,\n          2,\n          12,\n          66,\n          73,\n          112,\n          67,\n          66,\n          -65,\n          -8,\n          
127,\n          66,\n          -109,\n          111,\n          -65,\n          66,\n          -84,\n          8,\n          68,\n          66,\n          -104,\n          120,\n          -71,\n          66,\n          -66,\n          -25,\n          -61,\n          66,\n          87,\n          -64,\n          99,\n          66,\n          78,\n          39,\n          -53,\n          66,\n          -71,\n          50,\n          -13,\n          66,\n          -122,\n          20,\n          97,\n          66,\n          -109,\n          123,\n          59,\n          66,\n          -110,\n          -66,\n          -49,\n          66,\n          -107,\n          116,\n          -36,\n          66,\n          -70,\n          -99,\n          116,\n          66,\n          -111,\n          -57,\n          -71,\n          66,\n          -79,\n          -21,\n          8,\n          66,\n          79,\n          -44,\n          44,\n          66,\n          -127,\n          -39,\n          -80,\n          66,\n          -75,\n          66,\n          61,\n          66,\n          -95,\n          70,\n          5,\n          66,\n          -114,\n          -109,\n          70,\n          66,\n          -67,\n          -42,\n          104,\n          66,\n          -71,\n          -103,\n          -121,\n          66,\n          -68,\n          -97,\n          -71,\n          66,\n          -127,\n          -85,\n          -122,\n          66,\n          79,\n          5,\n          -123,\n          66,\n          -123,\n          108,\n          110,\n          66,\n          -61,\n          75,\n          79,\n          66,\n          -109,\n          112,\n          -23,\n          66,\n          -120,\n          -98,\n          59,\n          66,\n          68,\n          14,\n          26,\n          66,\n          -69,\n          -26,\n          -16,\n          66,\n          -111,\n          14,\n          -38,\n          66,\n          107,\n          16,\n       
   -31,\n          66,\n          76,\n          -105,\n          -110,\n          66,\n          -113,\n          -8,\n          -50,\n          66,\n          -68,\n          127,\n          74,\n          66,\n          -101,\n          79,\n          -2,\n          66,\n          -113,\n          7,\n          106,\n          66,\n          -103,\n          117,\n          88,\n          66,\n          76,\n          -26,\n          108,\n          66,\n          82,\n          121,\n          -79,\n          66,\n          -101,\n          72,\n          -117,\n          66,\n          102,\n          -122,\n          -119,\n          66,\n          101,\n          116,\n          26,\n          66,\n          -65,\n          -65,\n          60,\n          66,\n          81,\n          3,\n          -121,\n          66,\n          -107,\n          -28,\n          -111,\n          66,\n          -111,\n          121,\n          1,\n          66,\n          -63,\n          -96,\n          -110,\n          66,\n          -82,\n          -67,\n          25,\n          66,\n          74,\n          -47,\n          117,\n          66,\n          91,\n          105,\n          37,\n          66,\n          93,\n          -104,\n          -85,\n          66,\n          -116,\n          -120,\n          -83,\n          66,\n          -85,\n          22,\n          56,\n          66,\n          -97,\n          -15,\n          74,\n          66,\n          -116,\n          85,\n          71,\n          66,\n          -66,\n          -62,\n          -85,\n          66,\n          -76,\n          84,\n          -58,\n          66,\n          -72,\n          -82,\n          -47,\n          66,\n          -114,\n          125,\n          -80,\n          66,\n          -106,\n          -124,\n          114,\n          66,\n          -102,\n          59,\n          105,\n          66,\n          93,\n          -74,\n          53,\n          66,\n          -70,\n          
-24,\n          -115,\n          66,\n          -61,\n          -121,\n          73,\n          66,\n          -90,\n          112,\n          -4,\n          66,\n          90,\n          -73,\n          -68,\n          66,\n          85,\n          54,\n          45,\n          66,\n          90,\n          100,\n          70,\n          66,\n          110,\n          -88,\n          -47,\n          66,\n          -111,\n          59,\n          100,\n          66,\n          -110,\n          -128,\n          -2,\n          66,\n          -115,\n          67,\n          -1,\n          66,\n          -63,\n          -13,\n          -84,\n          66,\n          77,\n          81,\n          -121,\n          66,\n          -111,\n          123,\n          98,\n          66,\n          94,\n          -47,\n          -31,\n          66,\n          -70,\n          101,\n          14,\n          66,\n          69,\n          -104,\n          -11,\n          66,\n          -72,\n          -17,\n          -58,\n          66,\n          81,\n          -54,\n          -83,\n          66,\n          107,\n          -65,\n          -103,\n          66,\n          -67,\n          111,\n          31,\n          66,\n          97,\n          -77,\n          125,\n          66,\n          93,\n          -79,\n          25,\n          66,\n          -65,\n          -56,\n          91,\n          66,\n          -72,\n          125,\n          -122,\n          66,\n          69,\n          -25,\n          122,\n          66,\n          78,\n          59,\n          55,\n          66,\n          117,\n          82,\n          79,\n          66,\n          -105,\n          12,\n          -81,\n          66,\n          -102,\n          30,\n          -94,\n          66,\n          -113,\n          111,\n          -65,\n          66,\n          84,\n          -29,\n          111,\n          66,\n          -61,\n          115,\n          108,\n          66,\n          -80,\n          
-100,\n          28,\n          66,\n          73,\n          -123,\n          85,\n          66,\n          77,\n          -21,\n          46,\n          66,\n          -119,\n          55,\n          -22,\n          66,\n          -90,\n          127,\n          -97,\n          66,\n          95,\n          72,\n          -31,\n          66,\n          77,\n          -38,\n          65,\n          66,\n          -67,\n          104,\n          -117,\n          66,\n          -59,\n          116,\n          46,\n          66,\n          -107,\n          5,\n          98,\n          66,\n          -85,\n          49,\n          -24,\n          66,\n          85,\n          72,\n          39,\n          66,\n          -66,\n          -5,\n          107,\n          66,\n          -81,\n          -50,\n          53,\n          66,\n          89,\n          41,\n          -31,\n          66,\n          -104,\n          -40,\n          -83,\n          66,\n          91,\n          119,\n          5,\n          66,\n          81,\n          87,\n          -10,\n          66,\n          -105,\n          57,\n          74,\n          66,\n          -65,\n          -24,\n          -58,\n          66,\n          71,\n          -2,\n          43,\n          66,\n          -63,\n          119,\n          -1,\n          66,\n          -100,\n          -14,\n          112,\n          66,\n          -62,\n          124,\n          -124,\n          66,\n          -100,\n          58,\n          -5,\n          66,\n          -106,\n          -97,\n          -35,\n          66,\n          -89,\n          26,\n          87,\n          66,\n          -101,\n          -2,\n          -32,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n         
 0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 231,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1098311399,\n          1161709991,\n          1162061360,\n          1141278415,\n          631172210,\n          625792513,\n          774832136,\n          1030808822,\n          596092733,\n          595486228,\n          595657519,\n          710979521,\n          13,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1155824059,\n          1162261382,\n          1162261358,\n          1119034435,\n          1018684795,\n          975716890,\n          1162023080,\n          969281689,\n          731535917,\n          639084055,\n          581150528,\n          586624567,\n          13,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        
\"nodeFreeIndexPointer\": 23,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 23,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -3659552152257293168,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          756914650,\n          655449419,\n          241077338,\n          773286393,\n          317564493,\n          922970742,\n          310240305,\n          661821857,\n          1047602866,\n          976735609,\n          729737213,\n          635283406,\n          717067626,\n          102160233,\n          59971026,\n          664525046,\n          991674161,\n          854027370,\n          499060198,\n          375451518,\n          871718259,\n          1046831037,\n          209254327,\n          585823959,\n          1052567975,\n          638646123,\n          874210494,\n          790981751,\n          909854517,\n          56866030,\n          636464493,\n          996341883,\n          922606834,\n          123508551,\n          91168441,\n          240574077,\n          1029876730,\n          458612681,\n          209,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -91,\n          73,\n          -35,\n          68,\n          121,\n          36,\n          102,\n          69,\n          84,\n          -10,\n          -91,\n          65,\n          -21,\n          -26,\n          -18,\n   
       66,\n          -122,\n          -37,\n          -37,\n          67,\n          117,\n          -38,\n          69,\n          66,\n          83,\n          -20,\n          55,\n          69,\n          6,\n          -119,\n          -126,\n          65,\n          93,\n          -84,\n          13,\n          66,\n          -64,\n          13,\n          -124,\n          66,\n          -116,\n          111,\n          29,\n          66,\n          106,\n          -79,\n          4,\n          66,\n          -78,\n          27,\n          -61,\n          66,\n          -73,\n          62,\n          66,\n          66,\n          124,\n          -106,\n          74,\n          66,\n          -120,\n          121,\n          -28,\n          66,\n          -120,\n          19,\n          -35,\n          66,\n          93,\n          -71,\n          -102,\n          66,\n          127,\n          122,\n          -106,\n          66,\n          95,\n          76,\n          -1,\n          66,\n          -64,\n          -38,\n          52,\n          66,\n          -81,\n          -115,\n          -11,\n          66,\n          -111,\n          -13,\n          73,\n          66,\n          127,\n          -108,\n          114,\n          66,\n          -113,\n          122,\n          81,\n          66,\n          -84,\n          -109,\n          105,\n          66,\n          -117,\n          -71,\n          77,\n          66,\n          69,\n          -106,\n          -87,\n          66,\n          -69,\n          -29,\n          -22,\n          66,\n          -116,\n          28,\n          -119,\n          66,\n          -62,\n          57,\n          -34,\n          66,\n          127,\n          -21,\n          -41,\n          66,\n          -87,\n          44,\n          56,\n          66,\n          -79,\n          -104,\n          -44,\n          64,\n          2,\n          60,\n          80,\n          66,\n          71,\n          -103,\n          39,\n 
         66,\n          -100,\n          22,\n          31,\n          66,\n          124,\n          126,\n          -115,\n          66,\n          -119,\n          39,\n          -78,\n          66,\n          -118,\n          -71,\n          -128,\n          66,\n          103,\n          -48,\n          42,\n          66,\n          -103,\n          46,\n          4,\n          66,\n          -103,\n          116,\n          -29,\n          65,\n          -1,\n          50,\n          -106,\n          66,\n          -81,\n          27,\n          -126,\n          66,\n          -118,\n          -16,\n          -126,\n          66,\n          73,\n          -28,\n          -126,\n          66,\n          -111,\n          -48,\n          -107,\n          66,\n          -96,\n          -64,\n          124,\n          66,\n          -121,\n          -15,\n          7,\n          66,\n          -80,\n          41,\n          18,\n          66,\n          -76,\n          91,\n          -20,\n          66,\n          78,\n          51,\n          76,\n          66,\n          75,\n          112,\n          8,\n          66,\n          -111,\n          -116,\n          -111,\n          66,\n          100,\n          -100,\n          70,\n          66,\n          104,\n          108,\n          47,\n          66,\n          9,\n          102,\n          -76,\n          66,\n          73,\n          97,\n          91,\n          66,\n          -109,\n          41,\n          21,\n          66,\n          -104,\n          42,\n          108,\n          66,\n          76,\n          -32,\n          54,\n          66,\n          123,\n          -123,\n          2,\n          66,\n          -112,\n          115,\n          22,\n          66,\n          120,\n          -86,\n          -81,\n          66,\n          -109,\n          -58,\n          47,\n          66,\n          -68,\n          -58,\n          81,\n          66,\n          -77,\n          -125,\n          
32,\n          66,\n          -114,\n          70,\n          12,\n          66,\n          114,\n          17,\n          -72,\n          66,\n          -110,\n          17,\n          71,\n          66,\n          -89,\n          -32,\n          -107,\n          66,\n          -59,\n          -10,\n          -23,\n          66,\n          9,\n          -120,\n          -89,\n          66,\n          -120,\n          -81,\n          -57,\n          66,\n          68,\n          -49,\n          -115,\n          66,\n          97,\n          -97,\n          -55,\n          66,\n          -118,\n          12,\n          -125,\n          66,\n          -109,\n          49,\n          -97,\n          66,\n          75,\n          -84,\n          -41,\n          66,\n          -109,\n          -21,\n          -97,\n          66,\n          -97,\n          -82,\n          63,\n          66,\n          -104,\n          0,\n          -107,\n          66,\n          92,\n          24,\n          -53,\n          66,\n          -60,\n          -19,\n          104,\n          66,\n          -57,\n          -51,\n          -28,\n          66,\n          123,\n          91,\n          -13,\n          66,\n          -61,\n          111,\n          64,\n          66,\n          -99,\n          37,\n          -21,\n          66,\n          -99,\n          79,\n          -43,\n          66,\n          -70,\n          -96,\n          -90,\n          66,\n          69,\n          50,\n          71,\n          66,\n          -67,\n          84,\n          -112,\n          66,\n          87,\n          -1,\n          -1,\n          66,\n          -106,\n          123,\n          117,\n          66,\n          78,\n          -98,\n          -8,\n          66,\n          -123,\n          122,\n          -35,\n          66,\n          -114,\n          -27,\n          -127,\n          66,\n          68,\n          50,\n          -80,\n          66,\n          102,\n          -61,\n          
-115,\n          66,\n          -97,\n          8,\n          109,\n          66,\n          -112,\n          44,\n          -10,\n          66,\n          -65,\n          56,\n          76,\n          66,\n          111,\n          25,\n          -34,\n          66,\n          124,\n          76,\n          -46,\n          66,\n          -77,\n          -73,\n          42,\n          66,\n          -70,\n          38,\n          39,\n          66,\n          -99,\n          69,\n          9,\n          66,\n          -64,\n          -92,\n          -96,\n          66,\n          96,\n          59,\n          -25,\n          66,\n          99,\n          -102,\n          33,\n          66,\n          -83,\n          35,\n          -34,\n          66,\n          121,\n          39,\n          -76,\n          66,\n          -64,\n          -70,\n          -41,\n          66,\n          -73,\n          -85,\n          -26,\n          66,\n          80,\n          17,\n          -6,\n          66,\n          78,\n          57,\n          -49,\n          66,\n          -102,\n          112,\n          119,\n          66,\n          -70,\n          102,\n          93,\n          66,\n          110,\n          -67,\n          43,\n          66,\n          89,\n          89,\n          50,\n          66,\n          79,\n          -83,\n          91,\n          66,\n          78,\n          8,\n          47,\n          66,\n          -63,\n          38,\n          -49,\n          66,\n          94,\n          2,\n          -105,\n          66,\n          -117,\n          -122,\n          -111,\n          66,\n          -107,\n          -17,\n          -29,\n          66,\n          -99,\n          70,\n          19,\n          66,\n          74,\n          -45,\n          -26,\n          66,\n          -59,\n          16,\n          -81,\n          66,\n          -60,\n          -17,\n          -96,\n          66,\n          -109,\n          107,\n          92,\n          
66,\n          98,\n          78,\n          62,\n          66,\n          -101,\n          98,\n          -57,\n          66,\n          -115,\n          39,\n          -53,\n          66,\n          -114,\n          -82,\n          91,\n          66,\n          102,\n          57,\n          -65,\n          66,\n          -93,\n          -111,\n          30,\n          66,\n          76,\n          -45,\n          -50,\n          66,\n          -74,\n          -8,\n          3,\n          66,\n          -99,\n          13,\n          -86,\n          66,\n          -108,\n          112,\n          8,\n          66,\n          -64,\n          -11,\n          -109,\n          66,\n          -100,\n          15,\n          -23,\n          66,\n          101,\n          108,\n          114,\n          66,\n          -119,\n          13,\n          -90,\n          66,\n          -62,\n          16,\n          -42,\n          66,\n          -115,\n          -24,\n          7,\n          66,\n          81,\n          111,\n          67,\n          66,\n          68,\n          19,\n          12,\n          66,\n          107,\n          37,\n          -39,\n          66,\n          91,\n          88,\n          -80,\n          66,\n          -108,\n          64,\n          -89,\n          66,\n          -111,\n          114,\n          40,\n          66,\n          -115,\n          -28,\n          -26,\n          66,\n          70,\n          -79,\n          60,\n          66,\n          -67,\n          -53,\n          -78,\n          66,\n          -120,\n          -30,\n          -44,\n          66,\n          -104,\n          45,\n          -89,\n          66,\n          -65,\n          -27,\n          11,\n          66,\n          -108,\n          -97,\n          -88,\n          66,\n          -81,\n          -74,\n          -36,\n          66,\n          -116,\n          2,\n          -104,\n          66,\n          70,\n          -28,\n          120,\n          
66,\n          80,\n          -24,\n          40,\n          66,\n          -69,\n          116,\n          11,\n          66,\n          -69,\n          53,\n          -57,\n          66,\n          74,\n          -66,\n          -114,\n          66,\n          -100,\n          117,\n          -121,\n          66,\n          -104,\n          34,\n          107,\n          66,\n          -122,\n          88,\n          2,\n          66,\n          -87,\n          -125,\n          -38,\n          66,\n          73,\n          43,\n          -5,\n          66,\n          90,\n          91,\n          -26,\n          66,\n          -75,\n          74,\n          -82,\n          66,\n          75,\n          -105,\n          110,\n          66,\n          -106,\n          53,\n          64,\n          66,\n          119,\n          -103,\n          -16,\n          66,\n          -61,\n          -20,\n          -41,\n          66,\n          -116,\n          -35,\n          7,\n          66,\n          -111,\n          -89,\n          -52,\n          66,\n          97,\n          103,\n          94,\n          66,\n          121,\n          -99,\n          -53,\n          66,\n          21,\n          -89,\n          -128,\n          66,\n          -60,\n          -14,\n          -33,\n          66,\n          -59,\n          124,\n          86,\n          66,\n          86,\n          -16,\n          53,\n          66,\n          69,\n          74,\n          -59,\n          66,\n          -83,\n          82,\n          8,\n          66,\n          -101,\n          -100,\n          79,\n          66,\n          -78,\n          -91,\n          -120,\n          66,\n          -95,\n          -94,\n          -104,\n          66,\n          -57,\n          51,\n          -50,\n          66,\n          91,\n          -13,\n          -90,\n          66,\n          78,\n          29,\n          71,\n          66,\n          81,\n          23,\n          -72,\n          66,\n  
        54,\n          -109,\n          -94,\n          66,\n          83,\n          127,\n          18,\n          66,\n          94,\n          -40,\n          1,\n          66,\n          -60,\n          85,\n          55,\n          66,\n          -109,\n          72,\n          -76,\n          66,\n          -96,\n          32,\n          67,\n          66,\n          -105,\n          22,\n          -9,\n          66,\n          93,\n          27,\n          48,\n          66,\n          -119,\n          -116,\n          -53,\n          66,\n          -108,\n          45,\n          112,\n          66,\n          107,\n          -58,\n          124,\n          66,\n          -70,\n          -70,\n          46,\n          66,\n          -65,\n          -69,\n          -74,\n          66,\n          -67,\n          45,\n          100,\n          66,\n          126,\n          64,\n          1,\n          66,\n          70,\n          46,\n          -98,\n          66,\n          102,\n          98,\n          -81,\n          66,\n          -102,\n          -40,\n          -128,\n          66,\n          -118,\n          6,\n          -100,\n          66,\n          72,\n          22,\n          -127,\n          66,\n          -75,\n          54,\n          25,\n          66,\n          87,\n          -14,\n          89,\n          66,\n          -70,\n          94,\n          -54,\n          66,\n          -108,\n          -53,\n          -67,\n          66,\n          -62,\n          -13,\n          -107,\n          66,\n          -64,\n          9,\n          -46,\n          66,\n          -109,\n          78,\n          -53,\n          66,\n          -77,\n          56,\n          -5,\n          66,\n          76,\n          -42,\n          -85,\n          66,\n          91,\n          62,\n          1,\n          66,\n          -112,\n          10,\n          14,\n          66,\n          -106,\n          34,\n          26,\n          66,\n          -106,\n 
         -102,\n          127,\n          66,\n          -61,\n          52,\n          102,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 230,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          717437978,\n          1119213980,\n          1118444668,\n          989286208,\n          970882574,\n          988223299,\n          726931786,\n          1117026662,\n          710338696,\n          602037004,\n          712655080,\n          969089990,\n          4,\n          0\n     
   ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162258294,\n          975725585,\n          1104078506,\n          1104332129,\n          597802270,\n          988276139,\n          602574268,\n          1117551892,\n          975479669,\n          626329463,\n          1011657829,\n          968552234,\n          4,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 24,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 24,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -1557102390726603767,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          334547014,\n          454423597,\n          590996778,\n          207284134,\n          1064297783,\n          340994793,\n          911955070,\n          337419323,\n          183057998,\n          757176701,\n          531099199,\n          1004924066,\n          459475923,\n          849672573,\n          1023248702,\n          634775925,\n          924285275,\n          497880063,\n          41503966,\n          317367421,\n          886170957,\n          202459491,\n          933685293,\n          720539343,\n          187430363,\n          179875177,\n          324372037,\n          237219754,\n          91203445,\n          190356075,\n          349218209,\n          196730430,\n          247066034,\n          259757171,\n          615832889,\n 
         1020566075,\n          868218209,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          106,\n          119,\n          16,\n          67,\n          -41,\n          2,\n          -41,\n          69,\n          30,\n          -127,\n          72,\n          66,\n          -86,\n          -67,\n          -82,\n          66,\n          96,\n          80,\n          3,\n          66,\n          -112,\n          -16,\n          -50,\n          66,\n          -116,\n          -81,\n          95,\n          66,\n          73,\n          -35,\n          108,\n          66,\n          -120,\n          -70,\n          31,\n          66,\n          70,\n          83,\n          102,\n          66,\n          125,\n          3,\n          124,\n          65,\n          -98,\n          71,\n          -4,\n          66,\n          -72,\n          111,\n          75,\n          66,\n          -99,\n          -123,\n          -22,\n          65,\n          -51,\n          113,\n          -47,\n          66,\n          87,\n          8,\n          -67,\n          66,\n          77,\n          -107,\n          -119,\n          66,\n          -122,\n          -66,\n          -30,\n          66,\n          -64,\n          -80,\n          -100,\n          66,\n          -122,\n          126,\n          77,\n          66,\n          75,\n          86,\n          8,\n          66,\n          -93,\n          118,\n          -125,\n          66,\n          -96,\n          -95,\n          85,\n          66,\n          -77,\n          -115,\n          -64,\n          65,\n          -101,\n          -72,\n          91,\n          66,\n          -126,\n          50,\n          83,\n          66,\n          -110,\n          -106,\n          -110,\n          66,\n          32,\n          -121,\n          -85,\n          66,\n          -71,\n          51,\n          -51,\n          66,\n      
    124,\n          -46,\n          65,\n          66,\n          -113,\n          -5,\n          16,\n          66,\n          116,\n          0,\n          120,\n          66,\n          -68,\n          -100,\n          -114,\n          66,\n          -72,\n          16,\n          83,\n          66,\n          -108,\n          3,\n          120,\n          66,\n          -77,\n          -58,\n          124,\n          66,\n          -77,\n          -56,\n          23,\n          66,\n          87,\n          -17,\n          -19,\n          66,\n          -124,\n          110,\n          -78,\n          66,\n          -61,\n          -53,\n          104,\n          66,\n          -115,\n          82,\n          120,\n          66,\n          96,\n          84,\n          -118,\n          66,\n          97,\n          -19,\n          57,\n          66,\n          -126,\n          -100,\n          73,\n          66,\n          71,\n          17,\n          -50,\n          66,\n          -111,\n          117,\n          -25,\n          66,\n          -105,\n          -116,\n          -25,\n          66,\n          -78,\n          55,\n          13,\n          66,\n          -71,\n          -24,\n          21,\n          66,\n          -76,\n          111,\n          -91,\n          66,\n          87,\n          -68,\n          124,\n          66,\n          -60,\n          -114,\n          -55,\n          66,\n          -69,\n          -90,\n          104,\n          66,\n          -88,\n          54,\n          109,\n          66,\n          -117,\n          -81,\n          -5,\n          66,\n          78,\n          -104,\n          13,\n          66,\n          -78,\n          -37,\n          -81,\n          66,\n          109,\n          59,\n          -105,\n          66,\n          -75,\n          77,\n          61,\n          66,\n          -73,\n          -16,\n          -2,\n          66,\n          119,\n          31,\n          -51,\n          66,\n      
    -115,\n          97,\n          106,\n          66,\n          122,\n          83,\n          -28,\n          66,\n          84,\n          126,\n          43,\n          66,\n          -93,\n          -9,\n          28,\n          66,\n          59,\n          -82,\n          -85,\n          66,\n          -83,\n          92,\n          74,\n          66,\n          -89,\n          -54,\n          126,\n          66,\n          79,\n          -118,\n          47,\n          66,\n          102,\n          -114,\n          87,\n          66,\n          -78,\n          58,\n          79,\n          66,\n          -111,\n          -102,\n          113,\n          66,\n          111,\n          -24,\n          100,\n          66,\n          -75,\n          -123,\n          -113,\n          66,\n          -74,\n          -105,\n          -86,\n          66,\n          -70,\n          -7,\n          -20,\n          66,\n          -77,\n          63,\n          19,\n          66,\n          -127,\n          123,\n          11,\n          66,\n          -103,\n          -81,\n          1,\n          66,\n          117,\n          -37,\n          -107,\n          66,\n          -61,\n          110,\n          -109,\n          66,\n          -112,\n          -20,\n          -123,\n          66,\n          -92,\n          117,\n          45,\n          66,\n          -117,\n          -18,\n          87,\n          66,\n          -69,\n          -98,\n          73,\n          66,\n          -117,\n          -78,\n          16,\n          66,\n          -59,\n          -123,\n          -84,\n          66,\n          75,\n          114,\n          35,\n          66,\n          -125,\n          -73,\n          -75,\n          66,\n          -80,\n          -28,\n          111,\n          66,\n          -100,\n          -97,\n          -22,\n          66,\n          -110,\n          115,\n          103,\n          66,\n          -59,\n          91,\n          27,\n          
66,\n          81,\n          73,\n          -100,\n          66,\n          -120,\n          6,\n          -53,\n          66,\n          -69,\n          -114,\n          103,\n          66,\n          98,\n          -100,\n          15,\n          66,\n          -68,\n          75,\n          80,\n          66,\n          -61,\n          97,\n          8,\n          66,\n          -75,\n          -20,\n          54,\n          66,\n          -120,\n          38,\n          -95,\n          66,\n          77,\n          -115,\n          20,\n          66,\n          72,\n          121,\n          -115,\n          66,\n          78,\n          -18,\n          -7,\n          66,\n          -66,\n          -24,\n          -34,\n          66,\n          -79,\n          85,\n          -91,\n          66,\n          -67,\n          34,\n          -47,\n          66,\n          -71,\n          -97,\n          7,\n          66,\n          -86,\n          15,\n          -126,\n          66,\n          -67,\n          -111,\n          43,\n          66,\n          -124,\n          82,\n          -6,\n          66,\n          -80,\n          72,\n          120,\n          66,\n          78,\n          61,\n          -11,\n          66,\n          -113,\n          9,\n          73,\n          66,\n          -107,\n          -105,\n          0,\n          66,\n          74,\n          -107,\n          65,\n          66,\n          -100,\n          -5,\n          -63,\n          66,\n          -111,\n          1,\n          -65,\n          66,\n          -102,\n          71,\n          -19,\n          66,\n          -127,\n          109,\n          80,\n          66,\n          -103,\n          55,\n          110,\n          66,\n          -90,\n          -48,\n          35,\n          66,\n          -104,\n          -36,\n          46,\n          66,\n          93,\n          -28,\n          53,\n          66,\n          -103,\n          54,\n          -61,\n          66,\n     
     -62,\n          71,\n          31,\n          66,\n          102,\n          111,\n          46,\n          66,\n          70,\n          -87,\n          -36,\n          66,\n          -60,\n          89,\n          -90,\n          66,\n          -57,\n          -19,\n          -12,\n          66,\n          -99,\n          25,\n          -52,\n          66,\n          -73,\n          -61,\n          -71,\n          66,\n          -127,\n          -128,\n          -114,\n          66,\n          -110,\n          105,\n          84,\n          66,\n          -65,\n          29,\n          -31,\n          66,\n          -100,\n          55,\n          14,\n          66,\n          -86,\n          -38,\n          -7,\n          66,\n          81,\n          71,\n          -122,\n          66,\n          73,\n          92,\n          -56,\n          66,\n          -89,\n          -5,\n          -38,\n          65,\n          -96,\n          -96,\n          -50,\n          66,\n          -90,\n          29,\n          -59,\n          66,\n          74,\n          -85,\n          -47,\n          66,\n          -109,\n          3,\n          -13,\n          66,\n          76,\n          99,\n          47,\n          66,\n          -61,\n          17,\n          111,\n          66,\n          -96,\n          -65,\n          -69,\n          66,\n          83,\n          118,\n          0,\n          66,\n          -66,\n          83,\n          30,\n          66,\n          -106,\n          -48,\n          -54,\n          66,\n          -116,\n          83,\n          43,\n          66,\n          103,\n          -5,\n          33,\n          65,\n          -11,\n          -15,\n          -113,\n          66,\n          -121,\n          117,\n          -19,\n          66,\n          82,\n          -62,\n          -13,\n          66,\n          -117,\n          -124,\n          60,\n          66,\n          -106,\n          -99,\n          -47,\n          66,\n          
-59,\n          -62,\n          32,\n          66,\n          -113,\n          102,\n          89,\n          66,\n          114,\n          53,\n          -126,\n          66,\n          -97,\n          -64,\n          -22,\n          66,\n          85,\n          84,\n          50,\n          66,\n          -61,\n          70,\n          -6,\n          66,\n          -113,\n          6,\n          -53,\n          66,\n          96,\n          -101,\n          -23,\n          66,\n          85,\n          103,\n          19,\n          66,\n          -68,\n          -25,\n          67,\n          66,\n          85,\n          -47,\n          74,\n          66,\n          -114,\n          124,\n          95,\n          66,\n          89,\n          -23,\n          -27,\n          66,\n          -111,\n          -90,\n          20,\n          66,\n          85,\n          -104,\n          2,\n          66,\n          -77,\n          90,\n          99,\n          66,\n          -79,\n          -107,\n          -7,\n          66,\n          98,\n          124,\n          111,\n          66,\n          86,\n          56,\n          -10,\n          66,\n          -72,\n          -104,\n          57,\n          66,\n          -111,\n          -65,\n          -43,\n          66,\n          -113,\n          35,\n          -104,\n          66,\n          -99,\n          102,\n          -102,\n          66,\n          -118,\n          71,\n          -40,\n          66,\n          -109,\n          -45,\n          18,\n          66,\n          -103,\n          -112,\n          71,\n          66,\n          -119,\n          58,\n          60,\n          66,\n          -100,\n          55,\n          -33,\n          66,\n          -64,\n          -52,\n          -19,\n          66,\n          -67,\n          -48,\n          121,\n          66,\n          -114,\n          -113,\n          112,\n          66,\n          83,\n          64,\n          -8,\n          66,\n          
119,\n          12,\n          64,\n          66,\n          122,\n          25,\n          32,\n          66,\n          -91,\n          33,\n          -72,\n          66,\n          -68,\n          -24,\n          37,\n          66,\n          -95,\n          -76,\n          122,\n          66,\n          80,\n          110,\n          60,\n          66,\n          73,\n          92,\n          -48,\n          66,\n          77,\n          -11,\n          106,\n          66,\n          86,\n          93,\n          -57,\n          66,\n          112,\n          69,\n          16,\n          66,\n          81,\n          122,\n          28,\n          66,\n          -117,\n          -57,\n          -67,\n          66,\n          69,\n          -124,\n          -37,\n          66,\n          82,\n          28,\n          -55,\n          66,\n          91,\n          -74,\n          99,\n          66,\n          -111,\n          15,\n          -113,\n          66,\n          -104,\n          43,\n          115,\n          66,\n          82,\n          2,\n          74,\n          66,\n          -117,\n          -66,\n          -97,\n          66,\n          110,\n          1,\n          39,\n          66,\n          -78,\n          -119,\n          -32,\n          66,\n          88,\n          -13,\n          34,\n          66,\n          -106,\n          12,\n          105,\n          66,\n          -60,\n          99,\n          -51,\n          66,\n          -112,\n          -78,\n          -118,\n          66,\n          -104,\n          107,\n          124,\n          66,\n          -74,\n          59,\n          45,\n          66,\n          -98,\n          28,\n          12,\n          66,\n          92,\n          99,\n          -38,\n          66,\n          -122,\n          -31,\n          8,\n          66,\n          68,\n          63,\n          53,\n          66,\n          103,\n          119,\n          94,\n          66,\n          -108,\n          
54,\n          31,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 
222,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          985046669,\n          1142949442,\n          1119015724,\n          1157213840,\n          773164723,\n          987705700,\n          597867872,\n          581689228,\n          1157399005,\n          1119154712,\n          973334957,\n          803749,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1033114486,\n          1142420885,\n          1102739963,\n          1142361671,\n          758300587,\n          975665623,\n          712458250,\n          581862005,\n          1117610944,\n          1104097918,\n          585916618,\n          798136,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 32,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 32,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -6409199319416524683,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          1021646933,\n          647268721,\n          716742249,\n          116602150,\n          460414571,\n          41242661,\n          103863414,\n          800447167,\n          883388197,\n          932936781,\n          451910127,\n          1061072235,\n          169843189,\n          116108163,\n          987602797,\n          191433166,\n          880127831,\n          194078438,\n  
        1001698749,\n          258674287,\n          317697218,\n          576575781,\n          228028153,\n          437316001,\n          480413485,\n          249747785,\n          64201949,\n          523103735,\n          633141329,\n          204699517,\n          733193805,\n          208571447,\n          499115686,\n          974299113,\n          489373154,\n          1033721647,\n          257926714,\n          27354329,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          67,\n          -82,\n          -91,\n          100,\n          68,\n          90,\n          28,\n          55,\n          67,\n          120,\n          94,\n          -14,\n          67,\n          -43,\n          -13,\n          -27,\n          67,\n          -116,\n          -125,\n          -100,\n          66,\n          -83,\n          -122,\n          -36,\n          66,\n          -122,\n          -92,\n          41,\n          65,\n          -124,\n          -104,\n          -92,\n          66,\n          -69,\n          22,\n          -26,\n          66,\n          -114,\n          10,\n          -72,\n          65,\n          -21,\n          -30,\n          -124,\n          66,\n          -115,\n          37,\n          3,\n          66,\n          -99,\n          -87,\n          114,\n          66,\n          69,\n          -63,\n          -99,\n          66,\n          99,\n          119,\n          -122,\n          66,\n          -95,\n          10,\n          -75,\n          66,\n          99,\n          36,\n          11,\n          66,\n          126,\n          -8,\n          25,\n          66,\n          -69,\n          -58,\n          91,\n          66,\n          -110,\n          -31,\n          41,\n          66,\n          -106,\n          -126,\n          51,\n          66,\n          -68,\n          59,\n          119,\n          66,\n          -128,\n          26,\n          -61,\n          
66,\n          96,\n          53,\n          -93,\n          66,\n          97,\n          -86,\n          88,\n          66,\n          93,\n          69,\n          -124,\n          66,\n          120,\n          -105,\n          34,\n          66,\n          -70,\n          111,\n          -87,\n          66,\n          111,\n          100,\n          -122,\n          66,\n          -99,\n          -4,\n          47,\n          66,\n          -123,\n          122,\n          -38,\n          66,\n          -100,\n          -57,\n          28,\n          65,\n          84,\n          -116,\n          -116,\n          66,\n          -95,\n          -28,\n          -96,\n          65,\n          10,\n          -28,\n          -35,\n          66,\n          -107,\n          -30,\n          13,\n          66,\n          -73,\n          -69,\n          117,\n          66,\n          87,\n          -87,\n          -60,\n          66,\n          -117,\n          -77,\n          109,\n          66,\n          -108,\n          81,\n          -43,\n          65,\n          32,\n          72,\n          -117,\n          66,\n          76,\n          -57,\n          -107,\n          66,\n          116,\n          6,\n          -7,\n          66,\n          -105,\n          15,\n          -4,\n          66,\n          -62,\n          -99,\n          124,\n          66,\n          71,\n          -43,\n          -43,\n          66,\n          79,\n          -81,\n          -22,\n          65,\n          -84,\n          -40,\n          38,\n          66,\n          -110,\n          -74,\n          -88,\n          66,\n          -93,\n          -49,\n          -26,\n          66,\n          115,\n          -72,\n          11,\n          66,\n          -95,\n          -5,\n          -128,\n          66,\n          -63,\n          79,\n          -41,\n          66,\n          -84,\n          -58,\n          96,\n          66,\n          -121,\n          33,\n          -78,\n         
 66,\n          -81,\n          125,\n          18,\n          66,\n          79,\n          68,\n          57,\n          66,\n          -69,\n          24,\n          -91,\n          66,\n          -115,\n          61,\n          111,\n          66,\n          -126,\n          77,\n          17,\n          66,\n          81,\n          124,\n          76,\n          66,\n          104,\n          -99,\n          87,\n          66,\n          -68,\n          -56,\n          89,\n          66,\n          118,\n          -74,\n          52,\n          66,\n          -72,\n          -32,\n          4,\n          66,\n          -98,\n          115,\n          -77,\n          66,\n          98,\n          -40,\n          127,\n          66,\n          -113,\n          -3,\n          -97,\n          66,\n          -128,\n          -66,\n          -89,\n          66,\n          -107,\n          31,\n          -113,\n          66,\n          123,\n          56,\n          -43,\n          66,\n          104,\n          35,\n          38,\n          66,\n          -109,\n          92,\n          -126,\n          66,\n          88,\n          -76,\n          69,\n          66,\n          -80,\n          16,\n          -93,\n          66,\n          83,\n          40,\n          123,\n          66,\n          -108,\n          -104,\n          30,\n          66,\n          119,\n          -14,\n          25,\n          66,\n          90,\n          112,\n          11,\n          64,\n          -109,\n          94,\n          88,\n          66,\n          -63,\n          -62,\n          29,\n          66,\n          84,\n          9,\n          -87,\n          66,\n          -89,\n          99,\n          120,\n          66,\n          117,\n          100,\n          21,\n          66,\n          -111,\n          -81,\n          -67,\n          66,\n          88,\n          12,\n          1,\n          66,\n          110,\n          2,\n          88,\n          66,\n          
115,\n          -103,\n          13,\n          66,\n          123,\n          50,\n          -6,\n          66,\n          -118,\n          68,\n          21,\n          66,\n          -67,\n          31,\n          6,\n          66,\n          -62,\n          34,\n          123,\n          66,\n          -66,\n          -85,\n          10,\n          66,\n          -78,\n          -60,\n          55,\n          66,\n          -70,\n          70,\n          90,\n          66,\n          -113,\n          -32,\n          111,\n          66,\n          92,\n          67,\n          -113,\n          66,\n          -73,\n          -7,\n          -107,\n          66,\n          115,\n          -76,\n          113,\n          66,\n          73,\n          109,\n          37,\n          66,\n          95,\n          37,\n          78,\n          66,\n          -74,\n          -71,\n          90,\n          66,\n          -61,\n          -68,\n          100,\n          66,\n          68,\n          94,\n          -88,\n          66,\n          -102,\n          -110,\n          -88,\n          66,\n          -74,\n          49,\n          -110,\n          66,\n          -121,\n          125,\n          93,\n          66,\n          -109,\n          -2,\n          60,\n          66,\n          -96,\n          -21,\n          -107,\n          66,\n          -101,\n          -28,\n          -39,\n          66,\n          -105,\n          -82,\n          3,\n          66,\n          -120,\n          127,\n          118,\n          66,\n          74,\n          89,\n          28,\n          66,\n          -106,\n          -80,\n          -96,\n          66,\n          97,\n          -101,\n          -23,\n          66,\n          78,\n          115,\n          -108,\n          66,\n          80,\n          -20,\n          -102,\n          66,\n          -83,\n          -122,\n          120,\n          66,\n          -87,\n          -110,\n          97,\n          66,\n          
101,\n          -121,\n          -54,\n          66,\n          -78,\n          -108,\n          -95,\n          66,\n          -94,\n          -98,\n          11,\n          66,\n          69,\n          -88,\n          -107,\n          63,\n          -69,\n          -59,\n          4,\n          66,\n          -83,\n          -16,\n          72,\n          66,\n          -115,\n          -52,\n          -11,\n          66,\n          -89,\n          105,\n          -13,\n          66,\n          -95,\n          116,\n          71,\n          66,\n          -67,\n          -91,\n          -65,\n          66,\n          84,\n          54,\n          -77,\n          66,\n          -103,\n          -106,\n          -90,\n          66,\n          -84,\n          101,\n          6,\n          66,\n          -102,\n          42,\n          4,\n          66,\n          101,\n          -56,\n          71,\n          66,\n          78,\n          41,\n          38,\n          66,\n          -81,\n          -88,\n          36,\n          66,\n          -110,\n          -51,\n          112,\n          66,\n          -72,\n          -87,\n          -86,\n          66,\n          -112,\n          -86,\n          -107,\n          66,\n          -89,\n          93,\n          -14,\n          66,\n          -71,\n          -47,\n          -27,\n          66,\n          -99,\n          98,\n          -38,\n          66,\n          -103,\n          -100,\n          -70,\n          66,\n          -102,\n          -84,\n          -11,\n          66,\n          -88,\n          -6,\n          -12,\n          66,\n          -88,\n          83,\n          80,\n          66,\n          -103,\n          -90,\n          -27,\n          66,\n          -101,\n          -121,\n          -49,\n          66,\n          -65,\n          117,\n          -74,\n          66,\n          -75,\n          107,\n          11,\n          66,\n          -108,\n          124,\n          -16,\n          66,\n 
         -72,\n          -43,\n          88,\n          66,\n          -65,\n          119,\n          -34,\n          66,\n          -112,\n          8,\n          55,\n          66,\n          -73,\n          59,\n          100,\n          66,\n          74,\n          53,\n          69,\n          66,\n          -103,\n          31,\n          112,\n          66,\n          -64,\n          12,\n          38,\n          66,\n          -121,\n          -112,\n          -33,\n          66,\n          83,\n          -99,\n          -80,\n          66,\n          -99,\n          -110,\n          63,\n          66,\n          -99,\n          -77,\n          92,\n          66,\n          81,\n          54,\n          -46,\n          66,\n          87,\n          -7,\n          -84,\n          66,\n          77,\n          -88,\n          62,\n          66,\n          103,\n          -127,\n          -3,\n          66,\n          -64,\n          -106,\n          16,\n          66,\n          69,\n          42,\n          68,\n          66,\n          -98,\n          105,\n          -98,\n          66,\n          -60,\n          83,\n          -96,\n          66,\n          -59,\n          -125,\n          106,\n          66,\n          -100,\n          -18,\n          -48,\n          66,\n          79,\n          117,\n          -19,\n          66,\n          -64,\n          36,\n          -5,\n          66,\n          -111,\n          -28,\n          87,\n          66,\n          86,\n          126,\n          6,\n          66,\n          -92,\n          70,\n          119,\n          66,\n          -70,\n          61,\n          -73,\n          66,\n          77,\n          42,\n          48,\n          66,\n          -64,\n          -67,\n          126,\n          66,\n          -105,\n          -46,\n          -41,\n          66,\n          -71,\n          2,\n          44,\n          66,\n          -96,\n          -38,\n          10,\n          66,\n          92,\n 
         13,\n          -5,\n          66,\n          79,\n          -62,\n          -109,\n          66,\n          -114,\n          113,\n          -104,\n          66,\n          78,\n          101,\n          37,\n          66,\n          -128,\n          -32,\n          97,\n          66,\n          93,\n          -47,\n          -108,\n          66,\n          -98,\n          -102,\n          12,\n          66,\n          -73,\n          24,\n          -42,\n          66,\n          -79,\n          -36,\n          124,\n          66,\n          -67,\n          -88,\n          80,\n          66,\n          -114,\n          -104,\n          -38,\n          66,\n          -101,\n          19,\n          46,\n          66,\n          125,\n          4,\n          -81,\n          66,\n          75,\n          46,\n          90,\n          66,\n          -68,\n          -41,\n          -94,\n          66,\n          -106,\n          -109,\n          -115,\n          66,\n          79,\n          -87,\n          -21,\n          66,\n          84,\n          -111,\n          -73,\n          66,\n          -97,\n          106,\n          120,\n          66,\n          -108,\n          27,\n          -80,\n          66,\n          -123,\n          -62,\n          67,\n          66,\n          -61,\n          69,\n          -42,\n          66,\n          109,\n          -32,\n          40,\n          66,\n          93,\n          -39,\n          -122,\n          66,\n          -61,\n          -122,\n          -65,\n          66,\n          -62,\n          -32,\n          -47,\n          66,\n          -64,\n          -17,\n          72,\n          66,\n          92,\n          -124,\n          26,\n          66,\n          -119,\n          -118,\n          116,\n          66,\n          -111,\n          -8,\n          28,\n          66,\n          -59,\n          68,\n          115,\n          66,\n          -114,\n          72,\n          -69,\n          66,\n          
-69,\n          55,\n          127,\n          66,\n          -73,\n          27,\n          -40,\n          66,\n          -115,\n          -59,\n          -3,\n          66,\n          -98,\n          96,\n          52,\n          66,\n          70,\n          -107,\n          1,\n          66,\n          -114,\n          -73,\n          -104,\n          66,\n          71,\n          -21,\n          54,\n          66,\n          -110,\n          46,\n          -119,\n          66,\n          -60,\n          -125,\n          13,\n          66,\n          -118,\n          108,\n          -42,\n          66,\n          -67,\n          59,\n          -69,\n          66,\n          -63,\n          -110,\n          -63,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          
0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 227,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162176173,\n          1099306471,\n          974102908,\n          1161730016,\n          581376037,\n          602456443,\n          731705777,\n          730120136,\n          588297802,\n          1112579032,\n          629513977,\n          237465553,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162018588,\n          1117027489,\n          989520461,\n          1027747025,\n          588108236,\n          1112815571,\n          581396173,\n          772451791,\n          988473614,\n          1025956354,\n          588049028,\n          194419561,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 27,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 27,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -6381589943519458827,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          
0,\n          31,\n          255,\n          352281810,\n          446095089,\n          1012246111,\n          849907043,\n          782611623,\n          763426931,\n          992660639,\n          215657078,\n          217409703,\n          623679705,\n          241085029,\n          364113786,\n          363982511,\n          92009673,\n          56743473,\n          790423497,\n          653489239,\n          863761909,\n          36568358,\n          209016702,\n          385558117,\n          329419439,\n          107947858,\n          472772570,\n          656668410,\n          358792059,\n          1014472255,\n          321861093,\n          379960866,\n          305114406,\n          706424673,\n          190769787,\n          441896547,\n          120269177,\n          334486061,\n          855607679,\n          259879998,\n          639696419,\n          14,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -27,\n          -117,\n          -90,\n          69,\n          97,\n          -12,\n          50,\n          67,\n          -57,\n          -21,\n          87,\n          68,\n          -102,\n          77,\n          4,\n          65,\n          25,\n          -115,\n          120,\n          67,\n          8,\n          11,\n          -116,\n          66,\n          -116,\n          94,\n          116,\n          66,\n          121,\n          76,\n          -37,\n          66,\n          -117,\n          19,\n          78,\n          66,\n          -126,\n          98,\n          -52,\n          66,\n          -117,\n          -81,\n          -40,\n          66,\n          -111,\n          -85,\n          88,\n          66,\n          -126,\n          -29,\n          -69,\n          66,\n          -88,\n          -7,\n          -57,\n          66,\n          -98,\n          11,\n          74,\n          65,\n          -37,\n          -93,\n          -4,\n          66,\n      
    96,\n          42,\n          -108,\n          66,\n          -81,\n          -79,\n          -108,\n          65,\n          -18,\n          31,\n          48,\n          66,\n          -126,\n          101,\n          -124,\n          64,\n          -26,\n          20,\n          106,\n          66,\n          -120,\n          -85,\n          42,\n          66,\n          -66,\n          -20,\n          -61,\n          66,\n          -89,\n          97,\n          -40,\n          66,\n          76,\n          40,\n          52,\n          66,\n          -104,\n          105,\n          71,\n          66,\n          -103,\n          -12,\n          -36,\n          66,\n          -126,\n          -59,\n          39,\n          66,\n          -60,\n          -91,\n          106,\n          66,\n          121,\n          -62,\n          119,\n          66,\n          82,\n          22,\n          9,\n          66,\n          80,\n          9,\n          127,\n          66,\n          -67,\n          -96,\n          28,\n          66,\n          -95,\n          -80,\n          -43,\n          64,\n          -17,\n          81,\n          57,\n          66,\n          -62,\n          -59,\n          -11,\n          66,\n          82,\n          -47,\n          -96,\n          65,\n          17,\n          1,\n          108,\n          66,\n          101,\n          -97,\n          -41,\n          66,\n          -98,\n          120,\n          -45,\n          66,\n          -74,\n          -71,\n          36,\n          66,\n          -117,\n          100,\n          -84,\n          66,\n          -71,\n          -104,\n          44,\n          66,\n          3,\n          5,\n          89,\n          66,\n          -74,\n          108,\n          -89,\n          66,\n          -103,\n          117,\n          20,\n          66,\n          -108,\n          70,\n          -21,\n          66,\n          -66,\n          80,\n          -44,\n          66,\n          
68,\n          25,\n          -51,\n          66,\n          -110,\n          -115,\n          -23,\n          66,\n          -64,\n          68,\n          94,\n          66,\n          -66,\n          -120,\n          -121,\n          66,\n          109,\n          -85,\n          125,\n          66,\n          -68,\n          -28,\n          75,\n          66,\n          -88,\n          -9,\n          -106,\n          66,\n          -60,\n          -127,\n          58,\n          66,\n          -62,\n          -62,\n          -80,\n          66,\n          -97,\n          -106,\n          4,\n          66,\n          -62,\n          98,\n          -73,\n          66,\n          -79,\n          115,\n          -97,\n          66,\n          -101,\n          122,\n          -20,\n          66,\n          112,\n          -1,\n          -33,\n          66,\n          -73,\n          24,\n          -74,\n          66,\n          -96,\n          125,\n          81,\n          66,\n          -109,\n          -28,\n          -41,\n          66,\n          86,\n          -55,\n          -66,\n          66,\n          -76,\n          40,\n          -39,\n          66,\n          111,\n          41,\n          50,\n          66,\n          104,\n          14,\n          91,\n          66,\n          98,\n          62,\n          49,\n          66,\n          124,\n          45,\n          -39,\n          66,\n          -70,\n          15,\n          4,\n          66,\n          77,\n          41,\n          -110,\n          66,\n          -93,\n          -6,\n          20,\n          66,\n          99,\n          -34,\n          1,\n          66,\n          100,\n          3,\n          -51,\n          66,\n          77,\n          -116,\n          56,\n          66,\n          -75,\n          30,\n          45,\n          66,\n          -126,\n          -62,\n          35,\n          66,\n          -80,\n          -60,\n          8,\n          66,\n          -124,\n       
   -109,\n          -7,\n          66,\n          86,\n          112,\n          -118,\n          66,\n          78,\n          33,\n          -33,\n          66,\n          -68,\n          -124,\n          -95,\n          66,\n          -88,\n          26,\n          26,\n          66,\n          -104,\n          77,\n          38,\n          66,\n          -116,\n          23,\n          46,\n          66,\n          78,\n          26,\n          36,\n          66,\n          -73,\n          -34,\n          -20,\n          66,\n          -126,\n          44,\n          -102,\n          66,\n          -86,\n          93,\n          -39,\n          66,\n          -60,\n          93,\n          -47,\n          66,\n          -116,\n          -41,\n          -55,\n          66,\n          -102,\n          -118,\n          108,\n          66,\n          -97,\n          -105,\n          117,\n          66,\n          74,\n          -79,\n          -27,\n          63,\n          -63,\n          -29,\n          -25,\n          66,\n          -81,\n          -80,\n          -2,\n          66,\n          -111,\n          -21,\n          86,\n          66,\n          -68,\n          115,\n          -45,\n          66,\n          73,\n          63,\n          7,\n          66,\n          115,\n          84,\n          -107,\n          66,\n          -127,\n          105,\n          -86,\n          66,\n          92,\n          -90,\n          -57,\n          66,\n          -114,\n          -34,\n          -122,\n          66,\n          88,\n          -110,\n          1,\n          64,\n          -56,\n          29,\n          42,\n          66,\n          -104,\n          1,\n          -41,\n          66,\n          -97,\n          -78,\n          -4,\n          66,\n          -119,\n          -30,\n          -98,\n          66,\n          78,\n          105,\n          69,\n          66,\n          110,\n          -20,\n          -121,\n          66,\n          -60,\n      
    -35,\n          -72,\n          66,\n          -112,\n          -1,\n          15,\n          66,\n          -57,\n          70,\n          -128,\n          66,\n          114,\n          63,\n          97,\n          66,\n          -111,\n          -117,\n          99,\n          66,\n          -83,\n          -24,\n          -41,\n          66,\n          92,\n          109,\n          19,\n          66,\n          -66,\n          30,\n          -66,\n          66,\n          -107,\n          3,\n          0,\n          66,\n          77,\n          -54,\n          -44,\n          66,\n          -116,\n          100,\n          -15,\n          66,\n          -71,\n          -105,\n          -61,\n          66,\n          87,\n          115,\n          98,\n          66,\n          74,\n          24,\n          27,\n          66,\n          73,\n          111,\n          -59,\n          66,\n          -115,\n          -90,\n          -58,\n          66,\n          -87,\n          -36,\n          51,\n          66,\n          -117,\n          -92,\n          61,\n          66,\n          -94,\n          -94,\n          -110,\n          66,\n          -104,\n          114,\n          66,\n          66,\n          -72,\n          -55,\n          53,\n          66,\n          -64,\n          -123,\n          -45,\n          66,\n          -107,\n          -119,\n          -63,\n          66,\n          -79,\n          -51,\n          85,\n          66,\n          -91,\n          -4,\n          22,\n          66,\n          116,\n          -98,\n          105,\n          66,\n          -74,\n          -25,\n          -95,\n          66,\n          -78,\n          -99,\n          119,\n          66,\n          69,\n          100,\n          -23,\n          66,\n          123,\n          55,\n          -96,\n          66,\n          -76,\n          -64,\n          59,\n          66,\n          -62,\n          -82,\n          -67,\n          66,\n          -70,\n      
    -12,\n          18,\n          66,\n          97,\n          -99,\n          -87,\n          66,\n          -101,\n          101,\n          -49,\n          66,\n          103,\n          0,\n          18,\n          66,\n          -65,\n          -41,\n          -114,\n          66,\n          76,\n          -41,\n          20,\n          66,\n          96,\n          54,\n          67,\n          66,\n          87,\n          125,\n          118,\n          66,\n          -69,\n          34,\n          90,\n          66,\n          -120,\n          0,\n          4,\n          66,\n          -75,\n          -32,\n          -116,\n          66,\n          -66,\n          5,\n          -37,\n          66,\n          100,\n          52,\n          -67,\n          66,\n          -96,\n          32,\n          -104,\n          66,\n          97,\n          81,\n          -87,\n          66,\n          96,\n          -79,\n          93,\n          66,\n          127,\n          91,\n          80,\n          66,\n          -81,\n          39,\n          -43,\n          66,\n          -99,\n          27,\n          -50,\n          66,\n          95,\n          -44,\n          55,\n          66,\n          -111,\n          -70,\n          -28,\n          66,\n          -76,\n          41,\n          -38,\n          66,\n          -70,\n          33,\n          58,\n          66,\n          -108,\n          -107,\n          -34,\n          66,\n          -61,\n          75,\n          -39,\n          66,\n          -101,\n          116,\n          -67,\n          66,\n          102,\n          -77,\n          50,\n          66,\n          82,\n          -24,\n          -56,\n          66,\n          -65,\n          -45,\n          117,\n          66,\n          112,\n          118,\n          -25,\n          66,\n          -74,\n          -41,\n          -73,\n          66,\n          -96,\n          9,\n          127,\n          66,\n          96,\n          -84,\n     
     115,\n          66,\n          89,\n          -7,\n          -127,\n          66,\n          -70,\n          75,\n          6,\n          66,\n          -107,\n          -42,\n          -108,\n          66,\n          -97,\n          -29,\n          -59,\n          66,\n          87,\n          -123,\n          51,\n          66,\n          73,\n          31,\n          71,\n          66,\n          -64,\n          -65,\n          -34,\n          66,\n          -113,\n          -104,\n          10,\n          66,\n          -126,\n          99,\n          -85,\n          66,\n          74,\n          -79,\n          114,\n          66,\n          90,\n          -55,\n          20,\n          66,\n          -70,\n          3,\n          17,\n          66,\n          -92,\n          13,\n          -101,\n          66,\n          -101,\n          -90,\n          116,\n          66,\n          -100,\n          -117,\n          -94,\n          66,\n          104,\n          -104,\n          58,\n          66,\n          84,\n          -62,\n          79,\n          66,\n          99,\n          -55,\n          -37,\n          66,\n          -97,\n          88,\n          -66,\n          66,\n          -125,\n          59,\n          26,\n          66,\n          -96,\n          118,\n          28,\n          66,\n          -122,\n          84,\n          -93,\n          66,\n          78,\n          -47,\n          113,\n          66,\n          -65,\n          92,\n          64,\n          66,\n          -74,\n          40,\n          102,\n          66,\n          -66,\n          -65,\n          -22,\n          66,\n          87,\n          115,\n          -52,\n          66,\n          -120,\n          -5,\n          -102,\n          66,\n          -107,\n          20,\n          -90,\n          66,\n          -86,\n          113,\n          -26,\n          66,\n          121,\n          -99,\n          124,\n          66,\n          -64,\n          23,\n        
  14,\n          66,\n          -108,\n          30,\n          -44,\n          66,\n          113,\n          31,\n          11,\n          66,\n          79,\n          -112,\n          -29,\n          66,\n          -80,\n          -91,\n          22,\n          66,\n          109,\n          -92,\n          -51,\n          66,\n          93,\n          0,\n          -90,\n          66,\n          -121,\n          -111,\n          -25,\n          66,\n          -62,\n          118,\n          21,\n          66,\n          -113,\n          34,\n          116,\n          66,\n          -107,\n          -99,\n          45,\n          66,\n          -83,\n          79,\n          101,\n          66,\n          101,\n          -114,\n          36,\n          66,\n          86,\n          89,\n          -54,\n          66,\n          76,\n          -61,\n          83,\n          66,\n          -93,\n          57,\n          -6,\n          66,\n          83,\n          47,\n          -113,\n          66,\n          -109,\n          -89,\n          -8,\n          66,\n          -79,\n          -45,\n          -118,\n          66,\n          86,\n          62,\n          120,\n          66,\n          -70,\n          -108,\n          -128,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n   
       0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 229,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          586632050,\n          1161551879,\n          774283210,\n          1013980649,\n          758107400,\n          1140803963,\n          724700843,\n          1147909361,\n          753917911,\n          1156707913,\n          1116904261,\n          982900129,\n          1,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1160660299,\n          759776164,\n          643546618,\n          581690938,\n          631086548,\n          1104609203,\n          772475854,\n          1018765715,\n          624737357,\n          717188386,\n          643920232,\n          726273913,\n          1,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 25,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 25,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 4315119003239682117,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      
\"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          919976138,\n          598153421,\n          911850099,\n          1058089295,\n          472597809,\n          48325977,\n          184355545,\n          1025637586,\n          1067899175,\n          804300141,\n          588347067,\n          780007033,\n          226026913,\n          867506907,\n          535546303,\n          486001593,\n          367601249,\n          200992302,\n          995732201,\n          731609342,\n          903854405,\n          460159671,\n          611882022,\n          876015154,\n          720693681,\n          220133731,\n          603434450,\n          207341437,\n          989431535,\n          907646381,\n          1052191299,\n          381257029,\n          346068294,\n          134122171,\n          759129197,\n          321561653,\n          917574851,\n          784835,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          83,\n          89,\n          37,\n          69,\n          82,\n          -50,\n          5,\n          66,\n          -57,\n          -42,\n          -67,\n          66,\n          75,\n          -14,\n          -61,\n          66,\n          -104,\n          -98,\n          -94,\n          66,\n          64,\n          58,\n          67,\n          66,\n          -100,\n          -40,\n          -101,\n          66,\n          -61,\n          -57,\n          -43,\n          66,\n          -69,\n          -108,\n          33,\n          66,\n          -85,\n          
-128,\n          -59,\n          66,\n          -76,\n          -57,\n          123,\n          66,\n          -122,\n          122,\n          -6,\n          66,\n          93,\n          -81,\n          -41,\n          66,\n          105,\n          23,\n          65,\n          66,\n          -91,\n          -54,\n          123,\n          64,\n          -112,\n          -84,\n          114,\n          66,\n          -109,\n          -75,\n          -26,\n          66,\n          113,\n          24,\n          113,\n          66,\n          85,\n          -108,\n          77,\n          66,\n          -75,\n          27,\n          -12,\n          66,\n          -82,\n          45,\n          -1,\n          66,\n          -67,\n          56,\n          -46,\n          66,\n          -120,\n          72,\n          -118,\n          66,\n          106,\n          92,\n          -8,\n          65,\n          -65,\n          -14,\n          123,\n          66,\n          -104,\n          -75,\n          120,\n          66,\n          -105,\n          -101,\n          27,\n          66,\n          -69,\n          69,\n          -21,\n          66,\n          -64,\n          -32,\n          -73,\n          66,\n          -78,\n          -77,\n          35,\n          66,\n          -109,\n          -80,\n          75,\n          66,\n          -58,\n          -26,\n          -96,\n          66,\n          -113,\n          -67,\n          69,\n          66,\n          -80,\n          86,\n          -5,\n          66,\n          -65,\n          -66,\n          -49,\n          66,\n          -119,\n          21,\n          23,\n          66,\n          -121,\n          58,\n          32,\n          66,\n          -83,\n          -60,\n          5,\n          66,\n          -75,\n          117,\n          -42,\n          66,\n          -88,\n          33,\n          94,\n          66,\n          122,\n          25,\n          28,\n          66,\n          -109,\n          
7,\n          -86,\n          66,\n          -77,\n          10,\n          -91,\n          66,\n          -74,\n          -22,\n          -67,\n          66,\n          90,\n          -72,\n          -37,\n          66,\n          95,\n          -105,\n          57,\n          66,\n          -62,\n          -121,\n          26,\n          66,\n          -67,\n          104,\n          -82,\n          66,\n          109,\n          -112,\n          -127,\n          66,\n          -110,\n          -77,\n          -43,\n          66,\n          -66,\n          -33,\n          -102,\n          66,\n          -122,\n          59,\n          22,\n          66,\n          -65,\n          -78,\n          52,\n          66,\n          72,\n          -91,\n          -104,\n          66,\n          49,\n          -65,\n          49,\n          66,\n          87,\n          -127,\n          -112,\n          66,\n          -110,\n          -101,\n          0,\n          66,\n          -117,\n          -29,\n          -39,\n          66,\n          71,\n          57,\n          33,\n          66,\n          113,\n          -19,\n          -50,\n          66,\n          90,\n          -90,\n          -44,\n          66,\n          -103,\n          86,\n          63,\n          66,\n          -116,\n          123,\n          30,\n          66,\n          -72,\n          116,\n          -1,\n          66,\n          -107,\n          -77,\n          108,\n          66,\n          -104,\n          54,\n          -4,\n          66,\n          -128,\n          27,\n          -101,\n          66,\n          75,\n          -43,\n          61,\n          66,\n          -110,\n          -79,\n          -93,\n          66,\n          106,\n          -66,\n          114,\n          66,\n          88,\n          -25,\n          -58,\n          66,\n          70,\n          -89,\n          24,\n          66,\n          -120,\n          -124,\n          -97,\n          66,\n          -110,\n   
       98,\n          -1,\n          66,\n          -106,\n          -16,\n          9,\n          66,\n          -112,\n          -44,\n          10,\n          66,\n          114,\n          38,\n          -21,\n          66,\n          -96,\n          125,\n          67,\n          66,\n          107,\n          71,\n          92,\n          66,\n          -81,\n          -26,\n          55,\n          66,\n          -60,\n          20,\n          9,\n          66,\n          -60,\n          49,\n          -105,\n          66,\n          97,\n          39,\n          16,\n          66,\n          -114,\n          50,\n          105,\n          66,\n          68,\n          -36,\n          -20,\n          66,\n          -124,\n          -56,\n          39,\n          66,\n          -69,\n          -59,\n          -14,\n          66,\n          115,\n          92,\n          -17,\n          66,\n          -66,\n          -104,\n          118,\n          66,\n          69,\n          -63,\n          26,\n          66,\n          -93,\n          85,\n          -52,\n          66,\n          -105,\n          94,\n          8,\n          66,\n          -67,\n          -89,\n          5,\n          66,\n          101,\n          -109,\n          55,\n          66,\n          108,\n          -81,\n          18,\n          66,\n          -59,\n          72,\n          -86,\n          66,\n          -92,\n          -66,\n          102,\n          66,\n          90,\n          43,\n          -55,\n          66,\n          -102,\n          -121,\n          -50,\n          66,\n          -61,\n          115,\n          -36,\n          66,\n          -62,\n          34,\n          -112,\n          66,\n          -71,\n          -32,\n          -61,\n          66,\n          -65,\n          -20,\n          24,\n          66,\n          -104,\n          -102,\n          -123,\n          66,\n          -99,\n          -90,\n          -43,\n          66,\n          -94,\n         
 34,\n          56,\n          66,\n          105,\n          -38,\n          -79,\n          66,\n          -101,\n          21,\n          -96,\n          66,\n          -113,\n          63,\n          121,\n          66,\n          78,\n          115,\n          36,\n          66,\n          -68,\n          -119,\n          -77,\n          66,\n          81,\n          -69,\n          -40,\n          66,\n          -119,\n          42,\n          127,\n          66,\n          -123,\n          -60,\n          123,\n          66,\n          -76,\n          -35,\n          -55,\n          66,\n          104,\n          16,\n          1,\n          66,\n          -77,\n          9,\n          -31,\n          66,\n          -68,\n          -51,\n          122,\n          66,\n          -96,\n          43,\n          89,\n          66,\n          -120,\n          73,\n          -2,\n          66,\n          -117,\n          28,\n          81,\n          66,\n          -78,\n          35,\n          108,\n          66,\n          -63,\n          100,\n          56,\n          66,\n          71,\n          24,\n          112,\n          66,\n          -107,\n          -35,\n          10,\n          66,\n          -67,\n          61,\n          77,\n          65,\n          -21,\n          -95,\n          -6,\n          66,\n          -109,\n          -66,\n          55,\n          66,\n          -71,\n          36,\n          -32,\n          66,\n          -63,\n          60,\n          50,\n          66,\n          -70,\n          102,\n          17,\n          66,\n          -104,\n          104,\n          57,\n          66,\n          -73,\n          28,\n          62,\n          66,\n          47,\n          -8,\n          -81,\n          66,\n          -100,\n          -95,\n          98,\n          66,\n          -95,\n          81,\n          93,\n          66,\n          92,\n          13,\n          71,\n          66,\n          -66,\n          -54,\n         
 122,\n          66,\n          -63,\n          77,\n          50,\n          66,\n          -99,\n          -82,\n          -97,\n          66,\n          92,\n          74,\n          -122,\n          66,\n          -109,\n          -118,\n          -27,\n          66,\n          76,\n          115,\n          67,\n          66,\n          -71,\n          -36,\n          21,\n          66,\n          -111,\n          71,\n          -8,\n          66,\n          -112,\n          28,\n          -97,\n          66,\n          -63,\n          -60,\n          41,\n          66,\n          -119,\n          -55,\n          -26,\n          66,\n          89,\n          77,\n          77,\n          66,\n          -109,\n          -16,\n          8,\n          66,\n          77,\n          13,\n          41,\n          66,\n          100,\n          -82,\n          -88,\n          66,\n          -62,\n          -94,\n          -42,\n          66,\n          -111,\n          -120,\n          108,\n          66,\n          -116,\n          -87,\n          -74,\n          66,\n          -85,\n          -71,\n          114,\n          66,\n          -65,\n          -107,\n          5,\n          66,\n          -65,\n          55,\n          -72,\n          66,\n          93,\n          46,\n          -12,\n          66,\n          82,\n          82,\n          55,\n          66,\n          81,\n          -112,\n          8,\n          66,\n          -90,\n          97,\n          -110,\n          66,\n          -106,\n          124,\n          -113,\n          66,\n          80,\n          99,\n          -46,\n          66,\n          -106,\n          71,\n          -109,\n          66,\n          99,\n          -91,\n          -57,\n          66,\n          -98,\n          -77,\n          -87,\n          66,\n          -61,\n          -33,\n          -29,\n          66,\n          84,\n          -59,\n          -104,\n          66,\n          91,\n          -116,\n          
25,\n          66,\n          -111,\n          -48,\n          -125,\n          66,\n          86,\n          -124,\n          -119,\n          65,\n          88,\n          -3,\n          18,\n          66,\n          126,\n          -53,\n          -55,\n          66,\n          -122,\n          76,\n          40,\n          66,\n          -108,\n          -4,\n          -37,\n          66,\n          -91,\n          -95,\n          -35,\n          66,\n          115,\n          -121,\n          49,\n          66,\n          -92,\n          -55,\n          102,\n          66,\n          96,\n          69,\n          82,\n          66,\n          96,\n          -1,\n          98,\n          66,\n          -88,\n          -72,\n          76,\n          66,\n          -67,\n          30,\n          -29,\n          66,\n          -64,\n          -70,\n          80,\n          66,\n          -118,\n          -105,\n          7,\n          66,\n          80,\n          -97,\n          111,\n          66,\n          -100,\n          -56,\n          -21,\n          66,\n          -71,\n          41,\n          -91,\n          66,\n          -114,\n          44,\n          -88,\n          66,\n          103,\n          -116,\n          31,\n          66,\n          69,\n          62,\n          -4,\n          66,\n          86,\n          -94,\n          -120,\n          66,\n          -66,\n          124,\n          -51,\n          66,\n          -66,\n          -93,\n          -86,\n          66,\n          -116,\n          -101,\n          -78,\n          66,\n          -103,\n          33,\n          102,\n          66,\n          -79,\n          -114,\n          -114,\n          66,\n          -74,\n          -52,\n          -69,\n          66,\n          85,\n          16,\n          -30,\n          66,\n          -89,\n          22,\n          36,\n          66,\n          -68,\n          -27,\n          19,\n          66,\n          -111,\n          -31,\n         
 -102,\n          66,\n          86,\n          -99,\n          -21,\n          66,\n          72,\n          123,\n          -8,\n          66,\n          -100,\n          77,\n          -26,\n          66,\n          99,\n          97,\n          -105,\n          66,\n          -113,\n          -38,\n          -9,\n          66,\n          -64,\n          -46,\n          -26,\n          66,\n          70,\n          7,\n          83,\n          66,\n          -82,\n          -56,\n          -22,\n          66,\n          -117,\n          53,\n          89,\n          66,\n          -117,\n          -17,\n          80,\n          66,\n          -79,\n          61,\n          77,\n          66,\n          -84,\n          -79,\n          98,\n          66,\n          -95,\n          -50,\n          63,\n          66,\n          -120,\n          62,\n          72,\n          66,\n          76,\n          98,\n          43,\n          66,\n          -80,\n          43,\n          43,\n          66,\n          -108,\n          -42,\n          47,\n          66,\n          -62,\n          69,\n          -86,\n          66,\n          85,\n          103,\n          74,\n          66,\n          90,\n          -95,\n          88,\n          66,\n          102,\n          57,\n          45,\n          66,\n          -62,\n          124,\n          -80,\n          66,\n          -66,\n          -57,\n          -54,\n          66,\n          89,\n          -1,\n          22,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n     
     0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 226,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1119035087,\n          586452526,\n          731768003,\n          1112101646,\n          983611634,\n          730190915,\n          712121219,\n          583259686,\n          755627417,\n          712477904,\n          1102533682,\n          65339941,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162084315,\n          774289529,\n          1117434518,\n          1155167963,\n          581684128,\n          729935063,\n          602647289,\n          624965108,\n          983615629,\n          586682179,\n          581307889,\n          64592683,\n          0,\n          0\n        
],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 28,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 28,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6333024226119915415,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          725305810,\n          707364282,\n          574150990,\n          392820601,\n          524205869,\n          100595622,\n          601299947,\n          363825915,\n          521203237,\n          716237767,\n          503184678,\n          116342333,\n          975918205,\n          443637058,\n          780005205,\n          529975137,\n          124696053,\n          327093930,\n          460105249,\n          325913157,\n          591047777,\n          473073847,\n          213079766,\n          1033567925,\n          66516678,\n          259783970,\n          401528123,\n          737008446,\n          439068133,\n          514641389,\n          757262194,\n          857007535,\n          513349057,\n          1016106223,\n          371975618,\n          765893559,\n          799241414,\n          989324881,\n          1050789593,\n          23,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          66,\n          -69,\n          50,\n          -36,\n          68,\n          -15,\n          -80,\n          -76,\n          68,\n          69,\n          -46,\n          2,\n          
68,\n          -115,\n          115,\n          3,\n          66,\n          88,\n          47,\n          -2,\n          66,\n          -107,\n          -60,\n          59,\n          66,\n          -67,\n          123,\n          105,\n          66,\n          -104,\n          -96,\n          -74,\n          66,\n          -107,\n          -88,\n          10,\n          66,\n          108,\n          -38,\n          41,\n          66,\n          -67,\n          102,\n          -49,\n          66,\n          -116,\n          -64,\n          111,\n          66,\n          -64,\n          -72,\n          -109,\n          66,\n          -79,\n          -46,\n          72,\n          66,\n          -68,\n          -64,\n          28,\n          66,\n          -105,\n          69,\n          69,\n          66,\n          71,\n          87,\n          84,\n          66,\n          -111,\n          9,\n          36,\n          66,\n          -122,\n          126,\n          90,\n          66,\n          81,\n          -107,\n          -9,\n          66,\n          -98,\n          28,\n          51,\n          66,\n          78,\n          105,\n          -19,\n          66,\n          -60,\n          43,\n          32,\n          66,\n          119,\n          -80,\n          -59,\n          66,\n          -97,\n          -37,\n          -48,\n          66,\n          -112,\n          39,\n          78,\n          66,\n          124,\n          90,\n          -61,\n          66,\n          -93,\n          -110,\n          5,\n          66,\n          78,\n          31,\n          77,\n          66,\n          96,\n          -14,\n          -70,\n          66,\n          -81,\n          -13,\n          -74,\n          66,\n          -99,\n          -28,\n          -40,\n          66,\n          -106,\n          81,\n          -6,\n          66,\n          -95,\n          27,\n          104,\n          66,\n          89,\n          -123,\n          -2,\n          66,\n     
     -59,\n          -50,\n          68,\n          66,\n          95,\n          30,\n          -110,\n          66,\n          104,\n          40,\n          -12,\n          66,\n          -63,\n          62,\n          54,\n          66,\n          -83,\n          50,\n          -54,\n          66,\n          -122,\n          25,\n          60,\n          66,\n          -109,\n          -50,\n          -117,\n          66,\n          96,\n          -22,\n          -102,\n          66,\n          104,\n          -115,\n          90,\n          66,\n          -74,\n          63,\n          -60,\n          66,\n          79,\n          26,\n          39,\n          66,\n          -70,\n          -62,\n          -58,\n          66,\n          -80,\n          -33,\n          22,\n          66,\n          -109,\n          67,\n          68,\n          66,\n          -122,\n          60,\n          22,\n          66,\n          84,\n          49,\n          -17,\n          66,\n          -116,\n          -84,\n          -52,\n          66,\n          57,\n          -22,\n          119,\n          66,\n          46,\n          11,\n          18,\n          66,\n          68,\n          26,\n          -122,\n          66,\n          -70,\n          -16,\n          -100,\n          66,\n          -62,\n          -117,\n          66,\n          66,\n          -114,\n          -111,\n          -128,\n          66,\n          77,\n          -18,\n          11,\n          66,\n          -125,\n          78,\n          101,\n          66,\n          -64,\n          68,\n          100,\n          66,\n          -118,\n          57,\n          -17,\n          66,\n          84,\n          29,\n          -32,\n          66,\n          101,\n          -90,\n          2,\n          65,\n          43,\n          10,\n          -122,\n          66,\n          -79,\n          63,\n          -112,\n          66,\n          -92,\n          91,\n          84,\n          66,\n          
-121,\n          -114,\n          -24,\n          66,\n          89,\n          55,\n          -15,\n          66,\n          -76,\n          110,\n          -54,\n          66,\n          -63,\n          32,\n          -43,\n          66,\n          76,\n          38,\n          -97,\n          66,\n          -97,\n          119,\n          86,\n          66,\n          83,\n          -121,\n          50,\n          66,\n          -97,\n          -13,\n          -126,\n          66,\n          -68,\n          126,\n          -78,\n          66,\n          -66,\n          101,\n          -86,\n          66,\n          -100,\n          45,\n          83,\n          66,\n          -84,\n          -57,\n          95,\n          66,\n          -74,\n          100,\n          -97,\n          66,\n          89,\n          -109,\n          77,\n          66,\n          -77,\n          -85,\n          -125,\n          66,\n          -112,\n          121,\n          -42,\n          66,\n          -115,\n          54,\n          111,\n          66,\n          -124,\n          95,\n          -96,\n          66,\n          -61,\n          -106,\n          -75,\n          66,\n          110,\n          -5,\n          119,\n          66,\n          -123,\n          -9,\n          -118,\n          66,\n          118,\n          78,\n          73,\n          66,\n          70,\n          51,\n          101,\n          66,\n          -102,\n          -35,\n          -93,\n          66,\n          91,\n          72,\n          -91,\n          66,\n          -98,\n          -60,\n          -107,\n          66,\n          -114,\n          114,\n          -28,\n          66,\n          -105,\n          -13,\n          26,\n          66,\n          83,\n          -62,\n          -75,\n          66,\n          -122,\n          -91,\n          54,\n          66,\n          72,\n          -60,\n          -75,\n          66,\n          -105,\n          -91,\n          -116,\n          66,\n 
         -120,\n          97,\n          2,\n          66,\n          -78,\n          -27,\n          -65,\n          66,\n          83,\n          -8,\n          66,\n          66,\n          -70,\n          101,\n          19,\n          66,\n          -109,\n          66,\n          -113,\n          66,\n          74,\n          -24,\n          -60,\n          66,\n          -65,\n          20,\n          -109,\n          66,\n          108,\n          12,\n          -38,\n          66,\n          -110,\n          -39,\n          49,\n          66,\n          -111,\n          118,\n          -25,\n          66,\n          -99,\n          -96,\n          -79,\n          66,\n          -120,\n          -105,\n          62,\n          66,\n          -107,\n          33,\n          -35,\n          66,\n          -61,\n          -76,\n          29,\n          66,\n          -113,\n          -2,\n          17,\n          66,\n          -115,\n          -122,\n          -75,\n          66,\n          -74,\n          122,\n          18,\n          66,\n          -67,\n          -113,\n          -102,\n          66,\n          -59,\n          -92,\n          15,\n          66,\n          -63,\n          -28,\n          31,\n          66,\n          -101,\n          107,\n          12,\n          66,\n          -97,\n          -84,\n          -77,\n          66,\n          109,\n          46,\n          -4,\n          66,\n          87,\n          -119,\n          -33,\n          66,\n          -114,\n          -96,\n          91,\n          66,\n          71,\n          -73,\n          -79,\n          66,\n          -115,\n          110,\n          36,\n          66,\n          88,\n          33,\n          118,\n          66,\n          -103,\n          35,\n          -28,\n          66,\n          -62,\n          57,\n          64,\n          66,\n          -111,\n          -72,\n          7,\n          66,\n          91,\n          -62,\n          -95,\n          
66,\n          -70,\n          -70,\n          -60,\n          66,\n          -62,\n          -25,\n          -127,\n          66,\n          -57,\n          -7,\n          74,\n          66,\n          -112,\n          26,\n          -80,\n          66,\n          -82,\n          -77,\n          76,\n          66,\n          127,\n          -56,\n          -31,\n          66,\n          -59,\n          9,\n          -13,\n          66,\n          -114,\n          -18,\n          47,\n          66,\n          -128,\n          -27,\n          -53,\n          66,\n          97,\n          67,\n          112,\n          66,\n          -103,\n          -43,\n          60,\n          66,\n          -108,\n          16,\n          48,\n          66,\n          -72,\n          -44,\n          96,\n          66,\n          -76,\n          -9,\n          -9,\n          66,\n          -63,\n          -1,\n          -28,\n          66,\n          -117,\n          45,\n          103,\n          66,\n          -91,\n          37,\n          54,\n          66,\n          77,\n          102,\n          -80,\n          66,\n          -105,\n          91,\n          25,\n          66,\n          -80,\n          -73,\n          -104,\n          66,\n          -106,\n          53,\n          42,\n          66,\n          85,\n          -116,\n          -103,\n          66,\n          77,\n          125,\n          -116,\n          66,\n          78,\n          -122,\n          -86,\n          66,\n          97,\n          106,\n          58,\n          66,\n          105,\n          -87,\n          10,\n          66,\n          -113,\n          67,\n          -41,\n          66,\n          -128,\n          99,\n          46,\n          66,\n          -111,\n          -1,\n          62,\n          66,\n          -86,\n          5,\n          25,\n          66,\n          83,\n          -79,\n          24,\n          66,\n          -65,\n          -11,\n          -106,\n          66,\n 
         -111,\n          -95,\n          51,\n          66,\n          -65,\n          -62,\n          -89,\n          66,\n          90,\n          -49,\n          119,\n          66,\n          -75,\n          90,\n          20,\n          66,\n          -113,\n          67,\n          73,\n          66,\n          -107,\n          -113,\n          31,\n          66,\n          85,\n          60,\n          -127,\n          66,\n          -114,\n          -90,\n          -111,\n          66,\n          97,\n          -9,\n          0,\n          66,\n          -63,\n          12,\n          120,\n          66,\n          -118,\n          114,\n          -27,\n          66,\n          -111,\n          101,\n          12,\n          66,\n          74,\n          -10,\n          62,\n          66,\n          82,\n          -28,\n          109,\n          66,\n          -84,\n          -115,\n          -127,\n          66,\n          -61,\n          -115,\n          65,\n          66,\n          87,\n          34,\n          -103,\n          66,\n          -92,\n          39,\n          -59,\n          66,\n          83,\n          -115,\n          -99,\n          66,\n          -126,\n          -111,\n          88,\n          66,\n          -112,\n          -75,\n          -30,\n          66,\n          -64,\n          100,\n          -57,\n          66,\n          -73,\n          18,\n          15,\n          66,\n          -127,\n          -72,\n          -101,\n          66,\n          -115,\n          124,\n          63,\n          66,\n          -102,\n          -121,\n          40,\n          66,\n          -101,\n          92,\n          -82,\n          66,\n          -101,\n          -90,\n          -14,\n          66,\n          -124,\n          4,\n          17,\n          66,\n          -112,\n          -43,\n          114,\n          66,\n          -68,\n          -34,\n          24,\n          66,\n          -100,\n          -44,\n          -106,\n     
     66,\n          -62,\n          -66,\n          -104,\n          66,\n          -105,\n          -66,\n          106,\n          66,\n          84,\n          113,\n          -79,\n          66,\n          69,\n          17,\n          66,\n          66,\n          69,\n          -125,\n          37,\n          66,\n          86,\n          -91,\n          40,\n          66,\n          -105,\n          -40,\n          75,\n          66,\n          -98,\n          -126,\n          113,\n          66,\n          -67,\n          119,\n          -123,\n          66,\n          -60,\n          -2,\n          123,\n          66,\n          -70,\n          33,\n          -23,\n          66,\n          -114,\n          89,\n          121,\n          66,\n          -121,\n          -124,\n          -106,\n          66,\n          -70,\n          -26,\n          -121,\n          66,\n          107,\n          105,\n          -51,\n          66,\n          92,\n          -114,\n          33,\n          66,\n          -107,\n          -32,\n          24,\n          66,\n          -63,\n          -107,\n          -37,\n          66,\n          -116,\n          -80,\n          95,\n          66,\n          -79,\n          -90,\n          -58,\n          66,\n          -70,\n          106,\n          -32,\n          66,\n          -63,\n          -102,\n          70,\n          66,\n          -66,\n          56,\n          -36,\n          66,\n          -101,\n          -9,\n          -97,\n          66,\n          -79,\n          37,\n          76,\n          66,\n          -92,\n          91,\n          21,\n          66,\n          88,\n          7,\n          107,\n          66,\n          -120,\n          -43,\n          -74,\n          66,\n          -62,\n          -118,\n          -39,\n          66,\n          -128,\n          -13,\n          80,\n          66,\n          83,\n          120,\n          -24,\n          66,\n          99,\n          -90,\n          
101,\n          66,\n          -123,\n          27,\n          114,\n          66,\n          -117,\n          -70,\n          126,\n          66,\n          -96,\n          -69,\n          -9,\n          66,\n          -95,\n          48,\n          105,\n          66,\n          93,\n          120,\n          -57,\n          66,\n          -93,\n          104,\n          44,\n          66,\n          73,\n          15,\n          53,\n          66,\n          81,\n          -113,\n          -13,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 235,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1031526980,\n          1160647460,\n          624953951,\n          1147293544,\n          985265089,\n          629206132,\n          717259129,\n          
631289606,\n          597605485,\n          1155152623,\n          586091902,\n          586526062,\n          1093,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1162261427,\n          1157469749,\n          629166770,\n          1104313957,\n          601859371,\n          1140806993,\n          729488185,\n          712397668,\n          640915378,\n          1017098248,\n          586149910,\n          602647168,\n          1174,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 19,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 19,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 7119250912786373213,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          371250761,\n          54467019,\n          526247358,\n          733308751,\n          274331871,\n          123272041,\n          90887897,\n          799383353,\n          212970423,\n          765642462,\n          607554857,\n          599727953,\n          1042733287,\n          900966834,\n          1067826743,\n          47425093,\n          366059497,\n          212403539,\n          865144499,\n          475104463,\n          317101918,\n          571837785,\n          477616993,\n          762113089,\n          880712445,\n          588639557,\n          249354317,\n          37284826,\n          
873768693,\n          764752943,\n          662042155,\n          492263526,\n          45852887,\n          595420995,\n          934720617,\n          121212257,\n          778082150,\n          33389169,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          67,\n          -6,\n          -78,\n          -67,\n          69,\n          34,\n          72,\n          124,\n          66,\n          -127,\n          86,\n          -4,\n          66,\n          -110,\n          98,\n          63,\n          68,\n          -17,\n          28,\n          -118,\n          66,\n          74,\n          -114,\n          11,\n          66,\n          111,\n          93,\n          -34,\n          69,\n          124,\n          120,\n          100,\n          66,\n          -74,\n          -124,\n          -41,\n          66,\n          -67,\n          -41,\n          56,\n          66,\n          107,\n          98,\n          33,\n          66,\n          107,\n          -11,\n          121,\n          66,\n          -69,\n          -1,\n          -72,\n          66,\n          -115,\n          110,\n          -108,\n          66,\n          45,\n          -80,\n          34,\n          66,\n          77,\n          -105,\n          97,\n          66,\n          -122,\n          28,\n          -3,\n          66,\n          84,\n          75,\n          104,\n          66,\n          91,\n          126,\n          -22,\n          66,\n          -61,\n          -112,\n          -26,\n          66,\n          -109,\n          -39,\n          76,\n          66,\n          -70,\n          -11,\n          82,\n          66,\n          72,\n          -14,\n          8,\n          66,\n          -89,\n          45,\n          -106,\n          66,\n          72,\n          3,\n          -108,\n          66,\n          -73,\n          -109,\n          -85,\n          66,\n          -66,\n          125,\n          
-111,\n          66,\n          124,\n          99,\n          57,\n          66,\n          6,\n          -126,\n          114,\n          65,\n          22,\n          20,\n          -49,\n          66,\n          -97,\n          -20,\n          -91,\n          66,\n          113,\n          -83,\n          15,\n          66,\n          -96,\n          -76,\n          -70,\n          66,\n          -90,\n          42,\n          54,\n          66,\n          -117,\n          -94,\n          -65,\n          66,\n          108,\n          69,\n          9,\n          66,\n          -127,\n          114,\n          111,\n          66,\n          -63,\n          -35,\n          -21,\n          66,\n          -121,\n          22,\n          101,\n          66,\n          -104,\n          94,\n          88,\n          66,\n          -86,\n          126,\n          90,\n          66,\n          -73,\n          20,\n          -21,\n          66,\n          -104,\n          -29,\n          -42,\n          66,\n          -110,\n          -18,\n          -128,\n          66,\n          70,\n          45,\n          -78,\n          66,\n          126,\n          -16,\n          -29,\n          66,\n          -60,\n          -109,\n          52,\n          66,\n          70,\n          114,\n          19,\n          66,\n          127,\n          64,\n          -27,\n          66,\n          -108,\n          -115,\n          -29,\n          66,\n          -83,\n          61,\n          114,\n          66,\n          4,\n          -28,\n          -1,\n          66,\n          70,\n          -114,\n          -16,\n          66,\n          -68,\n          -101,\n          116,\n          66,\n          -70,\n          31,\n          -122,\n          66,\n          -65,\n          31,\n          15,\n          66,\n          -107,\n          -124,\n          -40,\n          66,\n          -106,\n          -49,\n          -122,\n          66,\n          -76,\n          16,\n       
   -90,\n          66,\n          -60,\n          99,\n          -92,\n          66,\n          -118,\n          -58,\n          -112,\n          66,\n          -118,\n          -118,\n          -46,\n          66,\n          84,\n          119,\n          111,\n          66,\n          -98,\n          19,\n          -84,\n          66,\n          -126,\n          106,\n          75,\n          66,\n          -70,\n          -59,\n          31,\n          66,\n          -123,\n          -77,\n          101,\n          66,\n          -62,\n          62,\n          -128,\n          66,\n          72,\n          66,\n          109,\n          66,\n          -64,\n          82,\n          43,\n          66,\n          75,\n          18,\n          21,\n          66,\n          -108,\n          42,\n          -46,\n          66,\n          99,\n          -57,\n          99,\n          66,\n          127,\n          -64,\n          60,\n          66,\n          -67,\n          109,\n          81,\n          66,\n          -126,\n          -114,\n          123,\n          66,\n          -67,\n          -33,\n          8,\n          66,\n          121,\n          45,\n          -54,\n          66,\n          -78,\n          51,\n          5,\n          66,\n          -102,\n          5,\n          -85,\n          66,\n          -65,\n          -34,\n          -89,\n          66,\n          75,\n          100,\n          -17,\n          66,\n          68,\n          51,\n          -59,\n          66,\n          -66,\n          117,\n          98,\n          66,\n          93,\n          -43,\n          -50,\n          66,\n          -107,\n          -127,\n          -54,\n          66,\n          111,\n          -56,\n          -9,\n          66,\n          121,\n          -11,\n          42,\n          66,\n          -69,\n          -11,\n          104,\n          66,\n          68,\n          58,\n          -4,\n          66,\n          -108,\n          28,\n          
56,\n          66,\n          -60,\n          -39,\n          -21,\n          66,\n          -115,\n          -123,\n          63,\n          66,\n          -122,\n          -73,\n          -91,\n          66,\n          -113,\n          -4,\n          -92,\n          66,\n          -104,\n          -44,\n          -82,\n          66,\n          -106,\n          -14,\n          3,\n          66,\n          103,\n          2,\n          -62,\n          66,\n          99,\n          -33,\n          -120,\n          66,\n          113,\n          22,\n          -92,\n          66,\n          -112,\n          3,\n          39,\n          66,\n          -65,\n          -67,\n          -21,\n          66,\n          86,\n          -25,\n          42,\n          66,\n          -66,\n          19,\n          6,\n          66,\n          -116,\n          27,\n          -92,\n          66,\n          -87,\n          -118,\n          -82,\n          66,\n          -60,\n          35,\n          -15,\n          66,\n          -90,\n          -62,\n          -108,\n          66,\n          98,\n          98,\n          47,\n          66,\n          -111,\n          0,\n          76,\n          66,\n          82,\n          86,\n          21,\n          66,\n          -85,\n          80,\n          -63,\n          66,\n          -114,\n          -35,\n          -124,\n          66,\n          -110,\n          49,\n          -84,\n          66,\n          102,\n          -52,\n          -121,\n          66,\n          -65,\n          -1,\n          121,\n          66,\n          -120,\n          87,\n          81,\n          66,\n          80,\n          -36,\n          -51,\n          66,\n          -93,\n          -114,\n          -59,\n          66,\n          -65,\n          126,\n          -33,\n          66,\n          -73,\n          -49,\n          -20,\n          66,\n          -84,\n          -97,\n          -52,\n          66,\n          -115,\n          -67,\n         
 -18,\n          66,\n          121,\n          83,\n          -74,\n          66,\n          -70,\n          -43,\n          13,\n          66,\n          -93,\n          -45,\n          -72,\n          66,\n          -101,\n          98,\n          -98,\n          66,\n          -73,\n          -24,\n          -42,\n          66,\n          115,\n          77,\n          -94,\n          66,\n          75,\n          -127,\n          12,\n          66,\n          -104,\n          117,\n          -77,\n          66,\n          -106,\n          -24,\n          112,\n          66,\n          -118,\n          13,\n          126,\n          66,\n          83,\n          80,\n          115,\n          66,\n          -68,\n          90,\n          -69,\n          65,\n          -43,\n          -122,\n          94,\n          66,\n          110,\n          96,\n          23,\n          66,\n          -63,\n          47,\n          -64,\n          66,\n          -121,\n          -3,\n          34,\n          66,\n          -81,\n          44,\n          100,\n          66,\n          72,\n          46,\n          -73,\n          66,\n          -93,\n          -53,\n          55,\n          66,\n          -79,\n          -100,\n          -48,\n          66,\n          -76,\n          86,\n          105,\n          66,\n          122,\n          -84,\n          -49,\n          66,\n          119,\n          -119,\n          122,\n          66,\n          -83,\n          -83,\n          -10,\n          66,\n          -120,\n          15,\n          -42,\n          66,\n          121,\n          56,\n          -59,\n          66,\n          -61,\n          65,\n          13,\n          66,\n          -99,\n          -65,\n          64,\n          66,\n          -82,\n          -38,\n          60,\n          66,\n          125,\n          -43,\n          -105,\n          66,\n          92,\n          6,\n          -93,\n          66,\n          -125,\n          88,\n          
5,\n          66,\n          -118,\n          -119,\n          6,\n          66,\n          -103,\n          -109,\n          32,\n          66,\n          -66,\n          -111,\n          -62,\n          66,\n          -83,\n          120,\n          13,\n          66,\n          -103,\n          -60,\n          -63,\n          66,\n          -109,\n          100,\n          72,\n          66,\n          86,\n          74,\n          81,\n          66,\n          -87,\n          -104,\n          -89,\n          66,\n          -84,\n          -61,\n          -41,\n          66,\n          -67,\n          -70,\n          -111,\n          66,\n          -121,\n          -106,\n          -98,\n          66,\n          100,\n          -73,\n          107,\n          66,\n          -121,\n          5,\n          66,\n          66,\n          -103,\n          40,\n          -42,\n          66,\n          71,\n          -98,\n          43,\n          66,\n          -98,\n          122,\n          82,\n          66,\n          -103,\n          -96,\n          112,\n          66,\n          -120,\n          119,\n          -51,\n          66,\n          -61,\n          -94,\n          -31,\n          66,\n          69,\n          61,\n          84,\n          66,\n          -99,\n          90,\n          -2,\n          66,\n          -108,\n          90,\n          -85,\n          66,\n          -87,\n          -52,\n          -56,\n          66,\n          -119,\n          -40,\n          -127,\n          66,\n          -65,\n          11,\n          -108,\n          66,\n          71,\n          70,\n          -106,\n          66,\n          -117,\n          -110,\n          17,\n          66,\n          -105,\n          3,\n          117,\n          66,\n          84,\n          -83,\n          -128,\n          66,\n          72,\n          -52,\n          -81,\n          66,\n          93,\n          -68,\n          -109,\n          66,\n          -66,\n          66,\n  
        75,\n          66,\n          85,\n          -105,\n          37,\n          66,\n          -80,\n          73,\n          -8,\n          66,\n          -71,\n          -117,\n          61,\n          66,\n          -107,\n          -115,\n          -103,\n          66,\n          -77,\n          -92,\n          23,\n          66,\n          87,\n          -90,\n          61,\n          66,\n          -79,\n          -68,\n          -4,\n          66,\n          -71,\n          -58,\n          -122,\n          66,\n          -127,\n          38,\n          6,\n          66,\n          110,\n          -118,\n          -128,\n          66,\n          -121,\n          -118,\n          35,\n          66,\n          101,\n          12,\n          -94,\n          66,\n          -74,\n          102,\n          -68,\n          66,\n          -107,\n          85,\n          -70,\n          66,\n          -63,\n          -64,\n          102,\n          66,\n          70,\n          91,\n          -42,\n          66,\n          -116,\n          -50,\n          74,\n          66,\n          -103,\n          39,\n          -30,\n          66,\n          87,\n          105,\n          -23,\n          66,\n          -115,\n          -100,\n          51,\n          66,\n          -116,\n          -55,\n          -45,\n          66,\n          89,\n          100,\n          61,\n          66,\n          76,\n          -58,\n          25,\n          66,\n          -109,\n          80,\n          100,\n          66,\n          76,\n          -73,\n          8,\n          66,\n          111,\n          -52,\n          -41,\n          66,\n          83,\n          0,\n          -41,\n          66,\n          82,\n          52,\n          32,\n          66,\n          106,\n          24,\n          17,\n          66,\n          -67,\n          -88,\n          -96,\n          66,\n          75,\n          65,\n          -53,\n          66,\n          -116,\n          -16,\n       
   50,\n          66,\n          -109,\n          -63,\n          1,\n          66,\n          -63,\n          -14,\n          124,\n          66,\n          80,\n          -113,\n          1,\n          66,\n          -113,\n          2,\n          99,\n          66,\n          76,\n          18,\n          84,\n          66,\n          -66,\n          -46,\n          -40,\n          66,\n          -63,\n          70,\n          -4,\n          66,\n          100,\n          50,\n          -44,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n 
         0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 227,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1031500007,\n          758875145,\n          1155325478,\n          715844138,\n          1112243794,\n          596273638,\n          1142353088,\n          729940802,\n          1011620069,\n          1016981216,\n          1097935118,\n          212845067,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1031497735,\n          1142890465,\n          1147827020,\n          1104681797,\n          983510630,\n          586707592,\n          1112651468,\n          987879955,\n          640359319,\n          1012138873,\n          753580870,\n          208304176,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 27,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 27,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5390658293349841947,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          790717654,\n          588904867,\n          474147898,\n          228253029,\n          120528719,\n          338146909,\n          531166966,\n          60143701,\n          
882735958,\n          787914407,\n          1012267954,\n          525558851,\n          367957971,\n          622143046,\n          761484757,\n          904996546,\n          371112575,\n          128625089,\n          571927037,\n          77573037,\n          1054648271,\n          796185802,\n          333035051,\n          102548827,\n          733134645,\n          599582421,\n          460564471,\n          534890677,\n          530100033,\n          535661734,\n          198900787,\n          619286362,\n          213686742,\n          584439022,\n          534562089,\n          708441634,\n          311475017,\n          99,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          67,\n          -93,\n          -27,\n          69,\n          47,\n          80,\n          -19,\n          66,\n          -86,\n          71,\n          56,\n          66,\n          -92,\n          98,\n          -46,\n          66,\n          -72,\n          5,\n          40,\n          66,\n          85,\n          121,\n          30,\n          66,\n          55,\n          116,\n          -7,\n          66,\n          -98,\n          -102,\n          -121,\n          66,\n          -73,\n          -67,\n          -52,\n          66,\n          -114,\n          48,\n          -72,\n          66,\n          -120,\n          -113,\n          -62,\n          66,\n          28,\n          -59,\n          -58,\n          66,\n          -64,\n          -121,\n          -55,\n          66,\n          -120,\n          -28,\n          72,\n          66,\n          45,\n          113,\n          49,\n          66,\n          69,\n          -76,\n          59,\n          65,\n          94,\n          -96,\n          -120,\n          66,\n          -62,\n          85,\n          100,\n          66,\n          -110,\n          76,\n          0,\n          66,\n          81,\n          3,\n          20,\n      
    66,\n          122,\n          -99,\n          -100,\n          66,\n          69,\n          104,\n          74,\n          66,\n          -102,\n          16,\n          30,\n          66,\n          -84,\n          120,\n          -87,\n          66,\n          56,\n          -26,\n          88,\n          66,\n          -77,\n          68,\n          54,\n          66,\n          78,\n          -28,\n          33,\n          66,\n          -72,\n          20,\n          -59,\n          66,\n          -70,\n          96,\n          121,\n          66,\n          71,\n          -72,\n          -94,\n          66,\n          -111,\n          46,\n          37,\n          66,\n          -88,\n          34,\n          -63,\n          66,\n          -98,\n          125,\n          126,\n          66,\n          85,\n          56,\n          37,\n          66,\n          -69,\n          45,\n          -44,\n          66,\n          -64,\n          -40,\n          63,\n          66,\n          -68,\n          4,\n          51,\n          66,\n          111,\n          -71,\n          62,\n          66,\n          -108,\n          -8,\n          20,\n          66,\n          -120,\n          55,\n          126,\n          66,\n          -80,\n          116,\n          39,\n          66,\n          79,\n          -58,\n          84,\n          66,\n          -110,\n          -120,\n          -14,\n          66,\n          -69,\n          54,\n          -119,\n          66,\n          -67,\n          9,\n          -123,\n          66,\n          89,\n          -48,\n          65,\n          66,\n          -123,\n          -102,\n          -39,\n          66,\n          -93,\n          105,\n          -89,\n          66,\n          -87,\n          118,\n          47,\n          66,\n          -80,\n          -91,\n          -99,\n          66,\n          -71,\n          -105,\n          -92,\n          66,\n          -84,\n          113,\n          15,\n          66,\n 
         -97,\n          -12,\n          104,\n          66,\n          -63,\n          -51,\n          123,\n          66,\n          89,\n          -121,\n          52,\n          66,\n          -117,\n          23,\n          -43,\n          66,\n          117,\n          -9,\n          -120,\n          66,\n          -114,\n          -120,\n          96,\n          66,\n          -126,\n          113,\n          -40,\n          66,\n          100,\n          -76,\n          32,\n          66,\n          -74,\n          100,\n          8,\n          66,\n          -104,\n          26,\n          120,\n          66,\n          -81,\n          18,\n          36,\n          66,\n          101,\n          -122,\n          87,\n          66,\n          -99,\n          -66,\n          -83,\n          66,\n          -73,\n          58,\n          -56,\n          66,\n          79,\n          -12,\n          -5,\n          66,\n          -68,\n          18,\n          117,\n          66,\n          -118,\n          -5,\n          -72,\n          66,\n          -64,\n          38,\n          120,\n          66,\n          -107,\n          51,\n          57,\n          66,\n          75,\n          92,\n          -118,\n          66,\n          74,\n          27,\n          -62,\n          66,\n          -74,\n          -71,\n          25,\n          66,\n          -97,\n          111,\n          -18,\n          66,\n          -116,\n          -119,\n          -84,\n          66,\n          -62,\n          -58,\n          31,\n          66,\n          -79,\n          6,\n          96,\n          66,\n          -77,\n          -112,\n          37,\n          66,\n          -79,\n          18,\n          -74,\n          66,\n          -107,\n          8,\n          54,\n          66,\n          -69,\n          -64,\n          106,\n          66,\n          -100,\n          31,\n          45,\n          66,\n          -62,\n          -43,\n          -100,\n          66,\n    
      -116,\n          -53,\n          0,\n          66,\n          -64,\n          52,\n          75,\n          66,\n          -113,\n          -81,\n          63,\n          66,\n          -70,\n          -85,\n          -71,\n          66,\n          -59,\n          -62,\n          -11,\n          66,\n          -59,\n          -120,\n          43,\n          66,\n          -64,\n          2,\n          -55,\n          66,\n          -77,\n          76,\n          76,\n          66,\n          -112,\n          -27,\n          56,\n          66,\n          -67,\n          -49,\n          108,\n          66,\n          77,\n          -32,\n          75,\n          66,\n          -64,\n          61,\n          112,\n          66,\n          84,\n          -109,\n          -96,\n          66,\n          98,\n          1,\n          -32,\n          66,\n          -87,\n          -25,\n          50,\n          66,\n          -121,\n          -80,\n          -102,\n          66,\n          -105,\n          -43,\n          104,\n          66,\n          94,\n          -70,\n          113,\n          66,\n          -122,\n          92,\n          -117,\n          66,\n          -75,\n          -96,\n          109,\n          66,\n          -72,\n          104,\n          -93,\n          66,\n          -114,\n          -60,\n          102,\n          66,\n          -60,\n          -53,\n          -107,\n          66,\n          105,\n          125,\n          97,\n          66,\n          -89,\n          115,\n          -65,\n          66,\n          91,\n          -58,\n          96,\n          66,\n          -60,\n          92,\n          107,\n          66,\n          -115,\n          29,\n          -64,\n          66,\n          -108,\n          8,\n          59,\n          66,\n          -112,\n          -108,\n          -31,\n          66,\n          -102,\n          42,\n          -38,\n          66,\n          -102,\n          126,\n          -9,\n          66,\n 
         -70,\n          -25,\n          65,\n          66,\n          86,\n          -86,\n          42,\n          66,\n          -108,\n          116,\n          38,\n          66,\n          -76,\n          -18,\n          -26,\n          66,\n          99,\n          30,\n          -68,\n          66,\n          -78,\n          -115,\n          69,\n          66,\n          -109,\n          -68,\n          -93,\n          66,\n          -118,\n          -120,\n          -87,\n          66,\n          -127,\n          -68,\n          55,\n          66,\n          74,\n          71,\n          108,\n          66,\n          -93,\n          -91,\n          -42,\n          66,\n          -76,\n          -3,\n          -70,\n          66,\n          -110,\n          1,\n          1,\n          66,\n          -114,\n          -34,\n          52,\n          66,\n          85,\n          -85,\n          -101,\n          66,\n          79,\n          65,\n          -100,\n          66,\n          94,\n          -37,\n          88,\n          66,\n          -99,\n          92,\n          -69,\n          66,\n          -94,\n          83,\n          67,\n          66,\n          99,\n          -113,\n          -27,\n          66,\n          -105,\n          -113,\n          46,\n          66,\n          -108,\n          120,\n          -100,\n          66,\n          82,\n          -29,\n          -63,\n          66,\n          -68,\n          64,\n          40,\n          66,\n          -97,\n          -52,\n          -122,\n          66,\n          -113,\n          23,\n          -110,\n          66,\n          -116,\n          -100,\n          22,\n          66,\n          76,\n          93,\n          23,\n          66,\n          -116,\n          13,\n          6,\n          66,\n          124,\n          80,\n          124,\n          66,\n          99,\n          -81,\n          78,\n          66,\n          -118,\n          -127,\n          -114,\n          66,\n 
         116,\n          -94,\n          74,\n          66,\n          -124,\n          95,\n          -56,\n          66,\n          -120,\n          46,\n          10,\n          66,\n          -68,\n          95,\n          -113,\n          66,\n          -101,\n          6,\n          -14,\n          66,\n          -116,\n          -48,\n          27,\n          66,\n          93,\n          115,\n          124,\n          66,\n          -113,\n          112,\n          -44,\n          66,\n          110,\n          37,\n          61,\n          66,\n          71,\n          67,\n          -46,\n          66,\n          -121,\n          79,\n          -53,\n          66,\n          114,\n          66,\n          -83,\n          66,\n          71,\n          41,\n          52,\n          66,\n          -115,\n          -28,\n          72,\n          66,\n          -98,\n          121,\n          -36,\n          66,\n          -102,\n          -105,\n          -30,\n          66,\n          -65,\n          77,\n          -113,\n          66,\n          72,\n          105,\n          -100,\n          66,\n          -68,\n          -11,\n          56,\n          66,\n          88,\n          55,\n          123,\n          66,\n          -124,\n          -98,\n          112,\n          66,\n          -69,\n          -5,\n          21,\n          66,\n          79,\n          -84,\n          -43,\n          66,\n          -106,\n          74,\n          -97,\n          66,\n          -102,\n          -5,\n          -1,\n          66,\n          93,\n          107,\n          88,\n          66,\n          -65,\n          78,\n          25,\n          66,\n          -119,\n          -121,\n          112,\n          66,\n          96,\n          94,\n          105,\n          66,\n          90,\n          -13,\n          -119,\n          66,\n          -62,\n          86,\n          -8,\n          66,\n          94,\n          85,\n          -100,\n          66,\n       
   80,\n          41,\n          -75,\n          66,\n          -114,\n          90,\n          -23,\n          66,\n          90,\n          27,\n          -41,\n          66,\n          -97,\n          -48,\n          -42,\n          66,\n          -115,\n          49,\n          -77,\n          66,\n          -94,\n          49,\n          -43,\n          66,\n          -67,\n          -17,\n          -30,\n          66,\n          -75,\n          -101,\n          -111,\n          66,\n          89,\n          91,\n          -40,\n          66,\n          70,\n          -49,\n          -65,\n          66,\n          -76,\n          23,\n          -115,\n          66,\n          -65,\n          -118,\n          -90,\n          66,\n          -75,\n          16,\n          68,\n          66,\n          -78,\n          -81,\n          -119,\n          66,\n          -68,\n          70,\n          25,\n          66,\n          -100,\n          -88,\n          63,\n          66,\n          73,\n          124,\n          -73,\n          66,\n          -72,\n          48,\n          -99,\n          66,\n          -71,\n          68,\n          -27,\n          66,\n          74,\n          88,\n          -80,\n          66,\n          -108,\n          -17,\n          86,\n          66,\n          114,\n          90,\n          8,\n          66,\n          -97,\n          -40,\n          87,\n          66,\n          -116,\n          88,\n          -81,\n          66,\n          -110,\n          120,\n          34,\n          66,\n          -116,\n          14,\n          11,\n          66,\n          -96,\n          18,\n          -10,\n          66,\n          -100,\n          -68,\n          116,\n          66,\n          -122,\n          -128,\n          110,\n          66,\n          78,\n          -56,\n          -13,\n          66,\n          -78,\n          74,\n          56,\n          66,\n          -101,\n          -59,\n          -46,\n          66,\n         
 -115,\n          120,\n          84,\n          66,\n          76,\n          84,\n          -19,\n          66,\n          77,\n          -110,\n          -49,\n          66,\n          -102,\n          55,\n          89,\n          66,\n          -111,\n          -86,\n          10,\n          66,\n          -72,\n          19,\n          -25,\n          66,\n          -76,\n          55,\n          51,\n          66,\n          -108,\n          -22,\n          115,\n          66,\n          -101,\n          -50,\n          75,\n          66,\n          -105,\n          -57,\n          70,\n          66,\n          81,\n          70,\n          -53,\n          66,\n          75,\n          42,\n          100,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n   
       0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 224,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1112237405,\n          1157475338,\n          970168472,\n          730019626,\n          1028095307,\n          716905132,\n          1118616631,\n          597253418,\n          1116904262,\n          716670170,\n          624177455,\n          7174807,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1117066864,\n          1157299163,\n          583500320,\n          1018060645,\n          769821569,\n          758697997,\n          602646439,\n          772646333,\n          1098289409,\n          1104127810,\n          582728054,\n          7371553,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 30,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 30,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -721376606247191852,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n   
   \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          993471586,\n          768789051,\n          802720882,\n          240230183,\n          724497905,\n          876430671,\n          1034626670,\n          130002661,\n          791844159,\n          380193277,\n          468146233,\n          706381885,\n          58403877,\n          396199281,\n          446029754,\n          531492421,\n          195618473,\n          757852245,\n          229068193,\n          920984815,\n          708438567,\n          506430822,\n          258858533,\n          895073403,\n          53792599,\n          804555757,\n          465537135,\n          582404898,\n          707302726,\n          515423065,\n          375700174,\n          880518827,\n          1059512290,\n          875783977,\n          1055211223,\n          313476475,\n          868188026,\n          1288133,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          67,\n          -63,\n          5,\n          -26,\n          65,\n          35,\n          -92,\n          -54,\n          65,\n          -101,\n          -102,\n          -126,\n          66,\n          -100,\n          110,\n          38,\n          65,\n          -37,\n          42,\n          -108,\n          66,\n          -118,\n          83,\n          97,\n          65,\n          44,\n          -85,\n          -22,\n          66,\n          -122,\n          123,\n          -28,\n          66,\n          -91,\n          -111,\n          0,\n          66,\n          -112,\n          -44,\n          -23,\n          66,\n          37,\n          -20,\n          -119,\n          66,\n          -66,\n          -54,\n          -75,\n          66,\n          -102,\n          86,\n          -14,\n          66,\n          71,\n          -79,\n 
         -58,\n          66,\n          79,\n          -107,\n          117,\n          66,\n          117,\n          -81,\n          54,\n          66,\n          -109,\n          -70,\n          4,\n          66,\n          100,\n          -8,\n          -98,\n          66,\n          11,\n          -7,\n          -122,\n          66,\n          -90,\n          61,\n          -51,\n          66,\n          100,\n          39,\n          16,\n          66,\n          76,\n          -29,\n          -76,\n          66,\n          -105,\n          57,\n          80,\n          66,\n          124,\n          41,\n          43,\n          66,\n          -110,\n          50,\n          96,\n          66,\n          16,\n          -30,\n          117,\n          66,\n          -106,\n          32,\n          -98,\n          66,\n          -117,\n          -60,\n          11,\n          66,\n          -63,\n          7,\n          23,\n          66,\n          62,\n          -20,\n          87,\n          66,\n          113,\n          -112,\n          85,\n          66,\n          -72,\n          -39,\n          118,\n          66,\n          -107,\n          -95,\n          47,\n          66,\n          -67,\n          91,\n          117,\n          66,\n          93,\n          66,\n          17,\n          66,\n          -79,\n          3,\n          100,\n          66,\n          -74,\n          54,\n          -63,\n          66,\n          124,\n          -7,\n          -87,\n          66,\n          -124,\n          -73,\n          -68,\n          66,\n          -59,\n          -108,\n          -73,\n          66,\n          -64,\n          39,\n          3,\n          66,\n          -93,\n          -122,\n          -86,\n          66,\n          -99,\n          -63,\n          -58,\n          66,\n          93,\n          -36,\n          102,\n          66,\n          104,\n          16,\n          -56,\n          66,\n          114,\n          -32,\n          
16,\n          66,\n          109,\n          24,\n          -27,\n          66,\n          -125,\n          125,\n          -104,\n          66,\n          93,\n          -120,\n          -13,\n          66,\n          -113,\n          -21,\n          -57,\n          66,\n          -124,\n          -21,\n          -107,\n          66,\n          -122,\n          20,\n          111,\n          66,\n          -119,\n          10,\n          10,\n          66,\n          72,\n          90,\n          -7,\n          66,\n          -119,\n          -88,\n          36,\n          66,\n          68,\n          115,\n          -8,\n          66,\n          -70,\n          -68,\n          -81,\n          66,\n          -70,\n          -127,\n          78,\n          66,\n          -61,\n          -101,\n          -79,\n          66,\n          76,\n          127,\n          -78,\n          66,\n          -108,\n          104,\n          -40,\n          66,\n          -106,\n          13,\n          107,\n          66,\n          -67,\n          -63,\n          115,\n          66,\n          -77,\n          -78,\n          -122,\n          66,\n          -88,\n          45,\n          -71,\n          66,\n          -99,\n          -32,\n          -126,\n          66,\n          -111,\n          -67,\n          -37,\n          66,\n          -109,\n          17,\n          -46,\n          66,\n          -79,\n          109,\n          103,\n          66,\n          -95,\n          -13,\n          6,\n          66,\n          -103,\n          -106,\n          8,\n          66,\n          -89,\n          -55,\n          71,\n          66,\n          -126,\n          -94,\n          -56,\n          66,\n          -110,\n          20,\n          124,\n          66,\n          75,\n          -29,\n          -92,\n          66,\n          -73,\n          80,\n          69,\n          66,\n          84,\n          -108,\n          -125,\n          66,\n          -115,\n          
116,\n          -56,\n          66,\n          -98,\n          -84,\n          -17,\n          66,\n          111,\n          -15,\n          120,\n          66,\n          -115,\n          119,\n          -20,\n          66,\n          108,\n          119,\n          -90,\n          66,\n          -111,\n          -4,\n          1,\n          66,\n          73,\n          -104,\n          -33,\n          66,\n          -80,\n          -85,\n          -115,\n          66,\n          -103,\n          -121,\n          88,\n          66,\n          108,\n          -46,\n          53,\n          66,\n          118,\n          55,\n          -70,\n          66,\n          -125,\n          123,\n          32,\n          66,\n          -81,\n          106,\n          82,\n          66,\n          -101,\n          55,\n          -120,\n          66,\n          -79,\n          107,\n          78,\n          66,\n          74,\n          78,\n          -89,\n          66,\n          71,\n          -12,\n          -53,\n          66,\n          -73,\n          21,\n          -24,\n          66,\n          74,\n          42,\n          -106,\n          66,\n          -118,\n          -40,\n          110,\n          66,\n          -107,\n          2,\n          -105,\n          66,\n          -114,\n          -118,\n          -9,\n          66,\n          -106,\n          13,\n          52,\n          66,\n          -65,\n          116,\n          -69,\n          66,\n          -106,\n          26,\n          23,\n          66,\n          -102,\n          -112,\n          -2,\n          66,\n          -62,\n          106,\n          104,\n          66,\n          -87,\n          -30,\n          -3,\n          66,\n          70,\n          32,\n          5,\n          66,\n          -62,\n          -33,\n          -118,\n          66,\n          -70,\n          -70,\n          50,\n          66,\n          -106,\n          67,\n          -24,\n          66,\n          -110,\n    
      85,\n          -73,\n          66,\n          95,\n          -95,\n          48,\n          66,\n          -77,\n          96,\n          35,\n          66,\n          -70,\n          86,\n          53,\n          66,\n          -59,\n          -121,\n          -104,\n          66,\n          80,\n          -91,\n          5,\n          66,\n          77,\n          -125,\n          -127,\n          66,\n          101,\n          -121,\n          55,\n          66,\n          -81,\n          102,\n          -91,\n          66,\n          -66,\n          -6,\n          -81,\n          66,\n          109,\n          72,\n          -45,\n          66,\n          78,\n          71,\n          58,\n          66,\n          -115,\n          68,\n          -4,\n          66,\n          -67,\n          77,\n          54,\n          66,\n          118,\n          77,\n          -49,\n          66,\n          126,\n          -52,\n          -43,\n          66,\n          -93,\n          -47,\n          -110,\n          66,\n          -62,\n          -6,\n          -102,\n          66,\n          75,\n          -91,\n          -92,\n          66,\n          -99,\n          68,\n          -126,\n          66,\n          115,\n          51,\n          -23,\n          66,\n          -66,\n          99,\n          0,\n          66,\n          96,\n          -118,\n          117,\n          66,\n          -115,\n          -15,\n          12,\n          66,\n          -115,\n          -18,\n          -64,\n          66,\n          88,\n          -89,\n          -71,\n          66,\n          81,\n          -50,\n          4,\n          66,\n          -65,\n          111,\n          -67,\n          66,\n          100,\n          15,\n          -22,\n          66,\n          98,\n          124,\n          92,\n          66,\n          110,\n          -115,\n          53,\n          66,\n          105,\n          11,\n          61,\n          66,\n          96,\n          
-115,\n          29,\n          66,\n          -107,\n          38,\n          45,\n          66,\n          -88,\n          17,\n          -126,\n          66,\n          -119,\n          69,\n          -66,\n          66,\n          -64,\n          6,\n          60,\n          66,\n          101,\n          119,\n          -101,\n          66,\n          -106,\n          25,\n          -65,\n          66,\n          80,\n          -127,\n          -40,\n          66,\n          -109,\n          87,\n          44,\n          66,\n          -110,\n          84,\n          -43,\n          66,\n          104,\n          -33,\n          122,\n          66,\n          -63,\n          -59,\n          71,\n          66,\n          -110,\n          -5,\n          39,\n          66,\n          89,\n          127,\n          -80,\n          66,\n          89,\n          -91,\n          -96,\n          66,\n          83,\n          105,\n          28,\n          66,\n          120,\n          -38,\n          50,\n          66,\n          -81,\n          -39,\n          -21,\n          66,\n          -123,\n          73,\n          -109,\n          66,\n          105,\n          -69,\n          75,\n          66,\n          -105,\n          49,\n          -107,\n          66,\n          -72,\n          -108,\n          -104,\n          66,\n          -105,\n          -6,\n          50,\n          66,\n          -67,\n          -114,\n          -106,\n          66,\n          -109,\n          -46,\n          -1,\n          66,\n          93,\n          80,\n          -14,\n          66,\n          -125,\n          -103,\n          6,\n          66,\n          -66,\n          -84,\n          -24,\n          66,\n          -73,\n          -73,\n          -94,\n          66,\n          -114,\n          32,\n          -98,\n          66,\n          -120,\n          123,\n          72,\n          66,\n          -63,\n          -10,\n          63,\n          66,\n          -97,\n    
      -95,\n          90,\n          66,\n          -112,\n          -99,\n          -49,\n          66,\n          -61,\n          117,\n          0,\n          66,\n          -64,\n          -94,\n          27,\n          66,\n          -112,\n          46,\n          -97,\n          66,\n          86,\n          19,\n          -23,\n          66,\n          93,\n          110,\n          -79,\n          66,\n          -62,\n          26,\n          -127,\n          66,\n          -68,\n          -107,\n          -30,\n          66,\n          -78,\n          65,\n          -30,\n          66,\n          -104,\n          -107,\n          87,\n          66,\n          -79,\n          77,\n          34,\n          66,\n          84,\n          102,\n          58,\n          66,\n          119,\n          -69,\n          84,\n          66,\n          -111,\n          -88,\n          0,\n          66,\n          -116,\n          -115,\n          48,\n          66,\n          77,\n          -110,\n          2,\n          66,\n          89,\n          109,\n          -105,\n          66,\n          -71,\n          110,\n          109,\n          66,\n          -81,\n          60,\n          -76,\n          66,\n          73,\n          -85,\n          -32,\n          66,\n          111,\n          71,\n          -108,\n          66,\n          -79,\n          -82,\n          -83,\n          66,\n          -59,\n          -45,\n          -72,\n          66,\n          73,\n          -117,\n          25,\n          66,\n          -102,\n          10,\n          -7,\n          66,\n          -109,\n          30,\n          -103,\n          66,\n          -105,\n          96,\n          -110,\n          66,\n          -72,\n          -48,\n          53,\n          66,\n          86,\n          -76,\n          74,\n          66,\n          -73,\n          31,\n          5,\n          66,\n          87,\n          -68,\n          83,\n          66,\n          -65,\n          
-13,\n          98,\n          66,\n          120,\n          -74,\n          6,\n          66,\n          -70,\n          117,\n          45,\n          66,\n          -63,\n          -56,\n          -125,\n          66,\n          119,\n          28,\n          92,\n          66,\n          88,\n          67,\n          98,\n          66,\n          90,\n          -59,\n          -77,\n          66,\n          -107,\n          -106,\n          77,\n          66,\n          -102,\n          -64,\n          -45,\n          66,\n          -62,\n          -108,\n          79,\n          66,\n          -101,\n          13,\n          116,\n          66,\n          -62,\n          19,\n          -89,\n          66,\n          97,\n          -112,\n          27,\n          66,\n          82,\n          -54,\n          86,\n          66,\n          -75,\n          -17,\n          -29,\n          66,\n          115,\n          117,\n          -95,\n          66,\n          -104,\n          109,\n          -93,\n          66,\n          -110,\n          -25,\n          -14,\n          66,\n          -64,\n          33,\n          -4,\n          66,\n          -102,\n          -9,\n          48,\n          66,\n          -128,\n          -10,\n          28,\n          66,\n          -109,\n          -23,\n          93,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n         
 0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 227,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          758363846,\n          1033114000,\n          602653985,\n          1147673933,\n          975107957,\n          624945118,\n          1030986049,\n          1032580345,\n          1018529530,\n          1030985788,\n          1027542172,\n          238358821,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1160487538,\n          1033121209,\n          1114411118,\n          1147831316,\n          1026543992,\n          629748499,\n          970154401,\n          595538690,\n          1142595601,\n          1017155110,\n          726748519,\n          195313423,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 27,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 27,\n        \"partialTreeStateEnabled\": true\n      },\n 
     \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3064446777312786653,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          525536174,\n          127760097,\n          320911086,\n          171375735,\n          917425314,\n          226089954,\n          637343330,\n          1016292691,\n          1017764653,\n          627886389,\n          334409029,\n          1071285746,\n          465505389,\n          236438983,\n          1038801102,\n          35575509,\n          64149302,\n          396273830,\n          264093143,\n          324760629,\n          572319814,\n          118798133,\n          265658314,\n          34915875,\n          381288249,\n          207036323,\n          43732163,\n          96458681,\n          321687411,\n          481089206,\n          196128550,\n          459990567,\n          1038163145,\n          215293857,\n          996015725,\n          866553421,\n          872072570,\n          509917042,\n          10,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -66,\n          32,\n          -18,\n          67,\n          121,\n          65,\n          77,\n          68,\n          1,\n          39,\n          -22,\n          69,\n          117,\n          -56,\n          -3,\n          66,\n          -82,\n          115,\n          20,\n          66,\n          86,\n          57,\n          -3,\n          66,\n          -105,\n          
119,\n          -57,\n          65,\n          -75,\n          123,\n          49,\n          66,\n          -88,\n          11,\n          -54,\n          66,\n          -92,\n          -123,\n          -72,\n          66,\n          -113,\n          32,\n          20,\n          66,\n          51,\n          -88,\n          3,\n          66,\n          -89,\n          77,\n          32,\n          66,\n          -128,\n          -88,\n          -74,\n          66,\n          124,\n          124,\n          125,\n          65,\n          -51,\n          -13,\n          69,\n          66,\n          -84,\n          -78,\n          60,\n          66,\n          -88,\n          42,\n          104,\n          66,\n          -118,\n          51,\n          105,\n          66,\n          76,\n          -6,\n          16,\n          66,\n          89,\n          33,\n          65,\n          66,\n          -110,\n          -91,\n          -56,\n          66,\n          90,\n          -26,\n          -123,\n          66,\n          -101,\n          -55,\n          -16,\n          66,\n          -65,\n          51,\n          -58,\n          66,\n          -88,\n          108,\n          -2,\n          66,\n          110,\n          26,\n          85,\n          66,\n          -111,\n          -25,\n          -96,\n          66,\n          -96,\n          -6,\n          33,\n          66,\n          -119,\n          111,\n          126,\n          66,\n          -84,\n          -47,\n          4,\n          66,\n          94,\n          99,\n          80,\n          66,\n          -82,\n          35,\n          -84,\n          65,\n          -128,\n          -5,\n          122,\n          66,\n          84,\n          -2,\n          -84,\n          66,\n          -94,\n          33,\n          20,\n          66,\n          -79,\n          -85,\n          35,\n          65,\n          109,\n          105,\n          -27,\n          66,\n          -116,\n          -43,\n     
     17,\n          66,\n          -71,\n          -9,\n          46,\n          66,\n          81,\n          -116,\n          21,\n          66,\n          -90,\n          -93,\n          124,\n          66,\n          85,\n          -84,\n          34,\n          66,\n          -74,\n          53,\n          0,\n          66,\n          -102,\n          66,\n          78,\n          66,\n          -97,\n          99,\n          3,\n          66,\n          -121,\n          123,\n          97,\n          66,\n          -71,\n          18,\n          -25,\n          66,\n          -115,\n          -71,\n          -55,\n          66,\n          -113,\n          124,\n          26,\n          66,\n          -64,\n          -41,\n          21,\n          66,\n          79,\n          86,\n          46,\n          66,\n          -91,\n          99,\n          74,\n          66,\n          -82,\n          101,\n          -27,\n          66,\n          -126,\n          -50,\n          3,\n          66,\n          -117,\n          -52,\n          -111,\n          66,\n          -64,\n          -19,\n          100,\n          66,\n          -108,\n          74,\n          -14,\n          66,\n          -71,\n          7,\n          -62,\n          66,\n          -74,\n          75,\n          -66,\n          66,\n          -94,\n          5,\n          93,\n          66,\n          -90,\n          -119,\n          37,\n          66,\n          -128,\n          -28,\n          3,\n          66,\n          -101,\n          -124,\n          7,\n          66,\n          -73,\n          101,\n          -6,\n          66,\n          -90,\n          105,\n          22,\n          66,\n          -69,\n          100,\n          86,\n          66,\n          93,\n          30,\n          -97,\n          66,\n          -85,\n          101,\n          -44,\n          66,\n          -117,\n          -83,\n          -115,\n          66,\n          -116,\n          -98,\n          
-63,\n          66,\n          112,\n          49,\n          15,\n          66,\n          -102,\n          -98,\n          -117,\n          66,\n          95,\n          -107,\n          60,\n          66,\n          95,\n          -53,\n          -79,\n          66,\n          -67,\n          69,\n          47,\n          66,\n          113,\n          103,\n          -111,\n          66,\n          -103,\n          42,\n          96,\n          66,\n          115,\n          48,\n          63,\n          66,\n          -64,\n          -109,\n          -58,\n          66,\n          -117,\n          -36,\n          -121,\n          66,\n          101,\n          -120,\n          63,\n          66,\n          -100,\n          89,\n          -24,\n          66,\n          97,\n          -37,\n          -62,\n          66,\n          -87,\n          -31,\n          -126,\n          66,\n          -81,\n          32,\n          -110,\n          66,\n          -61,\n          82,\n          61,\n          66,\n          -101,\n          -108,\n          -114,\n          66,\n          -60,\n          110,\n          -93,\n          66,\n          -84,\n          -69,\n          94,\n          66,\n          -94,\n          -38,\n          -95,\n          66,\n          -60,\n          -61,\n          123,\n          66,\n          -106,\n          120,\n          -128,\n          66,\n          -116,\n          110,\n          27,\n          66,\n          -99,\n          113,\n          65,\n          66,\n          -91,\n          106,\n          71,\n          66,\n          -72,\n          -38,\n          -19,\n          66,\n          -96,\n          -89,\n          52,\n          66,\n          -122,\n          116,\n          -2,\n          66,\n          -100,\n          94,\n          44,\n          66,\n          -108,\n          60,\n          -50,\n          66,\n          -97,\n          9,\n          -128,\n          66,\n          -72,\n          40,\n 
         26,\n          66,\n          -107,\n          -8,\n          -19,\n          66,\n          -78,\n          2,\n          -71,\n          66,\n          -124,\n          95,\n          1,\n          66,\n          -126,\n          79,\n          -16,\n          66,\n          94,\n          -105,\n          40,\n          66,\n          72,\n          -114,\n          -106,\n          66,\n          -69,\n          17,\n          127,\n          66,\n          81,\n          56,\n          -77,\n          66,\n          80,\n          119,\n          -48,\n          66,\n          -127,\n          -86,\n          -89,\n          66,\n          89,\n          60,\n          -27,\n          66,\n          -104,\n          106,\n          65,\n          66,\n          -106,\n          21,\n          -115,\n          66,\n          -97,\n          -25,\n          -105,\n          66,\n          -84,\n          -5,\n          -108,\n          66,\n          -94,\n          44,\n          -91,\n          66,\n          -113,\n          70,\n          106,\n          66,\n          -83,\n          -119,\n          39,\n          66,\n          -74,\n          54,\n          42,\n          66,\n          -78,\n          -10,\n          127,\n          66,\n          -107,\n          -81,\n          110,\n          66,\n          -127,\n          113,\n          -95,\n          66,\n          -116,\n          3,\n          -47,\n          66,\n          -113,\n          17,\n          58,\n          66,\n          -123,\n          75,\n          13,\n          66,\n          -123,\n          61,\n          126,\n          66,\n          -117,\n          -46,\n          69,\n          66,\n          -105,\n          -51,\n          42,\n          66,\n          89,\n          3,\n          68,\n          66,\n          -75,\n          84,\n          -47,\n          66,\n          -64,\n          72,\n          51,\n          66,\n          83,\n          -91,\n     
     73,\n          66,\n          87,\n          99,\n          -47,\n          66,\n          -109,\n          122,\n          33,\n          66,\n          99,\n          -103,\n          -56,\n          66,\n          72,\n          124,\n          57,\n          66,\n          -106,\n          58,\n          -29,\n          66,\n          -125,\n          -57,\n          75,\n          66,\n          -98,\n          -67,\n          117,\n          66,\n          -107,\n          -127,\n          5,\n          66,\n          -111,\n          -17,\n          86,\n          66,\n          -111,\n          18,\n          104,\n          66,\n          -102,\n          -63,\n          -123,\n          66,\n          111,\n          -5,\n          -33,\n          66,\n          69,\n          90,\n          37,\n          66,\n          75,\n          -34,\n          41,\n          66,\n          73,\n          -14,\n          -69,\n          66,\n          79,\n          50,\n          -11,\n          66,\n          -122,\n          -38,\n          9,\n          66,\n          93,\n          -124,\n          -57,\n          66,\n          -61,\n          116,\n          -110,\n          66,\n          -117,\n          -70,\n          -79,\n          66,\n          -59,\n          10,\n          36,\n          66,\n          68,\n          -74,\n          38,\n          66,\n          -69,\n          46,\n          -54,\n          66,\n          107,\n          36,\n          78,\n          66,\n          -74,\n          13,\n          -14,\n          66,\n          -101,\n          56,\n          5,\n          66,\n          -110,\n          34,\n          -41,\n          66,\n          -107,\n          -41,\n          -128,\n          66,\n          -102,\n          -77,\n          -105,\n          66,\n          -119,\n          76,\n          125,\n          66,\n          122,\n          49,\n          94,\n          66,\n          82,\n          92,\n          
94,\n          66,\n          -64,\n          82,\n          -9,\n          66,\n          79,\n          98,\n          89,\n          66,\n          91,\n          13,\n          -114,\n          66,\n          82,\n          -113,\n          -7,\n          66,\n          -89,\n          55,\n          -97,\n          66,\n          -76,\n          11,\n          27,\n          66,\n          -118,\n          -2,\n          -4,\n          66,\n          -84,\n          53,\n          -89,\n          66,\n          -110,\n          -74,\n          -127,\n          66,\n          -108,\n          -38,\n          -14,\n          66,\n          -118,\n          88,\n          -99,\n          66,\n          -80,\n          -1,\n          -106,\n          66,\n          -76,\n          -99,\n          29,\n          66,\n          -74,\n          -79,\n          65,\n          66,\n          -95,\n          105,\n          -83,\n          66,\n          68,\n          -88,\n          -98,\n          66,\n          -122,\n          25,\n          17,\n          66,\n          84,\n          -50,\n          63,\n          66,\n          -109,\n          51,\n          -8,\n          66,\n          78,\n          84,\n          9,\n          66,\n          -103,\n          127,\n          114,\n          66,\n          -112,\n          75,\n          -94,\n          66,\n          -109,\n          -9,\n          22,\n          66,\n          -62,\n          25,\n          65,\n          66,\n          -104,\n          -8,\n          -54,\n          66,\n          -101,\n          -126,\n          7,\n          66,\n          -92,\n          8,\n          -79,\n          66,\n          87,\n          9,\n          96,\n          66,\n          -65,\n          -14,\n          -40,\n          66,\n          -75,\n          74,\n          -5,\n          66,\n          -79,\n          -121,\n          117,\n          66,\n          -121,\n          115,\n          -43,\n       
   66,\n          -110,\n          -122,\n          -27,\n          66,\n          77,\n          -59,\n          -87,\n          66,\n          -63,\n          -19,\n          102,\n          66,\n          -121,\n          -67,\n          3,\n          66,\n          -64,\n          59,\n          -91,\n          66,\n          -119,\n          -6,\n          -65,\n          66,\n          87,\n          -45,\n          -115,\n          66,\n          102,\n          -23,\n          60,\n          66,\n          81,\n          -81,\n          -5,\n          66,\n          -118,\n          -114,\n          -26,\n          66,\n          -95,\n          38,\n          -22,\n          66,\n          -111,\n          -49,\n          -90,\n          66,\n          -65,\n          -45,\n          100,\n          66,\n          94,\n          -52,\n          -115,\n          66,\n          -106,\n          38,\n          -127,\n          66,\n          -65,\n          -89,\n          26,\n          66,\n          -109,\n          -8,\n          122,\n          66,\n          -59,\n          107,\n          -43,\n          66,\n          93,\n          17,\n          50,\n          66,\n          -105,\n          77,\n          43,\n          66,\n          -114,\n          -119,\n          -61,\n          66,\n          101,\n          -35,\n          -74,\n          66,\n          -116,\n          -71,\n          -42,\n          66,\n          -60,\n          -20,\n          -53,\n          66,\n          91,\n          49,\n          81,\n          66,\n          -99,\n          45,\n          -100,\n          66,\n          -112,\n          -21,\n          83,\n          66,\n          -59,\n          -111,\n          -36,\n          66,\n          113,\n          73,\n          -21,\n          66,\n          -69,\n          97,\n          -9,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          
0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 229,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          759940217,\n          597851188,\n          710862452,\n          1147299142,\n          1018214702,\n          1141329659,\n          638607614,\n          626310868,\n          724876739,\n          629196647,\n          753337382,\n          754052459,\n          1,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          759953299,\n          1118623282,\n          770038325,\n          
1013253622,\n          1142418806,\n          1013979832,\n          645521126,\n          1032858751,\n          600324118,\n          725328427,\n          1097770954,\n          581668981,\n          1,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 25,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 25,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3783375162460323258,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          242072258,\n          191800929,\n          203791159,\n          454404915,\n          505204563,\n          656883261,\n          519542867,\n          195778297,\n          775481585,\n          735769639,\n          476420433,\n          761706846,\n          922314183,\n          630279338,\n          737627169,\n          446614269,\n          603625393,\n          664086629,\n          1008561203,\n          184010425,\n          857847133,\n          357600482,\n          400676778,\n          55664323,\n          1034286397,\n          237479118,\n          215256139,\n          311630131,\n          1045764134,\n          259042911,\n          68938075,\n          170233683,\n          1065034913,\n          124249926,\n          578258254,\n          1031211085,\n          267216079,\n          2239,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        
\"cutValueData\": [\n          68,\n          24,\n          -67,\n          33,\n          69,\n          83,\n          124,\n          99,\n          69,\n          119,\n          -25,\n          -72,\n          66,\n          78,\n          49,\n          -116,\n          69,\n          -124,\n          22,\n          -128,\n          66,\n          27,\n          123,\n          82,\n          66,\n          28,\n          93,\n          -95,\n          66,\n          23,\n          59,\n          -80,\n          66,\n          57,\n          65,\n          9,\n          66,\n          -126,\n          -52,\n          43,\n          66,\n          -99,\n          44,\n          95,\n          66,\n          -105,\n          49,\n          -75,\n          66,\n          125,\n          -88,\n          -53,\n          66,\n          -95,\n          82,\n          36,\n          66,\n          -88,\n          -11,\n          90,\n          66,\n          113,\n          -60,\n          -93,\n          66,\n          -94,\n          -57,\n          123,\n          66,\n          -76,\n          -65,\n          103,\n          66,\n          -128,\n          -45,\n          -101,\n          66,\n          89,\n          -56,\n          19,\n          66,\n          -65,\n          -103,\n          -28,\n          66,\n          125,\n          83,\n          -20,\n          66,\n          -107,\n          23,\n          22,\n          66,\n          122,\n          40,\n          38,\n          66,\n          83,\n          -67,\n          -43,\n          66,\n          -90,\n          -113,\n          13,\n          66,\n          95,\n          -68,\n          36,\n          66,\n          -85,\n          55,\n          94,\n          66,\n          -109,\n          8,\n          -121,\n          66,\n          80,\n          -109,\n          13,\n          66,\n          -104,\n          -21,\n          -120,\n          66,\n          -114,\n          -90,\n    
      -98,\n          66,\n          109,\n          -8,\n          -62,\n          66,\n          -67,\n          -114,\n          -60,\n          66,\n          -77,\n          14,\n          -53,\n          66,\n          91,\n          95,\n          -18,\n          66,\n          113,\n          29,\n          -117,\n          66,\n          -63,\n          -77,\n          62,\n          66,\n          -68,\n          98,\n          -82,\n          66,\n          82,\n          -6,\n          48,\n          66,\n          -114,\n          26,\n          126,\n          66,\n          121,\n          46,\n          49,\n          66,\n          -99,\n          103,\n          46,\n          66,\n          119,\n          -34,\n          33,\n          66,\n          -107,\n          -59,\n          -123,\n          66,\n          -63,\n          58,\n          57,\n          66,\n          -59,\n          10,\n          -118,\n          66,\n          -106,\n          -12,\n          -84,\n          66,\n          -116,\n          29,\n          -97,\n          64,\n          -19,\n          90,\n          39,\n          66,\n          -80,\n          81,\n          -23,\n          66,\n          -119,\n          16,\n          50,\n          66,\n          -118,\n          66,\n          -114,\n          66,\n          104,\n          -24,\n          107,\n          66,\n          85,\n          105,\n          -41,\n          66,\n          -125,\n          120,\n          55,\n          66,\n          -106,\n          12,\n          126,\n          66,\n          -112,\n          94,\n          -30,\n          66,\n          -115,\n          -21,\n          62,\n          66,\n          -108,\n          82,\n          -90,\n          66,\n          -128,\n          95,\n          -128,\n          66,\n          -72,\n          90,\n          -4,\n          66,\n          -74,\n          -72,\n          78,\n          66,\n          83,\n          124,\n      
    -105,\n          66,\n          -82,\n          -117,\n          59,\n          66,\n          -60,\n          0,\n          23,\n          66,\n          -71,\n          -117,\n          -66,\n          66,\n          -67,\n          -46,\n          -26,\n          66,\n          -72,\n          -84,\n          114,\n          66,\n          -103,\n          109,\n          103,\n          66,\n          -63,\n          -99,\n          32,\n          66,\n          -62,\n          82,\n          -49,\n          66,\n          64,\n          -14,\n          127,\n          66,\n          -70,\n          -68,\n          69,\n          66,\n          -128,\n          -54,\n          54,\n          66,\n          -78,\n          -122,\n          -14,\n          66,\n          88,\n          -36,\n          -48,\n          66,\n          -127,\n          -104,\n          -108,\n          66,\n          -81,\n          66,\n          -46,\n          66,\n          124,\n          -97,\n          116,\n          66,\n          82,\n          8,\n          -45,\n          66,\n          -66,\n          18,\n          -67,\n          66,\n          -106,\n          -68,\n          -94,\n          66,\n          -73,\n          115,\n          -22,\n          66,\n          -115,\n          -116,\n          -29,\n          66,\n          -120,\n          -44,\n          -4,\n          66,\n          -128,\n          -65,\n          -110,\n          66,\n          -79,\n          -99,\n          -36,\n          66,\n          76,\n          93,\n          -45,\n          66,\n          -121,\n          -1,\n          -20,\n          66,\n          -108,\n          85,\n          -118,\n          66,\n          117,\n          -86,\n          -49,\n          66,\n          -73,\n          60,\n          -25,\n          66,\n          -123,\n          94,\n          -123,\n          66,\n          -106,\n          115,\n          -28,\n          66,\n          -107,\n      
    -118,\n          -39,\n          66,\n          -108,\n          107,\n          -89,\n          66,\n          -123,\n          112,\n          119,\n          66,\n          -124,\n          -44,\n          -80,\n          66,\n          -105,\n          -105,\n          -120,\n          66,\n          126,\n          -28,\n          101,\n          66,\n          -119,\n          106,\n          126,\n          66,\n          -102,\n          59,\n          -89,\n          66,\n          102,\n          67,\n          61,\n          66,\n          -59,\n          -59,\n          -120,\n          66,\n          -70,\n          75,\n          -26,\n          66,\n          -127,\n          29,\n          -82,\n          66,\n          -126,\n          36,\n          -30,\n          66,\n          86,\n          96,\n          41,\n          66,\n          -107,\n          11,\n          18,\n          66,\n          70,\n          99,\n          92,\n          66,\n          -65,\n          8,\n          51,\n          66,\n          -109,\n          75,\n          86,\n          66,\n          -78,\n          -121,\n          103,\n          66,\n          -96,\n          108,\n          -43,\n          66,\n          -117,\n          45,\n          26,\n          66,\n          -120,\n          -126,\n          99,\n          66,\n          103,\n          -7,\n          -40,\n          66,\n          78,\n          -34,\n          -63,\n          66,\n          -122,\n          -128,\n          33,\n          66,\n          -110,\n          -120,\n          74,\n          66,\n          -65,\n          -88,\n          108,\n          66,\n          -108,\n          -125,\n          126,\n          66,\n          -128,\n          126,\n          -96,\n          66,\n          -81,\n          -101,\n          -95,\n          66,\n          -126,\n          -9,\n          -48,\n          66,\n          -65,\n          56,\n          -104,\n          66,\n      
    105,\n          7,\n          -55,\n          66,\n          108,\n          3,\n          -85,\n          66,\n          -107,\n          43,\n          -85,\n          66,\n          -110,\n          67,\n          35,\n          66,\n          -76,\n          126,\n          -119,\n          66,\n          -66,\n          84,\n          -115,\n          66,\n          -118,\n          111,\n          -55,\n          66,\n          36,\n          -11,\n          -11,\n          66,\n          90,\n          -16,\n          -14,\n          66,\n          -66,\n          50,\n          -39,\n          66,\n          96,\n          2,\n          42,\n          66,\n          107,\n          92,\n          -39,\n          66,\n          -66,\n          -60,\n          -19,\n          66,\n          70,\n          79,\n          16,\n          66,\n          -65,\n          -98,\n          39,\n          66,\n          -110,\n          63,\n          50,\n          66,\n          -114,\n          94,\n          -58,\n          66,\n          -101,\n          -6,\n          -45,\n          66,\n          -106,\n          -125,\n          112,\n          66,\n          -120,\n          -58,\n          45,\n          66,\n          84,\n          -101,\n          -90,\n          66,\n          -63,\n          76,\n          -48,\n          66,\n          -79,\n          -40,\n          -31,\n          66,\n          -67,\n          -116,\n          58,\n          66,\n          -67,\n          -100,\n          -93,\n          66,\n          -121,\n          -53,\n          -17,\n          66,\n          75,\n          -55,\n          -106,\n          66,\n          -61,\n          19,\n          -128,\n          66,\n          78,\n          -38,\n          -97,\n          66,\n          103,\n          -99,\n          45,\n          66,\n          -66,\n          84,\n          -26,\n          66,\n          -124,\n          36,\n          -8,\n          66,\n       
   -110,\n          -76,\n          35,\n          66,\n          -105,\n          113,\n          97,\n          66,\n          -61,\n          -36,\n          56,\n          66,\n          117,\n          -116,\n          121,\n          66,\n          -116,\n          -48,\n          93,\n          66,\n          -68,\n          58,\n          81,\n          66,\n          -81,\n          103,\n          -54,\n          66,\n          -112,\n          68,\n          25,\n          66,\n          -106,\n          -37,\n          107,\n          66,\n          -78,\n          -32,\n          -54,\n          66,\n          -118,\n          -58,\n          28,\n          66,\n          -66,\n          74,\n          112,\n          66,\n          -69,\n          9,\n          22,\n          66,\n          -100,\n          -43,\n          -66,\n          66,\n          106,\n          -12,\n          112,\n          66,\n          87,\n          46,\n          -25,\n          66,\n          -80,\n          -48,\n          -103,\n          66,\n          73,\n          125,\n          12,\n          66,\n          -109,\n          112,\n          77,\n          66,\n          73,\n          77,\n          -75,\n          66,\n          115,\n          99,\n          -56,\n          66,\n          110,\n          -108,\n          -106,\n          66,\n          -90,\n          101,\n          110,\n          66,\n          -68,\n          -10,\n          -70,\n          66,\n          70,\n          25,\n          -94,\n          66,\n          -115,\n          -22,\n          -3,\n          66,\n          -76,\n          -60,\n          -42,\n          66,\n          109,\n          -101,\n          -52,\n          66,\n          -101,\n          -42,\n          -117,\n          66,\n          99,\n          -47,\n          33,\n          66,\n          84,\n          -121,\n          116,\n          66,\n          -63,\n          -50,\n          -57,\n          66,\n 
         -112,\n          -90,\n          -11,\n          66,\n          -119,\n          23,\n          -98,\n          66,\n          -107,\n          35,\n          117,\n          66,\n          -112,\n          55,\n          -78,\n          66,\n          -70,\n          99,\n          126,\n          66,\n          108,\n          -93,\n          28,\n          66,\n          92,\n          -15,\n          -21,\n          66,\n          -64,\n          14,\n          -112,\n          66,\n          -72,\n          -35,\n          48,\n          66,\n          -112,\n          -39,\n          39,\n          66,\n          74,\n          18,\n          -45,\n          66,\n          -70,\n          -124,\n          -50,\n          66,\n          89,\n          33,\n          -10,\n          66,\n          -61,\n          -2,\n          -26,\n          66,\n          -80,\n          107,\n          6,\n          66,\n          -127,\n          73,\n          -119,\n          66,\n          112,\n          -18,\n          11,\n          66,\n          86,\n          -13,\n          80,\n          66,\n          -109,\n          -52,\n          30,\n          66,\n          -116,\n          36,\n          -82,\n          66,\n          -72,\n          -97,\n          4,\n          66,\n          -71,\n          15,\n          55,\n          66,\n          -63,\n          32,\n          -108,\n          66,\n          77,\n          -29,\n          16,\n          66,\n          -61,\n          -15,\n          -114,\n          66,\n          85,\n          58,\n          -110,\n          66,\n          -72,\n          89,\n          -96,\n          66,\n          -110,\n          -62,\n          -5,\n          66,\n          -60,\n          92,\n          -26,\n          66,\n          -62,\n          45,\n          -14,\n          66,\n          93,\n          68,\n          -20,\n          66,\n          78,\n          31,\n          88,\n          66,\n          
-104,\n          33,\n          -84,\n          66,\n          -83,\n          -99,\n          116,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 225,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          
602397242,\n          985291288,\n          1117381055,\n          631286267,\n          1159871039,\n          769877906,\n          710290934,\n          586643026,\n          767911738,\n          984495190,\n          970343104,\n          21583228,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          760295146,\n          983690483,\n          711067073,\n          640852154,\n          1141474031,\n          729666032,\n          1011663907,\n          716671138,\n          1143049792,\n          587764751,\n          1025973418,\n          23295112,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 29,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 29,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -3471042119123364184,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          741444190,\n          938385206,\n          316895023,\n          44092536,\n          450799169,\n          377427443,\n          638928997,\n          452949597,\n          512919603,\n          337998883,\n          903566707,\n          622771047,\n          756776681,\n          731166579,\n          481097137,\n          606252761,\n          756853481,\n          368532603,\n          661968943,\n          203769137,\n          744733669,\n          382914295,\n    
      524069309,\n          483703645,\n          890564083,\n          69856686,\n          724561007,\n          592623267,\n          1012898549,\n          94819938,\n          113187541,\n          497646006,\n          195005658,\n          1063779809,\n          1010747097,\n          1000913278,\n          316377073,\n          1052735469,\n          26727,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          17,\n          -116,\n          45,\n          68,\n          -43,\n          -11,\n          48,\n          68,\n          -60,\n          -8,\n          49,\n          66,\n          68,\n          -98,\n          42,\n          65,\n          -6,\n          -13,\n          -69,\n          69,\n          113,\n          -21,\n          124,\n          66,\n          -61,\n          -83,\n          26,\n          66,\n          -102,\n          51,\n          39,\n          66,\n          121,\n          83,\n          116,\n          66,\n          80,\n          -102,\n          -53,\n          66,\n          -64,\n          77,\n          90,\n          66,\n          -127,\n          -103,\n          -65,\n          66,\n          107,\n          -113,\n          15,\n          66,\n          -117,\n          80,\n          76,\n          66,\n          124,\n          101,\n          78,\n          66,\n          -79,\n          -110,\n          -8,\n          69,\n          104,\n          110,\n          -126,\n          66,\n          -96,\n          27,\n          8,\n          64,\n          -68,\n          -60,\n          119,\n          66,\n          92,\n          -34,\n          79,\n          66,\n          97,\n          -88,\n          56,\n          66,\n          -118,\n          -39,\n          -12,\n          66,\n          -64,\n          75,\n          12,\n          66,\n          -110,\n          -80,\n          90,\n          66,\n          -99,\n          
74,\n          -85,\n          66,\n          -63,\n          124,\n          -114,\n          66,\n          -108,\n          -63,\n          79,\n          66,\n          -94,\n          -44,\n          -28,\n          66,\n          -116,\n          115,\n          99,\n          66,\n          -88,\n          18,\n          -68,\n          66,\n          115,\n          -123,\n          20,\n          66,\n          113,\n          -17,\n          95,\n          66,\n          -124,\n          -42,\n          81,\n          66,\n          -84,\n          110,\n          72,\n          66,\n          106,\n          12,\n          65,\n          66,\n          93,\n          115,\n          -80,\n          66,\n          -106,\n          -18,\n          0,\n          66,\n          93,\n          -31,\n          -75,\n          66,\n          -76,\n          115,\n          -16,\n          66,\n          -87,\n          114,\n          -80,\n          66,\n          -112,\n          -12,\n          60,\n          66,\n          90,\n          -114,\n          117,\n          66,\n          -100,\n          0,\n          74,\n          66,\n          -72,\n          127,\n          75,\n          66,\n          -102,\n          30,\n          -8,\n          66,\n          -77,\n          52,\n          -74,\n          66,\n          78,\n          45,\n          82,\n          66,\n          -116,\n          59,\n          26,\n          66,\n          110,\n          113,\n          -88,\n          66,\n          -107,\n          -44,\n          -65,\n          66,\n          -62,\n          -121,\n          22,\n          66,\n          -104,\n          110,\n          99,\n          66,\n          -100,\n          -17,\n          57,\n          65,\n          55,\n          -102,\n          -8,\n          66,\n          89,\n          113,\n          116,\n          66,\n          -104,\n          40,\n          -46,\n          66,\n          -110,\n          
90,\n          18,\n          66,\n          -72,\n          109,\n          -36,\n          66,\n          -95,\n          -34,\n          10,\n          66,\n          -67,\n          -14,\n          -110,\n          66,\n          78,\n          -105,\n          -42,\n          66,\n          86,\n          -48,\n          79,\n          66,\n          -99,\n          69,\n          -16,\n          66,\n          -76,\n          98,\n          105,\n          66,\n          -98,\n          76,\n          -41,\n          66,\n          -59,\n          24,\n          -96,\n          66,\n          78,\n          -40,\n          -50,\n          66,\n          116,\n          7,\n          -62,\n          66,\n          -61,\n          122,\n          -126,\n          66,\n          -91,\n          113,\n          3,\n          66,\n          -107,\n          -15,\n          94,\n          66,\n          -60,\n          31,\n          0,\n          66,\n          -104,\n          -18,\n          -65,\n          66,\n          114,\n          116,\n          66,\n          66,\n          82,\n          76,\n          45,\n          66,\n          -63,\n          108,\n          108,\n          66,\n          -105,\n          -6,\n          17,\n          66,\n          -61,\n          92,\n          104,\n          66,\n          115,\n          34,\n          -77,\n          66,\n          87,\n          55,\n          -126,\n          66,\n          -108,\n          -49,\n          89,\n          66,\n          -124,\n          113,\n          96,\n          66,\n          -103,\n          72,\n          15,\n          66,\n          -104,\n          -53,\n          105,\n          66,\n          -115,\n          -56,\n          -79,\n          66,\n          -122,\n          -120,\n          80,\n          66,\n          -122,\n          -21,\n          53,\n          66,\n          -99,\n          -51,\n          -97,\n          66,\n          -59,\n          
-16,\n          -126,\n          66,\n          -69,\n          -49,\n          39,\n          65,\n          -50,\n          112,\n          -107,\n          66,\n          -93,\n          -9,\n          -33,\n          66,\n          93,\n          105,\n          -103,\n          66,\n          -102,\n          -16,\n          -64,\n          66,\n          -84,\n          118,\n          -65,\n          66,\n          -69,\n          115,\n          24,\n          66,\n          -100,\n          106,\n          19,\n          66,\n          118,\n          35,\n          -124,\n          66,\n          -74,\n          98,\n          -50,\n          66,\n          -104,\n          -50,\n          46,\n          66,\n          -94,\n          24,\n          92,\n          66,\n          -60,\n          -118,\n          -28,\n          66,\n          84,\n          107,\n          -56,\n          66,\n          -128,\n          118,\n          101,\n          66,\n          113,\n          68,\n          -128,\n          66,\n          -75,\n          -113,\n          -104,\n          66,\n          74,\n          8,\n          109,\n          66,\n          -72,\n          -113,\n          26,\n          66,\n          106,\n          -62,\n          -99,\n          66,\n          -116,\n          -24,\n          -120,\n          66,\n          -68,\n          -5,\n          -100,\n          66,\n          -87,\n          -80,\n          77,\n          66,\n          118,\n          6,\n          26,\n          66,\n          81,\n          -55,\n          -128,\n          66,\n          -103,\n          -88,\n          41,\n          66,\n          -113,\n          44,\n          36,\n          66,\n          -112,\n          -19,\n          11,\n          66,\n          -61,\n          19,\n          72,\n          66,\n          -70,\n          33,\n          15,\n          66,\n          -73,\n          110,\n          -103,\n          66,\n          -105,\n  
        -65,\n          55,\n          66,\n          98,\n          44,\n          26,\n          66,\n          -72,\n          21,\n          -93,\n          66,\n          73,\n          79,\n          31,\n          66,\n          -67,\n          -43,\n          92,\n          66,\n          -71,\n          12,\n          -102,\n          66,\n          91,\n          -127,\n          125,\n          66,\n          111,\n          1,\n          104,\n          66,\n          127,\n          58,\n          94,\n          66,\n          -109,\n          50,\n          79,\n          66,\n          -122,\n          -41,\n          97,\n          66,\n          90,\n          -59,\n          -64,\n          66,\n          -84,\n          70,\n          78,\n          66,\n          -112,\n          112,\n          -61,\n          66,\n          -71,\n          90,\n          -95,\n          66,\n          -120,\n          85,\n          111,\n          66,\n          99,\n          59,\n          126,\n          66,\n          80,\n          -93,\n          76,\n          66,\n          -108,\n          6,\n          64,\n          66,\n          -63,\n          -35,\n          53,\n          66,\n          -85,\n          -41,\n          31,\n          66,\n          52,\n          84,\n          -100,\n          66,\n          -107,\n          -83,\n          20,\n          66,\n          -72,\n          112,\n          -41,\n          66,\n          87,\n          36,\n          10,\n          66,\n          75,\n          -115,\n          80,\n          66,\n          93,\n          6,\n          125,\n          66,\n          -113,\n          84,\n          -61,\n          66,\n          -121,\n          52,\n          41,\n          66,\n          -63,\n          85,\n          -74,\n          66,\n          -75,\n          -54,\n          123,\n          66,\n          -120,\n          -72,\n          -40,\n          66,\n          92,\n          44,\n      
    6,\n          66,\n          -128,\n          -54,\n          23,\n          66,\n          -74,\n          -108,\n          -105,\n          66,\n          -69,\n          -43,\n          25,\n          66,\n          73,\n          49,\n          -98,\n          66,\n          85,\n          -34,\n          -27,\n          66,\n          81,\n          -55,\n          48,\n          66,\n          127,\n          82,\n          29,\n          66,\n          -80,\n          64,\n          63,\n          66,\n          -101,\n          49,\n          -6,\n          66,\n          -128,\n          51,\n          58,\n          66,\n          -97,\n          28,\n          63,\n          66,\n          -109,\n          -100,\n          -80,\n          66,\n          57,\n          -25,\n          5,\n          66,\n          -110,\n          54,\n          105,\n          66,\n          -100,\n          -50,\n          23,\n          66,\n          -123,\n          -76,\n          -20,\n          66,\n          109,\n          84,\n          113,\n          66,\n          -69,\n          17,\n          74,\n          66,\n          116,\n          -122,\n          -127,\n          66,\n          -122,\n          -48,\n          -115,\n          66,\n          -74,\n          88,\n          34,\n          66,\n          -72,\n          38,\n          78,\n          66,\n          83,\n          -82,\n          -81,\n          66,\n          -105,\n          2,\n          -10,\n          66,\n          -111,\n          -24,\n          -42,\n          66,\n          -64,\n          37,\n          -8,\n          66,\n          -65,\n          40,\n          78,\n          66,\n          -122,\n          -61,\n          126,\n          66,\n          -65,\n          84,\n          -40,\n          66,\n          -82,\n          78,\n          39,\n          66,\n          -59,\n          29,\n          -11,\n          66,\n          100,\n          96,\n          
127,\n          66,\n          99,\n          -37,\n          91,\n          66,\n          -65,\n          -42,\n          91,\n          66,\n          -108,\n          -79,\n          27,\n          66,\n          -60,\n          44,\n          -106,\n          66,\n          -70,\n          17,\n          27,\n          66,\n          -83,\n          9,\n          80,\n          66,\n          -70,\n          -42,\n          -54,\n          66,\n          -98,\n          -86,\n          64,\n          66,\n          -69,\n          -46,\n          -31,\n          66,\n          76,\n          89,\n          -114,\n          66,\n          74,\n          -39,\n          -127,\n          66,\n          -99,\n          70,\n          -54,\n          66,\n          -96,\n          83,\n          102,\n          66,\n          -109,\n          15,\n          116,\n          66,\n          84,\n          47,\n          111,\n          66,\n          69,\n          105,\n          -26,\n          66,\n          85,\n          -67,\n          100,\n          66,\n          -72,\n          116,\n          -81,\n          66,\n          74,\n          126,\n          43,\n          66,\n          -103,\n          -18,\n          57,\n          66,\n          -64,\n          2,\n          77,\n          66,\n          -108,\n          47,\n          94,\n          66,\n          -107,\n          -123,\n          -49,\n          66,\n          101,\n          11,\n          -7,\n          66,\n          -67,\n          126,\n          -126,\n          66,\n          -61,\n          -46,\n          -40,\n          66,\n          89,\n          -92,\n          -109,\n          66,\n          -75,\n          27,\n          79,\n          66,\n          -103,\n          -24,\n          98,\n          66,\n          -70,\n          120,\n          -71,\n          66,\n          -116,\n          125,\n          -77,\n          66,\n          -109,\n          -78,\n          
-107,\n          66,\n          114,\n          100,\n          33,\n          66,\n          -106,\n          72,\n          112,\n          66,\n          104,\n          -87,\n          60,\n          66,\n          -110,\n          44,\n          -123,\n          66,\n          -105,\n          -39,\n          -66,\n          66,\n          -110,\n          86,\n          104,\n          66,\n          100,\n          53,\n          93,\n          66,\n          -125,\n          -18,\n          -65,\n          66,\n          -78,\n          -113,\n          -84,\n          66,\n          91,\n          29,\n          -115,\n          66,\n          84,\n          54,\n          -3,\n          66,\n          88,\n          98,\n          -19,\n          66,\n          82,\n          -28,\n          -89,\n          66,\n          -73,\n          52,\n          69,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          
0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 231,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1162241054,\n          1114254304,\n          1104602657,\n          727010285,\n          626549009,\n          1140765443,\n          989461384,\n          625774706,\n          1103245001,\n          753917179,\n          1013378243,\n          710454697,\n          16,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          602653108,\n          989300059,\n          1118977784,\n          582930724,\n          600526102,\n          1098282644,\n          624243316,\n          639138976,\n          1104607445,\n          1013454742,\n          626323955,\n          581150426,\n          13,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 23,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 23,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -8641322014209361855,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          132573906,\n          
535248293,\n          870950241,\n          975039569,\n          173225913,\n          349495291,\n          117793841,\n          623462321,\n          984259509,\n          460237761,\n          490000187,\n          450074085,\n          976857025,\n          662434898,\n          493005906,\n          306087154,\n          330993590,\n          750497519,\n          1042614487,\n          662472669,\n          309630133,\n          311799482,\n          1050507890,\n          647199571,\n          590923353,\n          1055718205,\n          363952701,\n          354200681,\n          715860198,\n          522750030,\n          728614695,\n          529617982,\n          728286510,\n          603296801,\n          317525931,\n          188020851,\n          748530730,\n          31043,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          44,\n          95,\n          -90,\n          69,\n          88,\n          110,\n          111,\n          68,\n          -73,\n          -20,\n          78,\n          67,\n          -20,\n          40,\n          -108,\n          69,\n          2,\n          27,\n          30,\n          64,\n          -91,\n          -48,\n          101,\n          66,\n          29,\n          91,\n          118,\n          66,\n          -128,\n          -34,\n          -19,\n          65,\n          -86,\n          -37,\n          22,\n          66,\n          -97,\n          121,\n          -34,\n          66,\n          -59,\n          54,\n          74,\n          66,\n          -121,\n          60,\n          83,\n          66,\n          -107,\n          22,\n          125,\n          66,\n          -117,\n          -114,\n          -51,\n          66,\n          -100,\n          104,\n          80,\n          66,\n          86,\n          95,\n          -55,\n          66,\n          -81,\n          85,\n          107,\n          65,\n          
-51,\n          114,\n          80,\n          66,\n          -111,\n          -29,\n          29,\n          66,\n          -66,\n          -17,\n          -10,\n          66,\n          99,\n          106,\n          -29,\n          66,\n          9,\n          68,\n          -32,\n          66,\n          -107,\n          -59,\n          -48,\n          66,\n          -104,\n          117,\n          1,\n          66,\n          -86,\n          117,\n          -83,\n          66,\n          -121,\n          -74,\n          16,\n          66,\n          -89,\n          116,\n          37,\n          66,\n          -76,\n          -81,\n          33,\n          66,\n          -107,\n          -48,\n          -1,\n          66,\n          -116,\n          68,\n          -125,\n          66,\n          123,\n          102,\n          -58,\n          66,\n          -123,\n          111,\n          16,\n          66,\n          90,\n          119,\n          84,\n          66,\n          -100,\n          47,\n          125,\n          66,\n          -114,\n          -119,\n          1,\n          66,\n          -65,\n          116,\n          -60,\n          66,\n          -122,\n          -3,\n          -41,\n          66,\n          -93,\n          -48,\n          95,\n          66,\n          -103,\n          100,\n          -25,\n          66,\n          -82,\n          -94,\n          -26,\n          64,\n          -87,\n          -103,\n          42,\n          66,\n          106,\n          71,\n          -105,\n          66,\n          -127,\n          125,\n          89,\n          66,\n          -89,\n          -99,\n          -61,\n          66,\n          -119,\n          1,\n          -45,\n          66,\n          -74,\n          6,\n          113,\n          66,\n          -63,\n          58,\n          112,\n          66,\n          -80,\n          -73,\n          25,\n          66,\n          -112,\n          124,\n          -61,\n          66,\n      
    63,\n          -31,\n          -53,\n          66,\n          -68,\n          -106,\n          83,\n          66,\n          -108,\n          -88,\n          45,\n          66,\n          -67,\n          -37,\n          -91,\n          66,\n          122,\n          -108,\n          61,\n          66,\n          -108,\n          -44,\n          -14,\n          66,\n          -66,\n          125,\n          -111,\n          66,\n          -63,\n          -87,\n          107,\n          66,\n          -103,\n          -15,\n          99,\n          66,\n          -82,\n          38,\n          78,\n          66,\n          -99,\n          -1,\n          -69,\n          66,\n          80,\n          22,\n          -64,\n          66,\n          -107,\n          71,\n          -101,\n          66,\n          109,\n          -54,\n          -97,\n          66,\n          -110,\n          60,\n          13,\n          66,\n          76,\n          39,\n          -81,\n          66,\n          -61,\n          -40,\n          -80,\n          66,\n          -122,\n          -80,\n          4,\n          66,\n          98,\n          -32,\n          7,\n          66,\n          -111,\n          5,\n          -79,\n          66,\n          113,\n          -104,\n          96,\n          66,\n          -110,\n          67,\n          -86,\n          66,\n          -104,\n          -76,\n          77,\n          66,\n          -109,\n          -70,\n          -32,\n          66,\n          -61,\n          -37,\n          25,\n          66,\n          -106,\n          35,\n          -18,\n          66,\n          82,\n          99,\n          93,\n          66,\n          92,\n          -105,\n          -102,\n          66,\n          -111,\n          10,\n          121,\n          66,\n          -61,\n          80,\n          99,\n          66,\n          -72,\n          -113,\n          -67,\n          66,\n          -105,\n          -26,\n          -78,\n          66,\n   
       74,\n          29,\n          112,\n          66,\n          72,\n          -113,\n          -34,\n          66,\n          91,\n          34,\n          -25,\n          66,\n          -72,\n          -1,\n          96,\n          66,\n          -62,\n          -127,\n          3,\n          66,\n          76,\n          40,\n          -52,\n          66,\n          -108,\n          61,\n          -65,\n          66,\n          -70,\n          -111,\n          -92,\n          66,\n          -64,\n          -9,\n          -39,\n          66,\n          -66,\n          -123,\n          -1,\n          66,\n          108,\n          57,\n          -22,\n          66,\n          -119,\n          -102,\n          -122,\n          66,\n          -97,\n          110,\n          -123,\n          66,\n          70,\n          -128,\n          -106,\n          66,\n          -121,\n          80,\n          23,\n          66,\n          -82,\n          43,\n          81,\n          66,\n          -113,\n          -87,\n          -47,\n          66,\n          82,\n          -57,\n          41,\n          66,\n          -115,\n          89,\n          -90,\n          66,\n          85,\n          -35,\n          -120,\n          66,\n          -117,\n          50,\n          11,\n          66,\n          102,\n          -48,\n          -6,\n          66,\n          91,\n          76,\n          -55,\n          66,\n          93,\n          23,\n          -32,\n          66,\n          83,\n          114,\n          -123,\n          66,\n          127,\n          92,\n          121,\n          66,\n          -81,\n          -14,\n          -64,\n          66,\n          68,\n          99,\n          -41,\n          66,\n          -64,\n          108,\n          92,\n          66,\n          -73,\n          13,\n          -75,\n          66,\n          -85,\n          108,\n          123,\n          66,\n          -67,\n          -55,\n          -11,\n          66,\n       
   100,\n          123,\n          -98,\n          66,\n          126,\n          73,\n          68,\n          66,\n          -65,\n          -19,\n          -64,\n          66,\n          -95,\n          -124,\n          122,\n          66,\n          -123,\n          56,\n          114,\n          66,\n          70,\n          -114,\n          -127,\n          66,\n          69,\n          19,\n          82,\n          66,\n          -108,\n          94,\n          1,\n          66,\n          -110,\n          -73,\n          -33,\n          66,\n          -106,\n          -71,\n          -13,\n          66,\n          -116,\n          110,\n          74,\n          66,\n          80,\n          115,\n          -66,\n          66,\n          -109,\n          16,\n          81,\n          66,\n          -60,\n          -27,\n          -67,\n          66,\n          -109,\n          64,\n          -112,\n          66,\n          101,\n          -54,\n          -109,\n          66,\n          125,\n          39,\n          -39,\n          66,\n          -102,\n          -118,\n          49,\n          66,\n          -95,\n          120,\n          120,\n          66,\n          -61,\n          4,\n          8,\n          66,\n          84,\n          118,\n          95,\n          66,\n          -76,\n          -34,\n          -58,\n          66,\n          -80,\n          -50,\n          -57,\n          66,\n          -106,\n          40,\n          -23,\n          66,\n          91,\n          -72,\n          102,\n          66,\n          88,\n          -95,\n          117,\n          66,\n          -87,\n          115,\n          65,\n          66,\n          -84,\n          114,\n          49,\n          66,\n          -64,\n          -40,\n          -84,\n          66,\n          -114,\n          -109,\n          -4,\n          66,\n          68,\n          62,\n          -47,\n          66,\n          -120,\n          94,\n          53,\n          66,\n      
    -62,\n          53,\n          -101,\n          66,\n          -123,\n          -2,\n          117,\n          66,\n          -91,\n          -2,\n          28,\n          66,\n          89,\n          45,\n          71,\n          66,\n          -106,\n          -28,\n          122,\n          66,\n          -120,\n          33,\n          58,\n          66,\n          -100,\n          -58,\n          -22,\n          66,\n          -61,\n          8,\n          -3,\n          66,\n          -104,\n          -42,\n          -78,\n          66,\n          -59,\n          61,\n          104,\n          66,\n          -128,\n          42,\n          -100,\n          66,\n          -110,\n          100,\n          69,\n          66,\n          -122,\n          -111,\n          -47,\n          66,\n          -89,\n          88,\n          -69,\n          66,\n          -64,\n          -63,\n          59,\n          66,\n          77,\n          -39,\n          115,\n          66,\n          -78,\n          -7,\n          93,\n          66,\n          -106,\n          89,\n          -58,\n          66,\n          78,\n          46,\n          61,\n          66,\n          87,\n          -4,\n          -28,\n          66,\n          -107,\n          4,\n          20,\n          66,\n          -111,\n          -53,\n          -37,\n          66,\n          -59,\n          -9,\n          -58,\n          66,\n          -72,\n          81,\n          31,\n          66,\n          91,\n          103,\n          -118,\n          66,\n          -70,\n          -108,\n          -81,\n          66,\n          -71,\n          111,\n          48,\n          66,\n          -81,\n          35,\n          -121,\n          66,\n          -122,\n          -85,\n          19,\n          66,\n          -61,\n          121,\n          49,\n          66,\n          -88,\n          -13,\n          16,\n          66,\n          -89,\n          -110,\n          83,\n          66,\n          
-113,\n          24,\n          108,\n          66,\n          -81,\n          -28,\n          90,\n          66,\n          77,\n          -102,\n          -29,\n          66,\n          72,\n          90,\n          -44,\n          66,\n          -120,\n          -94,\n          119,\n          66,\n          -118,\n          -80,\n          -63,\n          66,\n          77,\n          -72,\n          -40,\n          66,\n          -65,\n          41,\n          -66,\n          66,\n          -101,\n          -16,\n          58,\n          66,\n          -67,\n          23,\n          -81,\n          66,\n          -120,\n          -103,\n          -32,\n          66,\n          -118,\n          -87,\n          33,\n          66,\n          -63,\n          -101,\n          -58,\n          66,\n          -112,\n          74,\n          -128,\n          66,\n          106,\n          -66,\n          0,\n          66,\n          -64,\n          47,\n          62,\n          66,\n          -105,\n          -117,\n          -119,\n          66,\n          -105,\n          79,\n          -106,\n          66,\n          -104,\n          -124,\n          120,\n          66,\n          -62,\n          -9,\n          -86,\n          66,\n          -115,\n          125,\n          86,\n          66,\n          -116,\n          62,\n          117,\n          66,\n          -115,\n          -103,\n          -73,\n          66,\n          -113,\n          121,\n          118,\n          66,\n          38,\n          36,\n          118,\n          66,\n          71,\n          -32,\n          -24,\n          66,\n          -100,\n          -53,\n          -83,\n          66,\n          93,\n          41,\n          21,\n          66,\n          -115,\n          -26,\n          -78,\n          66,\n          116,\n          31,\n          93,\n          66,\n          -64,\n          -46,\n          -61,\n          66,\n          -68,\n          25,\n          54,\n          
66,\n          -99,\n          -105,\n          -45,\n          66,\n          87,\n          -44,\n          26,\n          66,\n          -125,\n          -3,\n          -76,\n          66,\n          -73,\n          -16,\n          101,\n          66,\n          -106,\n          46,\n          2,\n          66,\n          91,\n          -53,\n          -108,\n          66,\n          -108,\n          13,\n          35,\n          66,\n          -67,\n          89,\n          32,\n          66,\n          -123,\n          -124,\n          -5,\n          66,\n          76,\n          -59,\n          -39,\n          66,\n          78,\n          61,\n          119,\n          66,\n          -123,\n          -44,\n          -23,\n          66,\n          -68,\n          -88,\n          109,\n          66,\n          86,\n          51,\n          64,\n          66,\n          -75,\n          -87,\n          -62,\n          66,\n          -69,\n          -22,\n          -29,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n    
      0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 225,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          583495001,\n          1162202407,\n          602398130,\n          1160646646,\n          602119403,\n          975106741,\n          1160467126,\n          982978898,\n          596269241,\n          588040204,\n          625791545,\n          21605011,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          755649931,\n          767932000,\n          597674267,\n          1025946878,\n          601943065,\n          755470705,\n          1141291529,\n          582990422,\n          595745080,\n          711927458,\n          597271643,\n          22253821,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 29,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 29,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3689043991782272360,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": 
false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          262758222,\n          1005436393,\n          917574752,\n          1066177649,\n          514159657,\n          59980517,\n          332782026,\n          358988389,\n          228128063,\n          735937598,\n          322887361,\n          83008051,\n          1045599270,\n          648394923,\n          656314069,\n          606527421,\n          129042914,\n          733469031,\n          189992371,\n          191841351,\n          184476717,\n          795006183,\n          1059144738,\n          56047987,\n          232466402,\n          1013951987,\n          366041271,\n          232328641,\n          659909335,\n          904439593,\n          880277601,\n          268277061,\n          456726518,\n          1071208101,\n          878024191,\n          521440453,\n          443865535,\n          383727041,\n          11001765,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          54,\n          -78,\n          126,\n          69,\n          25,\n          9,\n          53,\n          66,\n          6,\n          106,\n          52,\n          66,\n          -63,\n          -7,\n          70,\n          66,\n          -76,\n          44,\n          98,\n          65,\n          50,\n          -39,\n          17,\n          66,\n          -82,\n          -27,\n          -120,\n          66,\n          31,\n          81,\n          -124,\n          66,\n          -113,\n          -106,\n          82,\n          66,\n          116,\n          -126,\n          3,\n          66,\n          
-99,\n          -122,\n          -120,\n          66,\n          -108,\n          -49,\n          82,\n          64,\n          -99,\n          65,\n          -83,\n          66,\n          79,\n          118,\n          96,\n          66,\n          -98,\n          -49,\n          -103,\n          66,\n          -68,\n          -82,\n          10,\n          66,\n          109,\n          70,\n          106,\n          66,\n          91,\n          24,\n          31,\n          66,\n          -115,\n          -43,\n          17,\n          66,\n          74,\n          -78,\n          -18,\n          66,\n          -97,\n          -33,\n          -39,\n          66,\n          -95,\n          -81,\n          106,\n          65,\n          19,\n          122,\n          -54,\n          66,\n          72,\n          81,\n          100,\n          66,\n          -105,\n          19,\n          80,\n          66,\n          -119,\n          68,\n          89,\n          66,\n          -83,\n          -84,\n          77,\n          66,\n          -68,\n          -88,\n          -93,\n          66,\n          -74,\n          37,\n          -9,\n          66,\n          -125,\n          25,\n          -116,\n          66,\n          -83,\n          25,\n          -23,\n          66,\n          91,\n          88,\n          4,\n          66,\n          -66,\n          122,\n          -106,\n          66,\n          -60,\n          48,\n          121,\n          66,\n          -119,\n          -1,\n          -104,\n          66,\n          -112,\n          -10,\n          -101,\n          66,\n          -76,\n          78,\n          5,\n          66,\n          -70,\n          92,\n          -124,\n          66,\n          -63,\n          -75,\n          -101,\n          66,\n          85,\n          1,\n          41,\n          66,\n          -101,\n          97,\n          93,\n          66,\n          36,\n          119,\n          116,\n          66,\n          
-100,\n          112,\n          -59,\n          66,\n          96,\n          52,\n          -114,\n          66,\n          -71,\n          3,\n          15,\n          66,\n          80,\n          124,\n          80,\n          66,\n          -78,\n          122,\n          32,\n          66,\n          -71,\n          -28,\n          -10,\n          66,\n          76,\n          52,\n          -15,\n          66,\n          -109,\n          92,\n          -120,\n          66,\n          -109,\n          28,\n          -30,\n          66,\n          83,\n          -9,\n          -10,\n          66,\n          -110,\n          83,\n          -31,\n          66,\n          -84,\n          109,\n          20,\n          66,\n          -83,\n          74,\n          -40,\n          66,\n          -111,\n          75,\n          -41,\n          66,\n          -98,\n          81,\n          -15,\n          66,\n          72,\n          4,\n          3,\n          66,\n          -110,\n          0,\n          97,\n          66,\n          -90,\n          -45,\n          -110,\n          66,\n          -119,\n          57,\n          16,\n          66,\n          -65,\n          -23,\n          -96,\n          66,\n          89,\n          -29,\n          -122,\n          66,\n          -102,\n          58,\n          20,\n          66,\n          95,\n          -60,\n          114,\n          66,\n          -98,\n          61,\n          35,\n          66,\n          101,\n          -28,\n          -14,\n          66,\n          -97,\n          45,\n          -52,\n          66,\n          -80,\n          -19,\n          51,\n          66,\n          -91,\n          -58,\n          25,\n          66,\n          94,\n          -113,\n          81,\n          66,\n          -67,\n          -15,\n          123,\n          66,\n          -67,\n          69,\n          -78,\n          66,\n          -91,\n          -70,\n          -64,\n          66,\n          -121,\n     
     -114,\n          -66,\n          66,\n          -103,\n          48,\n          4,\n          66,\n          -119,\n          -89,\n          -26,\n          66,\n          104,\n          -59,\n          45,\n          66,\n          81,\n          2,\n          96,\n          66,\n          124,\n          -110,\n          -74,\n          66,\n          -78,\n          36,\n          7,\n          66,\n          79,\n          -12,\n          127,\n          66,\n          -61,\n          109,\n          78,\n          66,\n          91,\n          45,\n          -1,\n          66,\n          -107,\n          82,\n          -70,\n          66,\n          -68,\n          -49,\n          126,\n          66,\n          113,\n          126,\n          47,\n          66,\n          -100,\n          -112,\n          -29,\n          66,\n          -111,\n          50,\n          -68,\n          66,\n          121,\n          -99,\n          -64,\n          66,\n          -99,\n          -80,\n          13,\n          66,\n          -117,\n          11,\n          -62,\n          66,\n          80,\n          43,\n          -33,\n          66,\n          -111,\n          70,\n          15,\n          66,\n          -77,\n          -57,\n          -11,\n          66,\n          -85,\n          -31,\n          -59,\n          66,\n          -78,\n          96,\n          67,\n          66,\n          104,\n          48,\n          70,\n          66,\n          -100,\n          -122,\n          34,\n          66,\n          -90,\n          83,\n          -41,\n          66,\n          88,\n          -35,\n          -13,\n          66,\n          71,\n          84,\n          -34,\n          66,\n          69,\n          -76,\n          103,\n          66,\n          75,\n          89,\n          35,\n          66,\n          -60,\n          -119,\n          -96,\n          66,\n          99,\n          95,\n          -8,\n          66,\n          105,\n          5,\n   
       -58,\n          66,\n          -104,\n          -120,\n          -110,\n          66,\n          83,\n          20,\n          99,\n          66,\n          -99,\n          79,\n          14,\n          66,\n          68,\n          45,\n          24,\n          66,\n          -79,\n          30,\n          -116,\n          66,\n          -105,\n          80,\n          -20,\n          66,\n          -111,\n          39,\n          61,\n          66,\n          92,\n          -77,\n          45,\n          66,\n          -85,\n          -95,\n          124,\n          66,\n          -110,\n          -119,\n          -96,\n          66,\n          -62,\n          79,\n          -82,\n          66,\n          -75,\n          54,\n          -64,\n          66,\n          -115,\n          94,\n          -124,\n          66,\n          -111,\n          26,\n          -118,\n          66,\n          -122,\n          16,\n          -41,\n          66,\n          -99,\n          4,\n          -64,\n          66,\n          -118,\n          -36,\n          27,\n          66,\n          92,\n          26,\n          41,\n          66,\n          -103,\n          94,\n          -15,\n          66,\n          84,\n          -99,\n          -107,\n          66,\n          78,\n          -34,\n          -34,\n          66,\n          -104,\n          -38,\n          -95,\n          66,\n          -105,\n          -99,\n          124,\n          66,\n          -63,\n          117,\n          61,\n          66,\n          114,\n          40,\n          116,\n          66,\n          -59,\n          62,\n          -111,\n          66,\n          -123,\n          -67,\n          -7,\n          66,\n          -127,\n          45,\n          -55,\n          66,\n          -62,\n          107,\n          -119,\n          66,\n          -67,\n          -13,\n          -106,\n          66,\n          107,\n          -86,\n          -93,\n          66,\n          91,\n          
6,\n          -96,\n          66,\n          97,\n          32,\n          -77,\n          66,\n          -94,\n          -33,\n          -29,\n          66,\n          -69,\n          108,\n          45,\n          66,\n          -103,\n          -47,\n          107,\n          66,\n          -103,\n          67,\n          40,\n          66,\n          -79,\n          -77,\n          -94,\n          66,\n          101,\n          -40,\n          67,\n          66,\n          -103,\n          -7,\n          126,\n          66,\n          -97,\n          56,\n          -83,\n          66,\n          -123,\n          101,\n          -123,\n          66,\n          -82,\n          5,\n          -69,\n          66,\n          72,\n          20,\n          -119,\n          66,\n          71,\n          2,\n          125,\n          66,\n          81,\n          -87,\n          -22,\n          66,\n          92,\n          114,\n          116,\n          66,\n          -82,\n          64,\n          92,\n          66,\n          -65,\n          30,\n          61,\n          66,\n          101,\n          -49,\n          123,\n          66,\n          -121,\n          -67,\n          -60,\n          66,\n          -73,\n          -127,\n          -106,\n          66,\n          -61,\n          -71,\n          4,\n          66,\n          -107,\n          73,\n          -45,\n          66,\n          -83,\n          -65,\n          -57,\n          66,\n          -111,\n          -13,\n          -75,\n          66,\n          -81,\n          100,\n          2,\n          66,\n          113,\n          119,\n          4,\n          66,\n          -75,\n          103,\n          -68,\n          66,\n          -106,\n          -106,\n          -81,\n          66,\n          -83,\n          119,\n          -83,\n          66,\n          100,\n          -51,\n          44,\n          66,\n          -83,\n          107,\n          -69,\n          66,\n          -68,\n          
-33,\n          -12,\n          66,\n          -59,\n          77,\n          -35,\n          66,\n          -106,\n          76,\n          3,\n          66,\n          -128,\n          -28,\n          -125,\n          66,\n          -109,\n          107,\n          34,\n          66,\n          -120,\n          68,\n          74,\n          66,\n          -95,\n          -5,\n          73,\n          66,\n          -114,\n          6,\n          48,\n          66,\n          -62,\n          -49,\n          77,\n          66,\n          -64,\n          61,\n          -42,\n          66,\n          -119,\n          -109,\n          46,\n          66,\n          74,\n          8,\n          -25,\n          66,\n          -65,\n          29,\n          114,\n          66,\n          109,\n          -21,\n          -50,\n          66,\n          81,\n          102,\n          43,\n          66,\n          -70,\n          100,\n          -86,\n          66,\n          -123,\n          74,\n          -103,\n          66,\n          -82,\n          41,\n          -37,\n          66,\n          40,\n          26,\n          50,\n          66,\n          86,\n          -95,\n          15,\n          66,\n          97,\n          -49,\n          51,\n          66,\n          79,\n          -107,\n          -126,\n          66,\n          -78,\n          -79,\n          -42,\n          66,\n          94,\n          86,\n          45,\n          66,\n          -124,\n          -41,\n          3,\n          66,\n          -74,\n          -27,\n          81,\n          66,\n          118,\n          -120,\n          64,\n          66,\n          -105,\n          40,\n          -23,\n          66,\n          -101,\n          -35,\n          51,\n          66,\n          -96,\n          -123,\n          43,\n          66,\n          -113,\n          101,\n          57,\n          66,\n          -87,\n          -121,\n          86,\n          66,\n          -102,\n          81,\n  
        -112,\n          66,\n          109,\n          -127,\n          -38,\n          66,\n          105,\n          12,\n          34,\n          66,\n          68,\n          124,\n          78,\n          66,\n          -89,\n          -117,\n          25,\n          66,\n          69,\n          73,\n          93,\n          66,\n          -94,\n          33,\n          35,\n          66,\n          -64,\n          -71,\n          -88,\n          66,\n          -107,\n          -113,\n          -29,\n          66,\n          -64,\n          -40,\n          83,\n          66,\n          76,\n          87,\n          -21,\n          66,\n          -113,\n          -22,\n          -25,\n          66,\n          -115,\n          -108,\n          -128,\n          66,\n          72,\n          -55,\n          4,\n          66,\n          72,\n          88,\n          53,\n          66,\n          -117,\n          -93,\n          -7,\n          66,\n          -61,\n          59,\n          -78,\n          66,\n          -115,\n          -89,\n          -7,\n          66,\n          107,\n          107,\n          39,\n          66,\n          -111,\n          88,\n          -66,\n          66,\n          -113,\n          -10,\n          -60,\n          66,\n          -63,\n          -65,\n          -8,\n          66,\n          -123,\n          -14,\n          -10,\n          66,\n          -69,\n          75,\n          47,\n          66,\n          -104,\n          110,\n          -37,\n          66,\n          83,\n          67,\n          2,\n          66,\n          -94,\n          -6,\n          -30,\n          66,\n          -98,\n          96,\n          97,\n          66,\n          77,\n          -65,\n          63,\n          66,\n          78,\n          -99,\n          -46,\n          66,\n          -59,\n          123,\n          109,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n      
    0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 233,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          774779471,\n          1100060917,\n          715317128,\n          588284744,\n          1104308044,\n          1146317395,\n          1160407984,\n          710802688,\n          638526365,\n          626546704,\n          1147656587,\n          582908873,\n          134,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1032562858,\n          1141355825,\n          760491007,\n          712130692,\n          1032561403,\n          1018588352,\n          1031283211,\n          772981298,\n          767687206,\n          772535596,\n          581196676,\n          625831187,\n          121,\n          
0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 21,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 21,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6539383793818569112,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          379423442,\n          938577079,\n          199174183,\n          982996923,\n          632628559,\n          446142685,\n          259345726,\n          527763286,\n          766426417,\n          601459806,\n          355648947,\n          463720061,\n          186049849,\n          342617578,\n          634497351,\n          725542733,\n          204310701,\n          1051670899,\n          175413837,\n          241794773,\n          633304679,\n          886225718,\n          794341030,\n          766171102,\n          879540979,\n          579929779,\n          261576805,\n          1054049447,\n          527619513,\n          515292593,\n          865857201,\n          231853267,\n          592661842,\n          1060551363,\n          754435422,\n          657585651,\n          303761315,\n          108189046,\n          39628461,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          -121,\n          126,\n          -2,\n          68,\n          47,\n          59,\n          68,\n          66,\n          -76,\n          126,\n          56,\n    
      65,\n          -118,\n          -97,\n          70,\n          67,\n          78,\n          -110,\n          -115,\n          66,\n          52,\n          97,\n          51,\n          66,\n          23,\n          -5,\n          -2,\n          66,\n          -113,\n          -48,\n          -119,\n          66,\n          112,\n          48,\n          -47,\n          65,\n          -124,\n          3,\n          20,\n          66,\n          93,\n          -14,\n          92,\n          66,\n          78,\n          -47,\n          1,\n          66,\n          55,\n          -40,\n          -100,\n          66,\n          75,\n          45,\n          -4,\n          66,\n          -68,\n          114,\n          48,\n          66,\n          -67,\n          -125,\n          90,\n          66,\n          -97,\n          -5,\n          -62,\n          66,\n          13,\n          -84,\n          100,\n          66,\n          114,\n          20,\n          -6,\n          66,\n          -115,\n          80,\n          -10,\n          66,\n          -123,\n          84,\n          -62,\n          66,\n          -75,\n          -47,\n          -113,\n          66,\n          -103,\n          -78,\n          -105,\n          66,\n          -114,\n          89,\n          124,\n          66,\n          88,\n          -109,\n          -103,\n          66,\n          -88,\n          99,\n          -56,\n          65,\n          -79,\n          42,\n          27,\n          66,\n          -76,\n          126,\n          23,\n          66,\n          78,\n          87,\n          -75,\n          66,\n          -71,\n          34,\n          31,\n          66,\n          -116,\n          -37,\n          27,\n          66,\n          -59,\n          83,\n          53,\n          66,\n          -69,\n          -53,\n          85,\n          66,\n          120,\n          50,\n          3,\n          66,\n          -128,\n          96,\n          52,\n          66,\n   
       -99,\n          113,\n          -98,\n          66,\n          -77,\n          -38,\n          -38,\n          66,\n          -112,\n          -21,\n          109,\n          66,\n          89,\n          11,\n          -84,\n          66,\n          -67,\n          10,\n          34,\n          66,\n          102,\n          -35,\n          -8,\n          66,\n          76,\n          121,\n          120,\n          66,\n          -62,\n          122,\n          -15,\n          66,\n          -62,\n          10,\n          -45,\n          66,\n          -119,\n          84,\n          88,\n          66,\n          -73,\n          -36,\n          38,\n          66,\n          89,\n          39,\n          -88,\n          66,\n          70,\n          69,\n          29,\n          66,\n          -126,\n          34,\n          -60,\n          66,\n          -91,\n          45,\n          -128,\n          66,\n          107,\n          8,\n          -55,\n          66,\n          -107,\n          -76,\n          12,\n          66,\n          -75,\n          103,\n          50,\n          66,\n          -68,\n          45,\n          -102,\n          66,\n          -96,\n          50,\n          -102,\n          66,\n          -104,\n          76,\n          -84,\n          66,\n          72,\n          23,\n          -89,\n          66,\n          -118,\n          -21,\n          -16,\n          66,\n          -122,\n          -110,\n          65,\n          66,\n          -113,\n          -27,\n          70,\n          66,\n          75,\n          28,\n          -65,\n          66,\n          -118,\n          21,\n          110,\n          66,\n          -116,\n          -119,\n          52,\n          66,\n          -128,\n          -114,\n          -67,\n          66,\n          78,\n          -21,\n          83,\n          66,\n          -77,\n          -29,\n          -69,\n          66,\n          -121,\n          60,\n          19,\n          66,\n     
     100,\n          -87,\n          61,\n          66,\n          96,\n          104,\n          28,\n          66,\n          104,\n          37,\n          -15,\n          66,\n          -90,\n          123,\n          2,\n          66,\n          -109,\n          8,\n          45,\n          66,\n          -91,\n          -47,\n          -73,\n          66,\n          -107,\n          89,\n          35,\n          66,\n          -103,\n          68,\n          2,\n          66,\n          -105,\n          86,\n          121,\n          66,\n          -118,\n          -7,\n          -47,\n          66,\n          -108,\n          -122,\n          -32,\n          66,\n          -69,\n          11,\n          122,\n          66,\n          -114,\n          32,\n          -30,\n          66,\n          80,\n          -93,\n          -91,\n          66,\n          72,\n          -14,\n          125,\n          66,\n          -99,\n          107,\n          107,\n          66,\n          -76,\n          37,\n          -73,\n          66,\n          91,\n          68,\n          -91,\n          66,\n          -82,\n          64,\n          -33,\n          66,\n          -94,\n          -32,\n          -85,\n          66,\n          94,\n          2,\n          -122,\n          65,\n          -51,\n          -57,\n          -55,\n          66,\n          -68,\n          -44,\n          40,\n          66,\n          -100,\n          51,\n          119,\n          66,\n          -70,\n          46,\n          12,\n          66,\n          -101,\n          -60,\n          -6,\n          66,\n          -100,\n          -71,\n          -81,\n          65,\n          -112,\n          -92,\n          98,\n          66,\n          -107,\n          -23,\n          -82,\n          66,\n          -106,\n          45,\n          -59,\n          66,\n          -109,\n          16,\n          -31,\n          66,\n          -64,\n          -119,\n          -89,\n          66,\n       
   104,\n          -99,\n          62,\n          66,\n          -66,\n          -40,\n          -71,\n          66,\n          -60,\n          18,\n          -104,\n          63,\n          -67,\n          -118,\n          36,\n          66,\n          101,\n          -68,\n          10,\n          66,\n          -65,\n          34,\n          -23,\n          66,\n          -76,\n          98,\n          86,\n          66,\n          -86,\n          -99,\n          107,\n          66,\n          70,\n          23,\n          -81,\n          66,\n          -107,\n          120,\n          -26,\n          66,\n          -65,\n          105,\n          15,\n          66,\n          -64,\n          -103,\n          -9,\n          66,\n          -120,\n          -121,\n          -14,\n          66,\n          82,\n          -46,\n          52,\n          66,\n          -97,\n          24,\n          55,\n          66,\n          -94,\n          -87,\n          40,\n          66,\n          -100,\n          -27,\n          -100,\n          66,\n          97,\n          -125,\n          44,\n          66,\n          -70,\n          -127,\n          32,\n          66,\n          -59,\n          -113,\n          118,\n          66,\n          -127,\n          -52,\n          -83,\n          66,\n          90,\n          106,\n          70,\n          66,\n          -117,\n          41,\n          -39,\n          66,\n          -128,\n          -21,\n          93,\n          66,\n          -66,\n          -60,\n          -92,\n          66,\n          71,\n          -34,\n          65,\n          66,\n          -62,\n          110,\n          -29,\n          66,\n          -78,\n          28,\n          110,\n          66,\n          -100,\n          -119,\n          9,\n          66,\n          -66,\n          119,\n          68,\n          66,\n          -120,\n          72,\n          -85,\n          66,\n          -114,\n          -126,\n          6,\n          66,\n    
      -65,\n          114,\n          -100,\n          66,\n          -63,\n          98,\n          -103,\n          66,\n          -125,\n          51,\n          -114,\n          66,\n          109,\n          79,\n          50,\n          66,\n          -110,\n          80,\n          39,\n          66,\n          -120,\n          100,\n          -94,\n          66,\n          116,\n          59,\n          114,\n          66,\n          -73,\n          11,\n          -42,\n          66,\n          -93,\n          -121,\n          94,\n          66,\n          -108,\n          92,\n          77,\n          66,\n          -99,\n          64,\n          63,\n          66,\n          -86,\n          -52,\n          -90,\n          66,\n          -63,\n          45,\n          34,\n          66,\n          100,\n          53,\n          -63,\n          66,\n          74,\n          -73,\n          84,\n          66,\n          -73,\n          -32,\n          -11,\n          66,\n          -128,\n          95,\n          -3,\n          66,\n          -91,\n          -28,\n          81,\n          66,\n          -59,\n          -52,\n          -34,\n          66,\n          86,\n          -121,\n          -62,\n          66,\n          -121,\n          -35,\n          -106,\n          66,\n          -111,\n          78,\n          60,\n          66,\n          -59,\n          126,\n          -128,\n          66,\n          -107,\n          -91,\n          49,\n          66,\n          -108,\n          78,\n          -72,\n          66,\n          -106,\n          105,\n          24,\n          66,\n          113,\n          8,\n          22,\n          66,\n          -78,\n          -31,\n          -91,\n          66,\n          -73,\n          -6,\n          -126,\n          66,\n          -119,\n          -124,\n          -109,\n          66,\n          104,\n          82,\n          -81,\n          66,\n          107,\n          -74,\n          -104,\n          
66,\n          -119,\n          -99,\n          25,\n          66,\n          -109,\n          -85,\n          48,\n          66,\n          117,\n          -21,\n          36,\n          66,\n          -119,\n          44,\n          -108,\n          66,\n          88,\n          -97,\n          31,\n          66,\n          -115,\n          -75,\n          109,\n          66,\n          -106,\n          -13,\n          116,\n          66,\n          -116,\n          0,\n          -48,\n          66,\n          -109,\n          -60,\n          79,\n          66,\n          94,\n          104,\n          100,\n          66,\n          97,\n          -26,\n          25,\n          66,\n          -103,\n          62,\n          79,\n          66,\n          -102,\n          -66,\n          68,\n          66,\n          -127,\n          -12,\n          85,\n          66,\n          -99,\n          33,\n          126,\n          66,\n          80,\n          -104,\n          58,\n          66,\n          72,\n          -38,\n          122,\n          66,\n          -113,\n          58,\n          -91,\n          66,\n          -103,\n          -23,\n          -31,\n          66,\n          91,\n          90,\n          105,\n          66,\n          77,\n          -15,\n          53,\n          66,\n          -106,\n          -82,\n          92,\n          66,\n          -115,\n          64,\n          102,\n          66,\n          77,\n          -19,\n          -79,\n          66,\n          -63,\n          -7,\n          -53,\n          66,\n          83,\n          -95,\n          97,\n          66,\n          -126,\n          -43,\n          80,\n          66,\n          -106,\n          48,\n          103,\n          66,\n          -60,\n          -93,\n          42,\n          66,\n          -74,\n          31,\n          -6,\n          66,\n          -67,\n          83,\n          -41,\n          66,\n          88,\n          -26,\n          -30,\n          
66,\n          -99,\n          -45,\n          21,\n          66,\n          -115,\n          66,\n          -80,\n          66,\n          -124,\n          -107,\n          -100,\n          66,\n          105,\n          -53,\n          121,\n          66,\n          -62,\n          -97,\n          58,\n          66,\n          -59,\n          -101,\n          -3,\n          66,\n          -115,\n          46,\n          -22,\n          66,\n          90,\n          112,\n          46,\n          66,\n          68,\n          23,\n          -1,\n          66,\n          -78,\n          29,\n          -72,\n          66,\n          -61,\n          -17,\n          -63,\n          66,\n          -87,\n          -9,\n          65,\n          66,\n          109,\n          125,\n          -62,\n          66,\n          93,\n          -101,\n          83,\n          66,\n          -68,\n          -101,\n          -21,\n          66,\n          71,\n          -128,\n          -116,\n          66,\n          80,\n          -122,\n          115,\n          66,\n          -112,\n          103,\n          -65,\n          66,\n          90,\n          122,\n          -36,\n          66,\n          -125,\n          -5,\n          70,\n          66,\n          82,\n          -77,\n          34,\n          66,\n          -120,\n          75,\n          -69,\n          66,\n          -92,\n          77,\n          125,\n          66,\n          -107,\n          2,\n          100,\n          66,\n          -66,\n          116,\n          93,\n          66,\n          -94,\n          97,\n          -79,\n          66,\n          -97,\n          27,\n          -66,\n          66,\n          -61,\n          60,\n          -59,\n          66,\n          88,\n          -58,\n          -1,\n          66,\n          -106,\n          17,\n          -88,\n          66,\n          -116,\n          -89,\n          -46,\n          66,\n          82,\n          -21,\n          -71,\n          
66,\n          81,\n          -97,\n          116,\n          66,\n          -108,\n          2,\n          -89,\n          66,\n          -116,\n          31,\n          -11,\n          66,\n          104,\n          -36,\n          39,\n          66,\n          -118,\n          87,\n          113,\n          66,\n          -110,\n          20,\n          -25,\n          66,\n          -116,\n          68,\n          24,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 234,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1160640629,\n          772987820,\n          969321077,\n          1013900867,\n          640140485,\n          1140824237,\n          1161729995,\n          753376669,\n          
1099489100,\n          1157272180,\n          581216848,\n          600348886,\n          365,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          990067936,\n          731616341,\n          1013274026,\n          755154808,\n          645672113,\n          1147381025,\n          774818054,\n          1116902020,\n          1160585132,\n          769261171,\n          600291391,\n          624767948,\n          392,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 20,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 20,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7479358087348062431,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          888224293,\n          76652475,\n          182159171,\n          637983398,\n          773400121,\n          976854783,\n          1067228629,\n          892939501,\n          526060583,\n          599221425,\n          49191529,\n          853784681,\n          787926090,\n          750427586,\n          191829673,\n          504980517,\n          752175741,\n          228897834,\n          745219377,\n          366664030,\n          921802310,\n          233686191,\n          321347507,\n          606661993,\n          499103098,\n          535136857,\n          802225269,\n          451631786,\n          1012501174,\n          
98741855,\n          232303321,\n          515874173,\n          733280861,\n          265989738,\n          1043498569,\n          191170399,\n          870250978,\n          753610413,\n          2599,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          67,\n          -85,\n          37,\n          63,\n          67,\n          -35,\n          121,\n          126,\n          68,\n          -32,\n          -78,\n          127,\n          69,\n          72,\n          32,\n          1,\n          66,\n          -125,\n          -126,\n          -16,\n          66,\n          -66,\n          11,\n          -35,\n          66,\n          101,\n          8,\n          -127,\n          66,\n          83,\n          19,\n          83,\n          66,\n          66,\n          126,\n          -69,\n          66,\n          82,\n          88,\n          -104,\n          66,\n          -102,\n          60,\n          99,\n          66,\n          -84,\n          -1,\n          66,\n          66,\n          82,\n          121,\n          108,\n          66,\n          -73,\n          11,\n          -64,\n          66,\n          -122,\n          124,\n          114,\n          65,\n          78,\n          12,\n          -39,\n          66,\n          -120,\n          -123,\n          51,\n          66,\n          -110,\n          104,\n          -57,\n          66,\n          -81,\n          -4,\n          -97,\n          66,\n          -107,\n          -119,\n          -75,\n          66,\n          -74,\n          66,\n          -59,\n          66,\n          49,\n          -105,\n          45,\n          62,\n          120,\n          -8,\n          98,\n          65,\n          -75,\n          -13,\n          86,\n          66,\n          -92,\n          68,\n          -71,\n          66,\n          -97,\n          63,\n          -127,\n          66,\n          -110,\n          44,\n          -119,\n          66,\n  
        -66,\n          -68,\n          103,\n          66,\n          -94,\n          69,\n          35,\n          66,\n          111,\n          38,\n          102,\n          66,\n          -111,\n          -26,\n          100,\n          66,\n          81,\n          -65,\n          78,\n          66,\n          69,\n          -14,\n          41,\n          66,\n          47,\n          -83,\n          -67,\n          66,\n          86,\n          -86,\n          -93,\n          66,\n          -91,\n          -126,\n          -123,\n          66,\n          -128,\n          -90,\n          48,\n          66,\n          -83,\n          98,\n          48,\n          66,\n          84,\n          -68,\n          88,\n          66,\n          -92,\n          52,\n          79,\n          66,\n          -108,\n          -14,\n          52,\n          66,\n          114,\n          -122,\n          -109,\n          66,\n          -83,\n          7,\n          41,\n          66,\n          84,\n          -68,\n          30,\n          66,\n          103,\n          52,\n          75,\n          66,\n          -101,\n          37,\n          -31,\n          66,\n          81,\n          64,\n          -18,\n          66,\n          -78,\n          -120,\n          15,\n          66,\n          73,\n          97,\n          119,\n          66,\n          -105,\n          -124,\n          77,\n          64,\n          -33,\n          -117,\n          -44,\n          66,\n          -71,\n          -73,\n          95,\n          66,\n          -121,\n          -72,\n          36,\n          66,\n          92,\n          121,\n          54,\n          66,\n          -108,\n          -30,\n          7,\n          66,\n          -93,\n          33,\n          11,\n          66,\n          -115,\n          -44,\n          100,\n          66,\n          -67,\n          36,\n          -106,\n          66,\n          99,\n          -79,\n          56,\n          66,\n          
-122,\n          87,\n          92,\n          66,\n          -119,\n          5,\n          -13,\n          66,\n          72,\n          14,\n          12,\n          66,\n          -74,\n          58,\n          84,\n          66,\n          -125,\n          -51,\n          -6,\n          66,\n          -68,\n          -104,\n          39,\n          66,\n          -122,\n          -60,\n          -111,\n          66,\n          -94,\n          102,\n          72,\n          66,\n          95,\n          -41,\n          -37,\n          66,\n          -69,\n          120,\n          -57,\n          66,\n          106,\n          -61,\n          -44,\n          66,\n          -74,\n          56,\n          62,\n          66,\n          -122,\n          120,\n          119,\n          66,\n          -74,\n          -39,\n          -80,\n          66,\n          -86,\n          -75,\n          -83,\n          66,\n          110,\n          76,\n          -121,\n          66,\n          -109,\n          25,\n          98,\n          65,\n          15,\n          -3,\n          43,\n          66,\n          97,\n          -25,\n          113,\n          66,\n          -64,\n          -55,\n          -112,\n          66,\n          -78,\n          -122,\n          -23,\n          66,\n          80,\n          -56,\n          -93,\n          66,\n          -109,\n          20,\n          40,\n          66,\n          87,\n          14,\n          -9,\n          66,\n          -64,\n          -13,\n          16,\n          66,\n          -111,\n          -126,\n          -16,\n          66,\n          -103,\n          21,\n          -54,\n          66,\n          -125,\n          -84,\n          -77,\n          66,\n          -71,\n          -69,\n          65,\n          66,\n          -67,\n          60,\n          -128,\n          66,\n          -103,\n          78,\n          121,\n          66,\n          -110,\n          113,\n          17,\n          66,\n         
 -117,\n          -35,\n          -10,\n          66,\n          -98,\n          62,\n          -54,\n          66,\n          -62,\n          126,\n          31,\n          66,\n          -98,\n          118,\n          -27,\n          66,\n          76,\n          37,\n          -8,\n          66,\n          -107,\n          -44,\n          -45,\n          66,\n          101,\n          -63,\n          122,\n          66,\n          -59,\n          -58,\n          29,\n          66,\n          -74,\n          6,\n          52,\n          66,\n          -121,\n          -71,\n          -70,\n          66,\n          -64,\n          -50,\n          -6,\n          66,\n          -62,\n          -67,\n          -39,\n          66,\n          -105,\n          -123,\n          -119,\n          66,\n          -100,\n          -105,\n          -16,\n          66,\n          -115,\n          126,\n          -47,\n          66,\n          -89,\n          39,\n          5,\n          66,\n          -90,\n          -10,\n          -54,\n          66,\n          -111,\n          33,\n          68,\n          66,\n          -102,\n          -4,\n          -58,\n          66,\n          -98,\n          -48,\n          -30,\n          66,\n          -71,\n          -96,\n          -71,\n          66,\n          -75,\n          -30,\n          -69,\n          66,\n          -62,\n          25,\n          52,\n          66,\n          -70,\n          78,\n          -102,\n          66,\n          -96,\n          80,\n          -46,\n          66,\n          -69,\n          -1,\n          34,\n          66,\n          -115,\n          -115,\n          40,\n          66,\n          -97,\n          -109,\n          -87,\n          66,\n          -66,\n          -33,\n          -85,\n          66,\n          -67,\n          45,\n          19,\n          66,\n          -72,\n          -6,\n          46,\n          66,\n          -113,\n          -6,\n          38,\n          66,\n      
    96,\n          -31,\n          -41,\n          66,\n          118,\n          32,\n          53,\n          66,\n          92,\n          52,\n          48,\n          66,\n          85,\n          -113,\n          96,\n          66,\n          -117,\n          -65,\n          13,\n          66,\n          -94,\n          3,\n          -86,\n          66,\n          93,\n          -45,\n          86,\n          66,\n          -72,\n          124,\n          45,\n          66,\n          -67,\n          -92,\n          57,\n          66,\n          81,\n          -91,\n          57,\n          66,\n          -109,\n          -69,\n          112,\n          66,\n          78,\n          -45,\n          -87,\n          66,\n          -65,\n          22,\n          -61,\n          66,\n          -74,\n          -77,\n          -43,\n          66,\n          -104,\n          93,\n          60,\n          66,\n          -97,\n          -93,\n          -113,\n          66,\n          90,\n          47,\n          44,\n          66,\n          -121,\n          126,\n          -85,\n          66,\n          -106,\n          74,\n          -115,\n          66,\n          -85,\n          -20,\n          60,\n          66,\n          -64,\n          -126,\n          -91,\n          66,\n          -62,\n          20,\n          3,\n          66,\n          73,\n          -17,\n          122,\n          66,\n          -114,\n          -113,\n          -29,\n          66,\n          91,\n          39,\n          -52,\n          66,\n          75,\n          77,\n          47,\n          66,\n          -60,\n          69,\n          34,\n          66,\n          111,\n          85,\n          -41,\n          66,\n          -60,\n          -44,\n          -18,\n          66,\n          -80,\n          -88,\n          -94,\n          66,\n          77,\n          -4,\n          -35,\n          66,\n          -79,\n          -93,\n          -81,\n          66,\n          108,\n   
       85,\n          -43,\n          66,\n          -95,\n          -62,\n          107,\n          66,\n          77,\n          -8,\n          42,\n          66,\n          87,\n          92,\n          88,\n          66,\n          -106,\n          -38,\n          43,\n          66,\n          -114,\n          -20,\n          10,\n          66,\n          69,\n          100,\n          -128,\n          66,\n          -61,\n          38,\n          -102,\n          66,\n          -108,\n          63,\n          -72,\n          66,\n          -63,\n          53,\n          87,\n          66,\n          -72,\n          93,\n          -125,\n          66,\n          -67,\n          99,\n          2,\n          66,\n          -112,\n          -80,\n          29,\n          66,\n          -63,\n          -87,\n          -45,\n          66,\n          -111,\n          102,\n          -78,\n          66,\n          -68,\n          18,\n          -98,\n          66,\n          -128,\n          64,\n          91,\n          66,\n          -93,\n          -74,\n          48,\n          66,\n          -89,\n          107,\n          45,\n          66,\n          86,\n          -97,\n          121,\n          66,\n          -68,\n          92,\n          -67,\n          66,\n          77,\n          -120,\n          -70,\n          66,\n          -107,\n          -32,\n          27,\n          66,\n          -80,\n          -125,\n          -51,\n          66,\n          -61,\n          119,\n          106,\n          66,\n          -107,\n          98,\n          19,\n          66,\n          -60,\n          -3,\n          -116,\n          66,\n          -73,\n          3,\n          62,\n          66,\n          -110,\n          -91,\n          -87,\n          66,\n          -98,\n          43,\n          -74,\n          66,\n          -62,\n          -17,\n          79,\n          66,\n          -119,\n          63,\n          -28,\n          66,\n          101,\n        
  104,\n          -81,\n          66,\n          84,\n          -30,\n          64,\n          66,\n          74,\n          -32,\n          -75,\n          66,\n          97,\n          -14,\n          37,\n          66,\n          93,\n          97,\n          -1,\n          66,\n          -103,\n          -58,\n          109,\n          66,\n          -67,\n          -76,\n          -103,\n          66,\n          -66,\n          84,\n          -46,\n          66,\n          -117,\n          -75,\n          72,\n          66,\n          98,\n          58,\n          -56,\n          66,\n          -111,\n          73,\n          -120,\n          66,\n          -88,\n          -49,\n          70,\n          66,\n          106,\n          30,\n          95,\n          66,\n          125,\n          -8,\n          -54,\n          66,\n          -110,\n          69,\n          -68,\n          66,\n          -102,\n          -54,\n          92,\n          66,\n          74,\n          -46,\n          -91,\n          66,\n          -110,\n          -121,\n          -71,\n          66,\n          -67,\n          -98,\n          -72,\n          66,\n          -103,\n          -15,\n          92,\n          66,\n          -102,\n          33,\n          -34,\n          66,\n          79,\n          126,\n          88,\n          66,\n          87,\n          -20,\n          -60,\n          66,\n          89,\n          12,\n          -12,\n          66,\n          -73,\n          72,\n          46,\n          66,\n          -108,\n          97,\n          66,\n          66,\n          -68,\n          0,\n          110,\n          66,\n          -72,\n          -21,\n          18,\n          66,\n          -96,\n          -18,\n          -50,\n          66,\n          -66,\n          124,\n          88,\n          66,\n          72,\n          -13,\n          -118,\n          66,\n          -63,\n          27,\n          -106,\n          66,\n          -101,\n          
64,\n          -115,\n          66,\n          -115,\n          65,\n          14,\n          66,\n          -106,\n          -57,\n          55,\n          66,\n          -117,\n          -60,\n          -61,\n          66,\n          -87,\n          -79,\n          -57,\n          66,\n          106,\n          -122,\n          122,\n          66,\n          -71,\n          46,\n          51,\n          66,\n          -93,\n          -35,\n          117,\n          66,\n          -59,\n          26,\n          -58,\n          66,\n          90,\n          2,\n          43,\n          66,\n          -116,\n          -58,\n          102,\n          66,\n          -64,\n          -19,\n          76,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n   
       0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 231,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          773178857,\n          1012386262,\n          1145699305,\n          630754096,\n          645638578,\n          1147351603,\n          1016645539,\n          973599872,\n          1104069010,\n          626487287,\n          767747537,\n          581688418,\n          13,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1160588371,\n          1160658367,\n          1016915542,\n          645169343,\n          639322541,\n          760462628,\n          1018181065,\n          1104274363,\n          602565614,\n          624947605,\n          970342468,\n          725151335,\n          13,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 23,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 23,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -1946928779114242913,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          382683842,\n          35568365,\n          743685422,\n          909763832,\n          743648325,\n          313485543,\n          129614911,\n          903543093,\n          529629177,\n          
111118042,\n          259431401,\n          110860469,\n          1063904305,\n          757033013,\n          795981997,\n          460770546,\n          215414982,\n          731319789,\n          190227902,\n          641649747,\n          475746101,\n          70375666,\n          179386167,\n          466999759,\n          1059399863,\n          593955141,\n          223647674,\n          1023085263,\n          743499999,\n          178998983,\n          658101574,\n          40729343,\n          1035685738,\n          452953662,\n          588859707,\n          911337771,\n          304384234,\n          800819963,\n          6227,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          116,\n          53,\n          -110,\n          69,\n          62,\n          -16,\n          117,\n          69,\n          123,\n          61,\n          -95,\n          69,\n          28,\n          62,\n          -26,\n          65,\n          19,\n          16,\n          -32,\n          66,\n          70,\n          98,\n          -29,\n          65,\n          -55,\n          -30,\n          -36,\n          66,\n          76,\n          -76,\n          -69,\n          66,\n          -99,\n          -116,\n          30,\n          66,\n          -100,\n          -69,\n          -78,\n          66,\n          49,\n          -51,\n          -98,\n          66,\n          -128,\n          65,\n          98,\n          66,\n          -64,\n          -76,\n          -98,\n          66,\n          79,\n          65,\n          -77,\n          66,\n          113,\n          -102,\n          -105,\n          65,\n          -31,\n          -92,\n          -103,\n          66,\n          75,\n          108,\n          -73,\n          66,\n          -64,\n          52,\n          -12,\n          64,\n          -54,\n          33,\n          -10,\n          66,\n          87,\n          -10,\n          -8,\n          
66,\n          -105,\n          14,\n          20,\n          66,\n          115,\n          15,\n          106,\n          66,\n          91,\n          -3,\n          -87,\n          66,\n          97,\n          -20,\n          119,\n          66,\n          -107,\n          -126,\n          -61,\n          66,\n          -86,\n          28,\n          3,\n          66,\n          72,\n          -77,\n          -98,\n          66,\n          -90,\n          -70,\n          64,\n          66,\n          -102,\n          -67,\n          95,\n          66,\n          -62,\n          -104,\n          -68,\n          66,\n          81,\n          -88,\n          92,\n          66,\n          85,\n          70,\n          122,\n          66,\n          -59,\n          6,\n          17,\n          66,\n          -75,\n          115,\n          -111,\n          66,\n          -69,\n          -90,\n          -23,\n          66,\n          -90,\n          -3,\n          59,\n          66,\n          83,\n          -52,\n          38,\n          66,\n          -121,\n          -82,\n          90,\n          66,\n          -102,\n          -8,\n          40,\n          66,\n          113,\n          44,\n          -123,\n          66,\n          102,\n          -66,\n          -45,\n          66,\n          79,\n          37,\n          -116,\n          66,\n          -108,\n          21,\n          14,\n          66,\n          -111,\n          87,\n          47,\n          66,\n          -61,\n          22,\n          -118,\n          66,\n          -99,\n          93,\n          -127,\n          66,\n          -109,\n          -112,\n          9,\n          66,\n          -69,\n          16,\n          -83,\n          66,\n          -127,\n          122,\n          -48,\n          66,\n          81,\n          -78,\n          112,\n          66,\n          -118,\n          -13,\n          -59,\n          66,\n          -76,\n          42,\n          -73,\n          66,\n 
         -111,\n          -52,\n          -42,\n          66,\n          100,\n          106,\n          39,\n          66,\n          -79,\n          -105,\n          -79,\n          66,\n          -57,\n          106,\n          -92,\n          66,\n          -104,\n          81,\n          -117,\n          66,\n          78,\n          -25,\n          -88,\n          66,\n          -103,\n          117,\n          -113,\n          66,\n          -118,\n          -90,\n          122,\n          66,\n          -125,\n          47,\n          87,\n          66,\n          111,\n          -93,\n          51,\n          66,\n          -92,\n          -124,\n          57,\n          66,\n          -111,\n          6,\n          -104,\n          66,\n          -114,\n          89,\n          -53,\n          66,\n          102,\n          95,\n          -119,\n          66,\n          -109,\n          -85,\n          54,\n          66,\n          -107,\n          54,\n          16,\n          66,\n          -76,\n          -51,\n          74,\n          66,\n          99,\n          -91,\n          -11,\n          66,\n          -123,\n          -70,\n          -29,\n          66,\n          74,\n          80,\n          60,\n          66,\n          15,\n          -14,\n          -9,\n          66,\n          -92,\n          119,\n          -28,\n          66,\n          -117,\n          9,\n          -56,\n          66,\n          89,\n          94,\n          -83,\n          66,\n          -70,\n          90,\n          -95,\n          66,\n          -126,\n          -114,\n          -15,\n          66,\n          -85,\n          36,\n          -55,\n          66,\n          -97,\n          -21,\n          15,\n          66,\n          -63,\n          -111,\n          125,\n          66,\n          -61,\n          13,\n          7,\n          66,\n          -120,\n          -32,\n          36,\n          66,\n          -71,\n          68,\n          126,\n          
66,\n          124,\n          -88,\n          -112,\n          66,\n          -105,\n          99,\n          -75,\n          66,\n          -63,\n          80,\n          102,\n          66,\n          -121,\n          -69,\n          -82,\n          66,\n          83,\n          -8,\n          32,\n          66,\n          97,\n          -23,\n          60,\n          66,\n          -68,\n          127,\n          -29,\n          66,\n          86,\n          81,\n          -58,\n          66,\n          115,\n          -54,\n          -34,\n          66,\n          -112,\n          -11,\n          -70,\n          66,\n          86,\n          -20,\n          -71,\n          66,\n          -123,\n          -106,\n          88,\n          66,\n          -78,\n          -96,\n          -20,\n          66,\n          -72,\n          112,\n          65,\n          66,\n          -90,\n          -60,\n          -119,\n          66,\n          -106,\n          -83,\n          10,\n          66,\n          -123,\n          -69,\n          4,\n          66,\n          -63,\n          -89,\n          46,\n          66,\n          -107,\n          36,\n          57,\n          66,\n          77,\n          96,\n          -46,\n          66,\n          127,\n          81,\n          -88,\n          66,\n          -80,\n          96,\n          -107,\n          66,\n          -121,\n          51,\n          112,\n          66,\n          -105,\n          5,\n          -7,\n          66,\n          -79,\n          -10,\n          37,\n          66,\n          -124,\n          -68,\n          -104,\n          66,\n          -104,\n          38,\n          -66,\n          66,\n          -119,\n          -16,\n          6,\n          66,\n          -98,\n          29,\n          79,\n          66,\n          119,\n          -116,\n          82,\n          66,\n          88,\n          -16,\n          57,\n          66,\n          -67,\n          -102,\n          85,\n          
66,\n          86,\n          -6,\n          -41,\n          66,\n          -107,\n          54,\n          -91,\n          66,\n          84,\n          124,\n          -82,\n          66,\n          119,\n          115,\n          -79,\n          66,\n          -120,\n          -78,\n          -18,\n          66,\n          -110,\n          1,\n          117,\n          66,\n          87,\n          -27,\n          -5,\n          66,\n          -70,\n          117,\n          -91,\n          66,\n          -108,\n          -88,\n          -114,\n          66,\n          -61,\n          100,\n          -94,\n          66,\n          -65,\n          75,\n          66,\n          66,\n          121,\n          -19,\n          -78,\n          66,\n          -65,\n          85,\n          -105,\n          66,\n          77,\n          -21,\n          33,\n          66,\n          68,\n          -95,\n          -75,\n          66,\n          -77,\n          66,\n          60,\n          66,\n          105,\n          111,\n          -84,\n          66,\n          -102,\n          -10,\n          -11,\n          66,\n          -110,\n          -26,\n          45,\n          66,\n          -84,\n          33,\n          -44,\n          66,\n          110,\n          33,\n          -32,\n          66,\n          -119,\n          -46,\n          -2,\n          66,\n          84,\n          94,\n          -56,\n          66,\n          -69,\n          -96,\n          -126,\n          66,\n          -66,\n          -106,\n          -35,\n          66,\n          89,\n          -109,\n          107,\n          66,\n          -94,\n          -26,\n          62,\n          66,\n          -117,\n          -106,\n          -82,\n          66,\n          82,\n          91,\n          14,\n          66,\n          -102,\n          16,\n          -6,\n          66,\n          -60,\n          -81,\n          -109,\n          66,\n          -73,\n          35,\n          92,\n         
 66,\n          -74,\n          62,\n          -54,\n          66,\n          110,\n          117,\n          -5,\n          66,\n          -107,\n          86,\n          -80,\n          66,\n          -60,\n          11,\n          -112,\n          66,\n          -61,\n          -78,\n          120,\n          66,\n          -65,\n          78,\n          -43,\n          66,\n          -73,\n          -25,\n          -102,\n          66,\n          -126,\n          -81,\n          -73,\n          66,\n          -66,\n          -123,\n          -73,\n          66,\n          -123,\n          104,\n          -76,\n          66,\n          -121,\n          -94,\n          -89,\n          66,\n          -124,\n          74,\n          -99,\n          66,\n          -116,\n          -2,\n          -76,\n          66,\n          -68,\n          -2,\n          -89,\n          66,\n          75,\n          -120,\n          -116,\n          66,\n          -65,\n          -100,\n          -43,\n          66,\n          -61,\n          102,\n          86,\n          66,\n          -78,\n          68,\n          -98,\n          66,\n          72,\n          37,\n          30,\n          66,\n          -74,\n          -15,\n          -123,\n          66,\n          116,\n          7,\n          -82,\n          66,\n          -80,\n          -101,\n          6,\n          66,\n          -78,\n          -91,\n          -25,\n          66,\n          -114,\n          117,\n          -71,\n          66,\n          -94,\n          -109,\n          -32,\n          66,\n          -76,\n          125,\n          55,\n          66,\n          -127,\n          55,\n          -106,\n          66,\n          -85,\n          75,\n          32,\n          66,\n          74,\n          127,\n          -34,\n          66,\n          -86,\n          67,\n          -16,\n          66,\n          -61,\n          -43,\n          -27,\n          66,\n          -114,\n          35,\n          
-85,\n          66,\n          -74,\n          70,\n          -101,\n          66,\n          -73,\n          -13,\n          -87,\n          66,\n          -108,\n          3,\n          -65,\n          66,\n          74,\n          -60,\n          53,\n          66,\n          104,\n          -108,\n          -13,\n          66,\n          103,\n          -39,\n          10,\n          66,\n          110,\n          58,\n          50,\n          66,\n          88,\n          108,\n          110,\n          66,\n          -64,\n          119,\n          93,\n          66,\n          -87,\n          104,\n          76,\n          66,\n          -81,\n          5,\n          94,\n          66,\n          -116,\n          92,\n          13,\n          66,\n          -67,\n          -25,\n          -1,\n          66,\n          90,\n          19,\n          -18,\n          66,\n          77,\n          39,\n          18,\n          66,\n          -83,\n          -33,\n          24,\n          66,\n          84,\n          -41,\n          -115,\n          66,\n          -90,\n          -78,\n          -30,\n          66,\n          -62,\n          74,\n          -31,\n          66,\n          -106,\n          -120,\n          90,\n          66,\n          -94,\n          -45,\n          -96,\n          66,\n          124,\n          22,\n          -8,\n          66,\n          93,\n          65,\n          13,\n          66,\n          -120,\n          57,\n          -103,\n          66,\n          88,\n          96,\n          -37,\n          66,\n          -100,\n          -23,\n          71,\n          66,\n          -70,\n          96,\n          75,\n          66,\n          -71,\n          83,\n          -109,\n          66,\n          -120,\n          -36,\n          -41,\n          66,\n          -108,\n          103,\n          19,\n          66,\n          88,\n          14,\n          -63,\n          66,\n          -119,\n          -114,\n          57,\n     
     66,\n          -64,\n          112,\n          41,\n          66,\n          88,\n          53,\n          119,\n          66,\n          -116,\n          -86,\n          12,\n          66,\n          84,\n          -53,\n          -9,\n          66,\n          -65,\n          44,\n          40,\n          66,\n          74,\n          -61,\n          71,\n          66,\n          -65,\n          50,\n          107,\n          66,\n          -109,\n          82,\n          114,\n          66,\n          -68,\n          45,\n          -110,\n          66,\n          -111,\n          -61,\n          -33,\n          66,\n          83,\n          -31,\n          124,\n          66,\n          77,\n          75,\n          122,\n          66,\n          -59,\n          38,\n          -13,\n          66,\n          82,\n          10,\n          127,\n          66,\n          102,\n          -47,\n          -61,\n          66,\n          69,\n          27,\n          66,\n          66,\n          -124,\n          -31,\n          17,\n          66,\n          -69,\n          -21,\n          40,\n          66,\n          -68,\n          -75,\n          -10,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n 
         0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 231,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1103205176,\n          1018712510,\n          1160588330,\n          1140797273,\n          759950828,\n          597274046,\n          1142932018,\n          1099482568,\n          626303302,\n          1157212808,\n          638762557,\n          631089349,\n          13,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          770037556,\n          989995012,\n          1027784929,\n          1159935863,\n          1116909284,\n          989477531,\n          1155822910,\n          600459560,\n          602053855,\n          985046548,\n          581842249,\n          755445598,\n          13,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 23,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 23,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -506526637525828854,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      
\"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          576280926,\n          927163981,\n          605657545,\n          36542934,\n          979282387,\n          513316351,\n          75349067,\n          527426881,\n          594341451,\n          215076597,\n          44371942,\n          610711145,\n          722929218,\n          727435191,\n          250609621,\n          39364477,\n          670951999,\n          497525985,\n          791842027,\n          383748555,\n          74503847,\n          131771513,\n          195497686,\n          871192506,\n          471404001,\n          481101102,\n          43350835,\n          897546731,\n          241166303,\n          746235349,\n          753518170,\n          748063309,\n          110970575,\n          997932150,\n          387185630,\n          862779169,\n          181844805,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -39,\n          -78,\n          -59,\n          69,\n          39,\n          -89,\n          22,\n          68,\n          -109,\n          100,\n          125,\n          69,\n          104,\n          -38,\n          -124,\n          66,\n          60,\n          39,\n          -25,\n          66,\n          -97,\n          -128,\n          45,\n          66,\n          58,\n          -104,\n          27,\n          66,\n          -77,\n          -26,\n          49,\n          66,\n          -101,\n          -12,\n          -99,\n          68,\n          -122,\n          92,\n          68,\n          64,\n          -25,\n          -86,\n          -18,\n          64,\n          104,\n          47,\n          -36,\n          66,\n          4,\n          
119,\n          -91,\n          66,\n          -59,\n          -105,\n          57,\n          66,\n          -58,\n          -17,\n          -11,\n          66,\n          1,\n          63,\n          -61,\n          66,\n          26,\n          120,\n          -45,\n          66,\n          -84,\n          -44,\n          -89,\n          66,\n          -63,\n          -77,\n          74,\n          66,\n          -92,\n          41,\n          -97,\n          66,\n          -67,\n          16,\n          -104,\n          66,\n          122,\n          93,\n          -5,\n          66,\n          -76,\n          64,\n          113,\n          66,\n          4,\n          28,\n          71,\n          66,\n          98,\n          65,\n          8,\n          66,\n          -96,\n          55,\n          104,\n          66,\n          -71,\n          100,\n          -87,\n          66,\n          -112,\n          17,\n          -11,\n          66,\n          -100,\n          -101,\n          109,\n          66,\n          126,\n          -119,\n          76,\n          66,\n          76,\n          -81,\n          -1,\n          66,\n          78,\n          110,\n          -35,\n          66,\n          -107,\n          -117,\n          74,\n          66,\n          -125,\n          -106,\n          -79,\n          66,\n          -104,\n          14,\n          -44,\n          66,\n          110,\n          -99,\n          -47,\n          66,\n          96,\n          -89,\n          -11,\n          66,\n          -62,\n          54,\n          -66,\n          66,\n          73,\n          0,\n          -93,\n          66,\n          83,\n          106,\n          -57,\n          66,\n          91,\n          42,\n          -85,\n          66,\n          -74,\n          -9,\n          127,\n          66,\n          -97,\n          -81,\n          29,\n          66,\n          -82,\n          -106,\n          15,\n          66,\n          -101,\n          -22,\n   
       74,\n          66,\n          93,\n          9,\n          -54,\n          66,\n          -62,\n          120,\n          114,\n          66,\n          117,\n          -108,\n          -128,\n          66,\n          -123,\n          99,\n          -56,\n          66,\n          -62,\n          47,\n          -86,\n          66,\n          -108,\n          -124,\n          -121,\n          66,\n          -106,\n          -80,\n          78,\n          66,\n          -60,\n          88,\n          -56,\n          66,\n          -108,\n          -73,\n          -30,\n          66,\n          -100,\n          54,\n          -37,\n          66,\n          99,\n          -107,\n          -76,\n          66,\n          108,\n          -109,\n          -21,\n          66,\n          113,\n          -128,\n          80,\n          66,\n          -110,\n          101,\n          14,\n          66,\n          -67,\n          -108,\n          124,\n          66,\n          -78,\n          -11,\n          -88,\n          66,\n          125,\n          85,\n          -95,\n          66,\n          84,\n          36,\n          87,\n          66,\n          -72,\n          111,\n          -79,\n          66,\n          -71,\n          -91,\n          21,\n          66,\n          -99,\n          -86,\n          -2,\n          66,\n          -97,\n          -13,\n          -110,\n          66,\n          73,\n          28,\n          -78,\n          66,\n          -88,\n          101,\n          34,\n          66,\n          -117,\n          40,\n          39,\n          66,\n          -65,\n          -115,\n          106,\n          66,\n          -81,\n          -13,\n          65,\n          66,\n          -60,\n          125,\n          -118,\n          66,\n          -79,\n          57,\n          71,\n          66,\n          -107,\n          41,\n          111,\n          66,\n          -65,\n          18,\n          -60,\n          66,\n          -106,\n          
95,\n          102,\n          66,\n          -100,\n          119,\n          25,\n          66,\n          81,\n          -41,\n          -45,\n          66,\n          -120,\n          -44,\n          -2,\n          66,\n          -122,\n          -94,\n          87,\n          66,\n          101,\n          86,\n          1,\n          66,\n          -103,\n          72,\n          -66,\n          66,\n          -98,\n          100,\n          -64,\n          66,\n          -95,\n          -14,\n          41,\n          66,\n          -65,\n          100,\n          4,\n          66,\n          87,\n          60,\n          100,\n          66,\n          107,\n          -99,\n          -128,\n          66,\n          -91,\n          55,\n          -38,\n          66,\n          98,\n          -48,\n          -33,\n          66,\n          -117,\n          -98,\n          104,\n          66,\n          108,\n          55,\n          121,\n          66,\n          -118,\n          -73,\n          -13,\n          66,\n          -111,\n          95,\n          19,\n          66,\n          -120,\n          -65,\n          -88,\n          66,\n          -108,\n          -91,\n          110,\n          66,\n          104,\n          -61,\n          11,\n          66,\n          -90,\n          -68,\n          71,\n          66,\n          -61,\n          -66,\n          21,\n          66,\n          84,\n          27,\n          -48,\n          66,\n          113,\n          -5,\n          -29,\n          66,\n          68,\n          45,\n          1,\n          66,\n          -103,\n          37,\n          -34,\n          66,\n          70,\n          42,\n          -63,\n          66,\n          -117,\n          -59,\n          -38,\n          66,\n          79,\n          88,\n          -94,\n          66,\n          -62,\n          119,\n          -44,\n          66,\n          -73,\n          8,\n          9,\n          66,\n          80,\n          69,\n      
    36,\n          66,\n          127,\n          15,\n          34,\n          66,\n          100,\n          -6,\n          -73,\n          66,\n          -123,\n          -93,\n          -19,\n          66,\n          79,\n          -26,\n          -57,\n          66,\n          108,\n          54,\n          -105,\n          66,\n          101,\n          -65,\n          -22,\n          66,\n          -64,\n          -4,\n          34,\n          66,\n          -64,\n          -105,\n          -101,\n          66,\n          123,\n          114,\n          46,\n          66,\n          -122,\n          -50,\n          -125,\n          66,\n          76,\n          34,\n          -116,\n          66,\n          77,\n          -71,\n          -2,\n          66,\n          -104,\n          104,\n          -79,\n          66,\n          -96,\n          101,\n          -120,\n          66,\n          -119,\n          112,\n          -16,\n          66,\n          115,\n          -96,\n          56,\n          66,\n          -70,\n          68,\n          90,\n          66,\n          -98,\n          -95,\n          -20,\n          66,\n          78,\n          13,\n          -120,\n          66,\n          81,\n          72,\n          74,\n          66,\n          -106,\n          -5,\n          45,\n          66,\n          -111,\n          -95,\n          -10,\n          66,\n          98,\n          15,\n          -72,\n          66,\n          -71,\n          -121,\n          92,\n          66,\n          -65,\n          -58,\n          63,\n          66,\n          104,\n          15,\n          67,\n          66,\n          -72,\n          -112,\n          47,\n          66,\n          -89,\n          -1,\n          -36,\n          66,\n          -113,\n          99,\n          -19,\n          66,\n          -69,\n          103,\n          -2,\n          66,\n          -125,\n          42,\n          118,\n          66,\n          -117,\n          93,\n       
   -128,\n          66,\n          -62,\n          -21,\n          -67,\n          66,\n          -62,\n          -74,\n          -93,\n          66,\n          -94,\n          78,\n          82,\n          66,\n          -105,\n          108,\n          -122,\n          66,\n          106,\n          -77,\n          40,\n          66,\n          72,\n          -102,\n          47,\n          66,\n          -73,\n          -36,\n          87,\n          66,\n          -117,\n          -16,\n          86,\n          66,\n          -61,\n          90,\n          -1,\n          66,\n          -64,\n          103,\n          49,\n          66,\n          -116,\n          -105,\n          77,\n          66,\n          -122,\n          -58,\n          78,\n          66,\n          -70,\n          16,\n          44,\n          66,\n          -73,\n          18,\n          -53,\n          66,\n          -67,\n          68,\n          -27,\n          66,\n          86,\n          14,\n          29,\n          66,\n          -107,\n          -96,\n          8,\n          66,\n          -73,\n          -23,\n          52,\n          66,\n          -64,\n          85,\n          88,\n          66,\n          -113,\n          26,\n          -126,\n          66,\n          -111,\n          21,\n          88,\n          66,\n          88,\n          -126,\n          8,\n          66,\n          100,\n          -40,\n          -103,\n          66,\n          -69,\n          62,\n          -13,\n          66,\n          -72,\n          79,\n          95,\n          66,\n          103,\n          -56,\n          -51,\n          66,\n          -80,\n          -58,\n          -34,\n          66,\n          97,\n          -54,\n          46,\n          66,\n          -61,\n          -1,\n          73,\n          66,\n          -109,\n          121,\n          27,\n          66,\n          95,\n          -74,\n          -20,\n          66,\n          -117,\n          -66,\n          
27,\n          66,\n          69,\n          -35,\n          -49,\n          66,\n          -110,\n          -86,\n          89,\n          66,\n          -77,\n          17,\n          -21,\n          66,\n          -121,\n          -7,\n          120,\n          66,\n          -118,\n          40,\n          -84,\n          66,\n          94,\n          104,\n          43,\n          66,\n          -72,\n          -66,\n          52,\n          66,\n          -63,\n          -67,\n          50,\n          66,\n          -67,\n          -5,\n          -30,\n          66,\n          -118,\n          108,\n          -119,\n          66,\n          104,\n          100,\n          21,\n          66,\n          -72,\n          17,\n          -91,\n          66,\n          -77,\n          -107,\n          110,\n          66,\n          -94,\n          72,\n          101,\n          66,\n          -66,\n          123,\n          -10,\n          66,\n          -67,\n          11,\n          -111,\n          66,\n          -112,\n          13,\n          18,\n          66,\n          -117,\n          75,\n          73,\n          66,\n          -72,\n          -94,\n          -118,\n          66,\n          85,\n          39,\n          -124,\n          66,\n          -62,\n          -41,\n          12,\n          66,\n          -122,\n          101,\n          83,\n          66,\n          -65,\n          -51,\n          -89,\n          66,\n          -100,\n          -38,\n          -27,\n          66,\n          125,\n          33,\n          47,\n          66,\n          -66,\n          10,\n          -126,\n          66,\n          84,\n          31,\n          88,\n          66,\n          -125,\n          127,\n          -101,\n          66,\n          -63,\n          16,\n          85,\n          66,\n          74,\n          -101,\n          -63,\n          66,\n          -109,\n          -126,\n          91,\n          66,\n          -64,\n          -109,\n       
   -111,\n          66,\n          -77,\n          -98,\n          -25,\n          66,\n          -64,\n          3,\n          -5,\n          66,\n          87,\n          23,\n          107,\n          66,\n          -99,\n          -51,\n          101,\n          66,\n          87,\n          17,\n          123,\n          66,\n          -124,\n          -75,\n          -78,\n          66,\n          -123,\n          -72,\n          -34,\n          66,\n          -107,\n          66,\n          -13,\n          66,\n          -86,\n          -121,\n          -5,\n          66,\n          -62,\n          -114,\n          -2,\n          66,\n          -114,\n          -5,\n          83,\n          66,\n          -103,\n          77,\n          124,\n          66,\n          -58,\n          22,\n          116,\n          66,\n          -66,\n          -103,\n          32,\n          66,\n          -97,\n          -50,\n          81,\n          66,\n          -118,\n          -94,\n          -84,\n          66,\n          -121,\n          95,\n          -72,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n  
        0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 222,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1018055060,\n          1146258376,\n          1143063005,\n          773243645,\n          624258709,\n          985054328,\n          774052592,\n          1155856840,\n          629206420,\n          626507348,\n          624794434,\n          819031,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1033033540,\n          717445259,\n          1156704290,\n          1162081312,\n          1146137927,\n          1013745580,\n          1140760097,\n          1018568642,\n          968619758,\n          586117183,\n          581682838,\n          994003,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 32,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 32,\n        \"partialTreeStateEnabled\": 
true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -26643182478891948,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          52102590,\n          803441321,\n          845014002,\n          402434489,\n          452533423,\n          857576527,\n          1038034607,\n          905684145,\n          488373930,\n          214396250,\n          355904417,\n          530702145,\n          1046798206,\n          332791595,\n          504888034,\n          777824675,\n          892833502,\n          861728509,\n          660034926,\n          70744530,\n          501807487,\n          761722539,\n          519539025,\n          182281293,\n          353970593,\n          187506475,\n          578410162,\n          1050517306,\n          729400670,\n          325250227,\n          714712277,\n          1039845225,\n          590912757,\n          313752750,\n          848418633,\n          775234665,\n          907772981,\n          651257981,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -18,\n          62,\n          -6,\n          67,\n          -19,\n          -107,\n          114,\n          66,\n          60,\n          -76,\n          113,\n          66,\n          -78,\n          -82,\n          -78,\n          66,\n          2,\n          57,\n          -79,\n          65,\n          -30,\n          -89,\n          -101,\n          65,\n       
   -108,\n          87,\n          79,\n          66,\n          -128,\n          -29,\n          -89,\n          66,\n          -74,\n          5,\n          -111,\n          65,\n          118,\n          -110,\n          62,\n          66,\n          -85,\n          -128,\n          -33,\n          65,\n          -121,\n          120,\n          29,\n          66,\n          -75,\n          118,\n          68,\n          66,\n          114,\n          -108,\n          -46,\n          63,\n          -27,\n          -25,\n          62,\n          66,\n          106,\n          -111,\n          30,\n          66,\n          -112,\n          101,\n          -86,\n          66,\n          -105,\n          -30,\n          127,\n          66,\n          -113,\n          -55,\n          -35,\n          66,\n          -93,\n          -101,\n          49,\n          66,\n          -71,\n          94,\n          -56,\n          66,\n          124,\n          -21,\n          106,\n          66,\n          115,\n          63,\n          -121,\n          66,\n          115,\n          75,\n          81,\n          66,\n          121,\n          4,\n          45,\n          66,\n          -113,\n          107,\n          -73,\n          66,\n          91,\n          36,\n          -32,\n          66,\n          -81,\n          14,\n          -6,\n          66,\n          86,\n          29,\n          -93,\n          66,\n          -113,\n          -90,\n          67,\n          66,\n          99,\n          -20,\n          31,\n          66,\n          -58,\n          -74,\n          -105,\n          66,\n          -122,\n          23,\n          44,\n          66,\n          88,\n          -99,\n          4,\n          66,\n          -125,\n          -68,\n          14,\n          66,\n          -107,\n          -60,\n          -32,\n          66,\n          109,\n          120,\n          22,\n          66,\n          -99,\n          -67,\n          -5,\n          66,\n      
    -113,\n          -21,\n          113,\n          66,\n          -60,\n          122,\n          -107,\n          66,\n          -112,\n          -37,\n          8,\n          66,\n          -65,\n          -102,\n          42,\n          66,\n          -96,\n          48,\n          54,\n          66,\n          -126,\n          4,\n          66,\n          66,\n          -102,\n          80,\n          61,\n          66,\n          108,\n          56,\n          -34,\n          66,\n          69,\n          -17,\n          -117,\n          66,\n          -67,\n          -38,\n          -21,\n          66,\n          -67,\n          -68,\n          -29,\n          66,\n          -112,\n          51,\n          70,\n          66,\n          -120,\n          -81,\n          -38,\n          66,\n          97,\n          -115,\n          -57,\n          66,\n          -113,\n          -71,\n          -47,\n          66,\n          -69,\n          74,\n          -76,\n          66,\n          -94,\n          -87,\n          -124,\n          66,\n          -75,\n          -64,\n          75,\n          65,\n          -73,\n          112,\n          16,\n          66,\n          -60,\n          56,\n          40,\n          63,\n          -18,\n          63,\n          74,\n          66,\n          -80,\n          -73,\n          -40,\n          66,\n          -115,\n          -75,\n          25,\n          66,\n          -117,\n          63,\n          -94,\n          66,\n          -78,\n          22,\n          -32,\n          66,\n          -90,\n          66,\n          67,\n          66,\n          108,\n          -60,\n          2,\n          66,\n          -67,\n          -102,\n          -117,\n          66,\n          -113,\n          14,\n          112,\n          66,\n          -76,\n          94,\n          -118,\n          66,\n          125,\n          22,\n          74,\n          66,\n          96,\n          -28,\n          70,\n          66,\n       
   -72,\n          0,\n          -113,\n          66,\n          94,\n          95,\n          31,\n          66,\n          -63,\n          107,\n          -9,\n          66,\n          79,\n          -45,\n          -52,\n          66,\n          73,\n          4,\n          121,\n          66,\n          -103,\n          84,\n          -107,\n          66,\n          -74,\n          -46,\n          39,\n          66,\n          75,\n          48,\n          27,\n          66,\n          73,\n          -48,\n          19,\n          66,\n          -124,\n          -82,\n          70,\n          66,\n          102,\n          81,\n          -41,\n          66,\n          104,\n          93,\n          39,\n          66,\n          -116,\n          6,\n          -18,\n          66,\n          -117,\n          52,\n          -9,\n          66,\n          -89,\n          -81,\n          36,\n          66,\n          95,\n          43,\n          -49,\n          66,\n          -75,\n          124,\n          6,\n          66,\n          107,\n          -62,\n          84,\n          66,\n          -115,\n          -109,\n          1,\n          66,\n          91,\n          16,\n          97,\n          66,\n          -124,\n          82,\n          -26,\n          66,\n          -121,\n          -15,\n          -122,\n          66,\n          -71,\n          47,\n          89,\n          66,\n          -107,\n          61,\n          -127,\n          66,\n          -121,\n          -81,\n          17,\n          66,\n          107,\n          59,\n          -105,\n          66,\n          -77,\n          -62,\n          -112,\n          66,\n          -65,\n          -40,\n          14,\n          66,\n          86,\n          83,\n          -46,\n          66,\n          80,\n          -35,\n          40,\n          66,\n          37,\n          -87,\n          61,\n          66,\n          -77,\n          -121,\n          31,\n          66,\n          -98,\n        
  -7,\n          15,\n          66,\n          -128,\n          47,\n          -44,\n          66,\n          78,\n          9,\n          32,\n          66,\n          -98,\n          -121,\n          -37,\n          66,\n          -89,\n          -121,\n          -75,\n          66,\n          -99,\n          -52,\n          121,\n          66,\n          -62,\n          10,\n          124,\n          66,\n          -128,\n          -64,\n          -24,\n          66,\n          -108,\n          12,\n          101,\n          66,\n          -72,\n          -92,\n          54,\n          66,\n          -116,\n          18,\n          -12,\n          66,\n          86,\n          23,\n          3,\n          66,\n          -71,\n          67,\n          -92,\n          66,\n          -75,\n          124,\n          -82,\n          66,\n          -64,\n          -110,\n          -69,\n          66,\n          -64,\n          -66,\n          -83,\n          66,\n          91,\n          44,\n          24,\n          66,\n          -92,\n          105,\n          -114,\n          66,\n          125,\n          -127,\n          -55,\n          66,\n          91,\n          24,\n          -62,\n          66,\n          -73,\n          -24,\n          -125,\n          66,\n          -100,\n          -55,\n          41,\n          66,\n          -71,\n          73,\n          107,\n          66,\n          -61,\n          77,\n          -24,\n          66,\n          86,\n          63,\n          -67,\n          66,\n          -103,\n          37,\n          16,\n          66,\n          -118,\n          -109,\n          44,\n          66,\n          -105,\n          -9,\n          -33,\n          66,\n          -79,\n          48,\n          -19,\n          66,\n          -74,\n          97,\n          9,\n          66,\n          -96,\n          73,\n          -69,\n          66,\n          -59,\n          65,\n          120,\n          66,\n          -88,\n          
103,\n          -36,\n          66,\n          88,\n          -30,\n          -115,\n          66,\n          82,\n          -114,\n          29,\n          66,\n          104,\n          -37,\n          -89,\n          66,\n          -99,\n          -41,\n          1,\n          66,\n          -77,\n          -16,\n          -48,\n          66,\n          -99,\n          24,\n          -117,\n          66,\n          -59,\n          -32,\n          -9,\n          66,\n          -117,\n          62,\n          83,\n          66,\n          -99,\n          43,\n          -54,\n          66,\n          -111,\n          115,\n          -37,\n          66,\n          -117,\n          -9,\n          110,\n          66,\n          -65,\n          108,\n          89,\n          66,\n          -78,\n          -6,\n          -42,\n          66,\n          -98,\n          127,\n          -31,\n          66,\n          -72,\n          -72,\n          33,\n          66,\n          -117,\n          -76,\n          114,\n          66,\n          -116,\n          46,\n          -18,\n          66,\n          103,\n          17,\n          -113,\n          66,\n          -83,\n          -97,\n          -106,\n          66,\n          -62,\n          35,\n          48,\n          66,\n          -99,\n          20,\n          27,\n          66,\n          -61,\n          -45,\n          -81,\n          66,\n          -117,\n          -15,\n          -119,\n          66,\n          -105,\n          15,\n          124,\n          66,\n          108,\n          -67,\n          -88,\n          66,\n          89,\n          -125,\n          -4,\n          66,\n          -107,\n          -25,\n          4,\n          66,\n          -64,\n          -38,\n          -115,\n          66,\n          -100,\n          -20,\n          -48,\n          66,\n          88,\n          6,\n          77,\n          66,\n          86,\n          -22,\n          0,\n          66,\n          -96,\n         
 -108,\n          -89,\n          66,\n          94,\n          100,\n          -55,\n          66,\n          -66,\n          14,\n          28,\n          66,\n          -89,\n          60,\n          37,\n          66,\n          -118,\n          -84,\n          34,\n          66,\n          96,\n          73,\n          76,\n          66,\n          80,\n          64,\n          -114,\n          66,\n          -115,\n          -104,\n          19,\n          66,\n          75,\n          30,\n          98,\n          66,\n          -105,\n          91,\n          -106,\n          66,\n          74,\n          31,\n          -11,\n          66,\n          -118,\n          -3,\n          -11,\n          66,\n          -82,\n          -72,\n          -72,\n          66,\n          -113,\n          35,\n          -83,\n          66,\n          -116,\n          -113,\n          -91,\n          66,\n          -73,\n          84,\n          5,\n          66,\n          -109,\n          -94,\n          -104,\n          66,\n          119,\n          -19,\n          39,\n          66,\n          -117,\n          17,\n          108,\n          66,\n          -98,\n          117,\n          -86,\n          66,\n          -102,\n          56,\n          -113,\n          66,\n          96,\n          90,\n          98,\n          66,\n          -111,\n          53,\n          -126,\n          66,\n          -104,\n          -64,\n          57,\n          66,\n          86,\n          77,\n          -121,\n          66,\n          -65,\n          -98,\n          51,\n          66,\n          -113,\n          -58,\n          -58,\n          66,\n          77,\n          -104,\n          29,\n          66,\n          72,\n          50,\n          -46,\n          66,\n          -125,\n          -36,\n          15,\n          66,\n          91,\n          -31,\n          126,\n          66,\n          -98,\n          106,\n          81,\n          66,\n          -83,\n          
-115,\n          86,\n          66,\n          -116,\n          -24,\n          22,\n          66,\n          94,\n          110,\n          41,\n          66,\n          -70,\n          65,\n          -104,\n          66,\n          94,\n          113,\n          -94,\n          66,\n          -107,\n          -51,\n          68,\n          66,\n          -100,\n          122,\n          -57,\n          66,\n          -71,\n          -16,\n          105,\n          66,\n          -119,\n          -100,\n          -19,\n          66,\n          106,\n          -14,\n          55,\n          66,\n          -113,\n          -97,\n          89,\n          66,\n          -117,\n          19,\n          -2,\n          66,\n          -122,\n          -27,\n          107,\n          66,\n          114,\n          -11,\n          -3,\n          66,\n          -112,\n          10,\n          -126,\n          66,\n          -76,\n          -26,\n          80,\n          66,\n          90,\n          -77,\n          -89,\n          66,\n          87,\n          10,\n          57,\n          66,\n          -115,\n          45,\n          62,\n          66,\n          -108,\n          -120,\n          -57,\n          66,\n          -110,\n          -27,\n          -117,\n          66,\n          110,\n          -96,\n          44,\n          66,\n          -125,\n          3,\n          101,\n          66,\n          87,\n          -89,\n          -119,\n          66,\n          -112,\n          20,\n          102,\n          66,\n          -128,\n          -5,\n          103,\n          66,\n          -61,\n          6,\n          -60,\n          66,\n          -72,\n          -111,\n          -22,\n          66,\n          -98,\n          -92,\n          -3,\n          66,\n          73,\n          -98,\n          -6,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n      
    0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 228,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1160410184,\n          1017177992,\n          583521974,\n          969262118,\n          716729444,\n          600505403,\n          1100016922,\n          753852334,\n          643567427,\n          1160578652,\n          588239222,\n          597076159,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1155824878,\n          630820141,\n          
581396444,\n          581216390,\n          1025975534,\n          1013272006,\n          726472544,\n          626323669,\n          768460445,\n          711865301,\n          582921926,\n          587687386,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 26,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 26,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 2636386493963874353,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          788174302,\n          798717110,\n          366505670,\n          119613373,\n          66512334,\n          853456866,\n          502119545,\n          842074535,\n          914171442,\n          870410549,\n          714861873,\n          489239669,\n          991668698,\n          578117463,\n          74496707,\n          237819874,\n          232737587,\n          1067103550,\n          599611629,\n          250213437,\n          1021368235,\n          228956670,\n          595533297,\n          263410797,\n          594373949,\n          402575103,\n          858203485,\n          45956269,\n          194420585,\n          1010017503,\n          221618018,\n          619120463,\n          706196786,\n          111868117,\n          191735609,\n          903846469,\n          380274103,\n          18166,\n          0,\n          0,\n          0,\n          0,\n          0\n        
],\n        \"cutValueData\": [\n          69,\n          112,\n          -66,\n          86,\n          69,\n          92,\n          39,\n          28,\n          66,\n          79,\n          -121,\n          62,\n          66,\n          -104,\n          40,\n          -80,\n          66,\n          78,\n          91,\n          -59,\n          66,\n          90,\n          -80,\n          91,\n          66,\n          -91,\n          -67,\n          83,\n          66,\n          -118,\n          32,\n          118,\n          66,\n          -96,\n          -65,\n          -83,\n          66,\n          -95,\n          71,\n          -113,\n          65,\n          -63,\n          -16,\n          19,\n          66,\n          68,\n          117,\n          14,\n          66,\n          -78,\n          -93,\n          -10,\n          66,\n          -74,\n          -66,\n          -10,\n          66,\n          127,\n          -8,\n          -88,\n          65,\n          85,\n          -90,\n          7,\n          66,\n          24,\n          -54,\n          -111,\n          66,\n          -61,\n          -27,\n          -40,\n          66,\n          -125,\n          86,\n          60,\n          66,\n          -112,\n          117,\n          34,\n          66,\n          -109,\n          62,\n          -126,\n          66,\n          -76,\n          -26,\n          98,\n          66,\n          -75,\n          4,\n          118,\n          64,\n          -51,\n          72,\n          -93,\n          66,\n          -82,\n          -56,\n          61,\n          66,\n          -83,\n          106,\n          96,\n          66,\n          -110,\n          120,\n          -39,\n          66,\n          -117,\n          14,\n          -128,\n          66,\n          89,\n          -25,\n          -42,\n          66,\n          -122,\n          67,\n          5,\n          66,\n          -84,\n          3,\n          -1,\n          66,\n          111,\n          
12,\n          -109,\n          66,\n          -98,\n          -50,\n          113,\n          66,\n          -102,\n          90,\n          7,\n          66,\n          -125,\n          57,\n          21,\n          66,\n          123,\n          -32,\n          39,\n          66,\n          -110,\n          5,\n          73,\n          66,\n          117,\n          96,\n          3,\n          65,\n          -108,\n          59,\n          -116,\n          66,\n          87,\n          -77,\n          31,\n          66,\n          -82,\n          31,\n          115,\n          66,\n          -59,\n          -9,\n          -47,\n          66,\n          123,\n          -21,\n          105,\n          66,\n          -119,\n          1,\n          -92,\n          66,\n          -69,\n          -31,\n          -89,\n          66,\n          -85,\n          -49,\n          -113,\n          66,\n          82,\n          -50,\n          -90,\n          66,\n          -98,\n          -102,\n          -119,\n          66,\n          -68,\n          -79,\n          17,\n          66,\n          -97,\n          60,\n          -79,\n          66,\n          -88,\n          -113,\n          42,\n          66,\n          -90,\n          -10,\n          -63,\n          66,\n          121,\n          122,\n          -90,\n          66,\n          73,\n          34,\n          45,\n          66,\n          -110,\n          27,\n          7,\n          66,\n          -107,\n          -103,\n          -16,\n          66,\n          -78,\n          41,\n          43,\n          66,\n          -71,\n          -93,\n          -120,\n          66,\n          -63,\n          -45,\n          -108,\n          66,\n          -99,\n          -50,\n          40,\n          66,\n          -102,\n          116,\n          19,\n          66,\n          -118,\n          42,\n          15,\n          66,\n          15,\n          26,\n          -98,\n          66,\n          83,\n          
-13,\n          16,\n          66,\n          -99,\n          127,\n          -43,\n          66,\n          -104,\n          24,\n          45,\n          66,\n          -103,\n          -18,\n          -54,\n          66,\n          86,\n          48,\n          -103,\n          66,\n          -111,\n          -55,\n          -72,\n          66,\n          -70,\n          -50,\n          -49,\n          66,\n          -63,\n          -68,\n          6,\n          66,\n          -61,\n          120,\n          81,\n          66,\n          -79,\n          65,\n          95,\n          66,\n          -60,\n          65,\n          57,\n          66,\n          -61,\n          -63,\n          -39,\n          66,\n          21,\n          -123,\n          -121,\n          66,\n          -113,\n          45,\n          54,\n          66,\n          -121,\n          -121,\n          118,\n          66,\n          108,\n          -49,\n          -84,\n          66,\n          -71,\n          98,\n          -25,\n          66,\n          84,\n          -69,\n          -71,\n          66,\n          -98,\n          13,\n          -97,\n          66,\n          71,\n          9,\n          58,\n          66,\n          -115,\n          -62,\n          -38,\n          66,\n          69,\n          67,\n          27,\n          66,\n          -74,\n          24,\n          56,\n          66,\n          -66,\n          100,\n          111,\n          66,\n          -97,\n          -95,\n          -79,\n          66,\n          92,\n          -125,\n          -63,\n          66,\n          -66,\n          -109,\n          109,\n          66,\n          -63,\n          -14,\n          113,\n          66,\n          100,\n          40,\n          44,\n          66,\n          -122,\n          -8,\n          -35,\n          66,\n          -97,\n          -8,\n          98,\n          66,\n          -67,\n          -88,\n          76,\n          66,\n          -127,\n          
86,\n          -27,\n          66,\n          84,\n          18,\n          125,\n          66,\n          -104,\n          29,\n          92,\n          66,\n          -62,\n          -128,\n          -82,\n          66,\n          -84,\n          36,\n          -25,\n          66,\n          -113,\n          2,\n          35,\n          66,\n          -63,\n          -8,\n          28,\n          66,\n          -72,\n          -82,\n          50,\n          66,\n          -127,\n          14,\n          -87,\n          66,\n          -110,\n          101,\n          -14,\n          66,\n          -102,\n          15,\n          67,\n          66,\n          -99,\n          -74,\n          -63,\n          66,\n          23,\n          -38,\n          -128,\n          66,\n          -113,\n          120,\n          -72,\n          66,\n          98,\n          39,\n          -27,\n          66,\n          -79,\n          51,\n          36,\n          66,\n          -69,\n          0,\n          34,\n          66,\n          87,\n          -41,\n          106,\n          66,\n          -98,\n          -59,\n          -97,\n          66,\n          -101,\n          -107,\n          -9,\n          66,\n          -108,\n          -107,\n          -64,\n          66,\n          -103,\n          61,\n          -11,\n          66,\n          98,\n          -36,\n          -120,\n          66,\n          -73,\n          120,\n          -54,\n          66,\n          89,\n          -22,\n          -66,\n          66,\n          69,\n          -38,\n          97,\n          66,\n          -120,\n          -40,\n          -103,\n          66,\n          -90,\n          -34,\n          70,\n          66,\n          -96,\n          107,\n          -47,\n          66,\n          -67,\n          -70,\n          95,\n          66,\n          -89,\n          -86,\n          -69,\n          66,\n          -78,\n          17,\n          -7,\n          66,\n          84,\n          
-87,\n          35,\n          66,\n          -80,\n          -40,\n          113,\n          66,\n          118,\n          -36,\n          110,\n          66,\n          -94,\n          -7,\n          71,\n          66,\n          -68,\n          91,\n          -125,\n          66,\n          -116,\n          -92,\n          -84,\n          66,\n          124,\n          -15,\n          -98,\n          66,\n          69,\n          57,\n          -39,\n          66,\n          -74,\n          51,\n          -87,\n          66,\n          69,\n          92,\n          -110,\n          66,\n          -113,\n          16,\n          100,\n          66,\n          -116,\n          64,\n          -112,\n          66,\n          81,\n          14,\n          49,\n          66,\n          -95,\n          -83,\n          18,\n          66,\n          -77,\n          -25,\n          102,\n          66,\n          87,\n          122,\n          114,\n          66,\n          81,\n          94,\n          0,\n          66,\n          -123,\n          -8,\n          -58,\n          66,\n          -124,\n          26,\n          -12,\n          66,\n          87,\n          -66,\n          40,\n          66,\n          -59,\n          -119,\n          45,\n          66,\n          -68,\n          54,\n          -66,\n          66,\n          -126,\n          -17,\n          102,\n          66,\n          72,\n          86,\n          94,\n          66,\n          114,\n          -102,\n          -11,\n          66,\n          80,\n          -10,\n          -112,\n          66,\n          -96,\n          13,\n          -104,\n          66,\n          86,\n          -58,\n          27,\n          66,\n          79,\n          21,\n          57,\n          66,\n          -113,\n          -3,\n          -124,\n          66,\n          -80,\n          105,\n          3,\n          66,\n          -117,\n          120,\n          13,\n          66,\n          -77,\n          -64,\n  
        -11,\n          66,\n          -65,\n          -48,\n          99,\n          66,\n          -102,\n          75,\n          -1,\n          66,\n          -102,\n          83,\n          91,\n          66,\n          -99,\n          37,\n          -94,\n          66,\n          98,\n          90,\n          -87,\n          66,\n          -83,\n          64,\n          126,\n          66,\n          77,\n          -57,\n          -109,\n          66,\n          -102,\n          -99,\n          23,\n          66,\n          -106,\n          112,\n          -96,\n          66,\n          122,\n          86,\n          97,\n          66,\n          103,\n          14,\n          53,\n          66,\n          -106,\n          37,\n          72,\n          66,\n          -107,\n          97,\n          -32,\n          66,\n          -111,\n          80,\n          41,\n          66,\n          91,\n          84,\n          123,\n          66,\n          -79,\n          -84,\n          -35,\n          66,\n          -112,\n          -17,\n          -96,\n          66,\n          86,\n          27,\n          -63,\n          66,\n          74,\n          -24,\n          82,\n          66,\n          -72,\n          -7,\n          -6,\n          66,\n          -63,\n          -23,\n          57,\n          66,\n          82,\n          -20,\n          74,\n          66,\n          -117,\n          10,\n          -74,\n          66,\n          88,\n          -103,\n          3,\n          66,\n          97,\n          15,\n          -55,\n          66,\n          -68,\n          -86,\n          92,\n          66,\n          93,\n          94,\n          -105,\n          66,\n          -64,\n          -63,\n          -7,\n          66,\n          -124,\n          -20,\n          104,\n          66,\n          -69,\n          -119,\n          -81,\n          66,\n          -65,\n          19,\n          -20,\n          66,\n          -68,\n          44,\n          
-47,\n          66,\n          -61,\n          65,\n          -2,\n          66,\n          -101,\n          25,\n          59,\n          66,\n          -107,\n          -76,\n          100,\n          66,\n          106,\n          -5,\n          53,\n          66,\n          -120,\n          51,\n          11,\n          66,\n          -99,\n          105,\n          -68,\n          66,\n          -107,\n          93,\n          -23,\n          66,\n          -65,\n          110,\n          -18,\n          66,\n          -60,\n          110,\n          -53,\n          66,\n          -128,\n          -87,\n          59,\n          66,\n          -95,\n          58,\n          72,\n          66,\n          94,\n          -115,\n          -30,\n          66,\n          -110,\n          -77,\n          -62,\n          66,\n          -90,\n          110,\n          -89,\n          66,\n          -111,\n          15,\n          -127,\n          66,\n          80,\n          28,\n          -31,\n          66,\n          -74,\n          32,\n          118,\n          66,\n          -93,\n          -44,\n          76,\n          66,\n          -112,\n          118,\n          15,\n          66,\n          -98,\n          55,\n          -75,\n          66,\n          -59,\n          -89,\n          42,\n          66,\n          84,\n          -22,\n          -89,\n          66,\n          -112,\n          -70,\n          64,\n          66,\n          -60,\n          -52,\n          39,\n          66,\n          86,\n          77,\n          30,\n          66,\n          -118,\n          -113,\n          17,\n          66,\n          -111,\n          -87,\n          -118,\n          66,\n          -117,\n          -117,\n          115,\n          66,\n          -79,\n          5,\n          -109,\n          66,\n          85,\n          -35,\n          103,\n          66,\n          -64,\n          -74,\n          33,\n          66,\n          69,\n          57,\n          
47,\n          66,\n          -119,\n          17,\n          14,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 225,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          1112601230,\n          
1098481256,\n          715612390,\n          755116096,\n          774811544,\n          755091314,\n          1099883041,\n          1104134558,\n          710979488,\n          639137542,\n          970204702,\n          21543125,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1100082838,\n          1026743281,\n          974929489,\n          1157397494,\n          1162075454,\n          770030701,\n          1147892029,\n          1143048553,\n          712423184,\n          1155684874,\n          974042806,\n          26391631,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 29,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 29,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 1481498699720728495,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          395504574,\n          585025198,\n          900579529,\n          660706858,\n          129354621,\n          584653689,\n          227894714,\n          496541255,\n          500241443,\n          884430901,\n          196410546,\n          367394527,\n          311609830,\n          737270875,\n          454334127,\n          510170670,\n          178633154,\n          179615919,\n          312450938,\n          93929382,\n          259980462,\n          535254690,\n          572598979,\n 
         718329150,\n          887737550,\n          371910753,\n          1033713388,\n          710371046,\n          34957989,\n          909313710,\n          621919469,\n          590414885,\n          859822039,\n          440044767,\n          595507497,\n          102226114,\n          343784573,\n          878258135,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          68,\n          -101,\n          -40,\n          -100,\n          66,\n          -125,\n          79,\n          10,\n          66,\n          -76,\n          -29,\n          32,\n          67,\n          -47,\n          -38,\n          35,\n          66,\n          86,\n          105,\n          -128,\n          66,\n          84,\n          58,\n          -22,\n          67,\n          1,\n          -59,\n          123,\n          66,\n          -114,\n          -96,\n          55,\n          66,\n          35,\n          -14,\n          -91,\n          66,\n          113,\n          95,\n          -115,\n          65,\n          -47,\n          -126,\n          69,\n          66,\n          -122,\n          -60,\n          121,\n          66,\n          -119,\n          -120,\n          -18,\n          66,\n          -82,\n          106,\n          -94,\n          65,\n          12,\n          27,\n          -61,\n          66,\n          77,\n          104,\n          -38,\n          66,\n          -72,\n          94,\n          -13,\n          69,\n          100,\n          -74,\n          -54,\n          66,\n          -73,\n          117,\n          -43,\n          66,\n          -110,\n          -8,\n          43,\n          66,\n          -112,\n          -109,\n          -66,\n          66,\n          102,\n          122,\n          -124,\n          66,\n          -65,\n          44,\n          -100,\n          66,\n          78,\n          96,\n          114,\n          66,\n          -86,\n          126,\n         
 115,\n          66,\n          109,\n          82,\n          90,\n          66,\n          -77,\n          33,\n          75,\n          65,\n          17,\n          115,\n          -86,\n          63,\n          -35,\n          -47,\n          2,\n          66,\n          114,\n          126,\n          36,\n          66,\n          -105,\n          -6,\n          12,\n          65,\n          37,\n          34,\n          -15,\n          66,\n          -80,\n          87,\n          -18,\n          66,\n          -73,\n          118,\n          -59,\n          66,\n          -109,\n          -3,\n          72,\n          66,\n          -115,\n          -53,\n          -32,\n          66,\n          -60,\n          -67,\n          -94,\n          66,\n          -110,\n          -4,\n          -39,\n          66,\n          -93,\n          -8,\n          -29,\n          66,\n          -89,\n          23,\n          -40,\n          66,\n          -101,\n          30,\n          47,\n          66,\n          -94,\n          -20,\n          -80,\n          66,\n          -126,\n          88,\n          -88,\n          66,\n          -76,\n          57,\n          58,\n          63,\n          -35,\n          -21,\n          122,\n          66,\n          -100,\n          -123,\n          -115,\n          66,\n          -79,\n          84,\n          66,\n          66,\n          -82,\n          3,\n          58,\n          65,\n          -33,\n          65,\n          92,\n          66,\n          -96,\n          81,\n          82,\n          66,\n          -104,\n          -32,\n          -96,\n          66,\n          -84,\n          -24,\n          57,\n          66,\n          -102,\n          -102,\n          8,\n          66,\n          -83,\n          19,\n          25,\n          66,\n          -95,\n          15,\n          -124,\n          66,\n          -108,\n          3,\n          -38,\n          66,\n          -78,\n          -12,\n          -6,\n    
      66,\n          -66,\n          115,\n          79,\n          66,\n          94,\n          -107,\n          -50,\n          66,\n          -74,\n          -74,\n          48,\n          66,\n          -64,\n          -115,\n          -111,\n          66,\n          -97,\n          -46,\n          115,\n          66,\n          83,\n          17,\n          64,\n          66,\n          -120,\n          -51,\n          -76,\n          66,\n          82,\n          111,\n          -41,\n          66,\n          -99,\n          -24,\n          -56,\n          66,\n          82,\n          2,\n          -23,\n          66,\n          -64,\n          -39,\n          -78,\n          66,\n          85,\n          -10,\n          -48,\n          66,\n          109,\n          13,\n          -112,\n          66,\n          -75,\n          29,\n          -106,\n          66,\n          -76,\n          31,\n          -103,\n          66,\n          -68,\n          121,\n          104,\n          66,\n          75,\n          68,\n          -34,\n          66,\n          -66,\n          45,\n          111,\n          66,\n          -118,\n          77,\n          -64,\n          66,\n          -102,\n          -101,\n          -95,\n          66,\n          -108,\n          96,\n          -35,\n          66,\n          90,\n          -6,\n          108,\n          66,\n          -91,\n          73,\n          0,\n          66,\n          101,\n          12,\n          -79,\n          66,\n          76,\n          30,\n          36,\n          66,\n          92,\n          6,\n          -27,\n          66,\n          -99,\n          -33,\n          100,\n          66,\n          -128,\n          23,\n          86,\n          66,\n          -111,\n          126,\n          115,\n          66,\n          -127,\n          70,\n          -89,\n          66,\n          62,\n          109,\n          -26,\n          66,\n          -113,\n          53,\n          109,\n         
 66,\n          -125,\n          -72,\n          -12,\n          66,\n          -71,\n          61,\n          -93,\n          66,\n          -106,\n          35,\n          -13,\n          66,\n          -117,\n          -5,\n          -16,\n          66,\n          -97,\n          -43,\n          -121,\n          66,\n          -88,\n          62,\n          -91,\n          66,\n          99,\n          6,\n          53,\n          66,\n          -71,\n          -99,\n          98,\n          66,\n          -70,\n          -60,\n          41,\n          66,\n          -67,\n          81,\n          104,\n          66,\n          90,\n          -43,\n          105,\n          66,\n          -63,\n          118,\n          -76,\n          66,\n          -95,\n          126,\n          33,\n          66,\n          89,\n          -122,\n          109,\n          66,\n          -96,\n          -29,\n          117,\n          66,\n          -63,\n          3,\n          -49,\n          66,\n          -100,\n          -88,\n          79,\n          66,\n          74,\n          108,\n          50,\n          66,\n          -110,\n          -46,\n          19,\n          66,\n          -90,\n          -42,\n          -74,\n          66,\n          114,\n          36,\n          6,\n          66,\n          91,\n          31,\n          43,\n          66,\n          85,\n          31,\n          121,\n          66,\n          -105,\n          -44,\n          34,\n          66,\n          -107,\n          82,\n          23,\n          66,\n          -64,\n          -58,\n          81,\n          66,\n          -121,\n          -10,\n          38,\n          66,\n          99,\n          -105,\n          31,\n          66,\n          -83,\n          0,\n          -128,\n          66,\n          -104,\n          -78,\n          -86,\n          66,\n          -70,\n          73,\n          47,\n          66,\n          -68,\n          2,\n          -68,\n          66,\n      
    -113,\n          -121,\n          -79,\n          66,\n          89,\n          85,\n          108,\n          66,\n          -119,\n          84,\n          -82,\n          66,\n          79,\n          -44,\n          -38,\n          66,\n          79,\n          -20,\n          114,\n          66,\n          -72,\n          -50,\n          116,\n          66,\n          -125,\n          96,\n          -25,\n          66,\n          -126,\n          -21,\n          -6,\n          66,\n          -59,\n          -128,\n          -90,\n          66,\n          -77,\n          118,\n          3,\n          66,\n          82,\n          75,\n          123,\n          66,\n          97,\n          -61,\n          -86,\n          66,\n          -79,\n          93,\n          -87,\n          66,\n          -64,\n          11,\n          30,\n          66,\n          -66,\n          -94,\n          3,\n          66,\n          -60,\n          -113,\n          -40,\n          66,\n          -107,\n          -100,\n          37,\n          66,\n          -76,\n          -45,\n          82,\n          66,\n          -107,\n          9,\n          73,\n          66,\n          -94,\n          -123,\n          125,\n          66,\n          -112,\n          101,\n          28,\n          66,\n          -106,\n          -119,\n          121,\n          66,\n          -85,\n          -112,\n          8,\n          66,\n          -60,\n          67,\n          16,\n          66,\n          -74,\n          -100,\n          51,\n          66,\n          69,\n          110,\n          -88,\n          66,\n          95,\n          9,\n          -59,\n          66,\n          -60,\n          -56,\n          -31,\n          66,\n          -82,\n          -15,\n          49,\n          66,\n          -104,\n          -94,\n          -50,\n          66,\n          83,\n          52,\n          -102,\n          66,\n          -66,\n          -24,\n          10,\n          66,\n        
  -117,\n          -79,\n          -91,\n          66,\n          -67,\n          -59,\n          119,\n          66,\n          109,\n          -67,\n          110,\n          63,\n          -127,\n          53,\n          -107,\n          66,\n          72,\n          -18,\n          51,\n          66,\n          -120,\n          -61,\n          51,\n          66,\n          -69,\n          -74,\n          47,\n          66,\n          -118,\n          35,\n          -77,\n          66,\n          -67,\n          22,\n          30,\n          66,\n          -67,\n          86,\n          1,\n          66,\n          76,\n          -110,\n          -54,\n          66,\n          -113,\n          -98,\n          -65,\n          66,\n          -69,\n          -57,\n          -116,\n          66,\n          -113,\n          31,\n          60,\n          66,\n          -105,\n          29,\n          38,\n          66,\n          127,\n          101,\n          125,\n          66,\n          -103,\n          -121,\n          -41,\n          66,\n          -92,\n          -124,\n          60,\n          66,\n          -78,\n          86,\n          120,\n          66,\n          -116,\n          -114,\n          -56,\n          66,\n          -106,\n          53,\n          -53,\n          66,\n          -62,\n          65,\n          69,\n          66,\n          -121,\n          -58,\n          109,\n          66,\n          -125,\n          58,\n          -55,\n          66,\n          -66,\n          -70,\n          -58,\n          66,\n          70,\n          32,\n          53,\n          66,\n          84,\n          -109,\n          79,\n          66,\n          -117,\n          2,\n          115,\n          66,\n          117,\n          -85,\n          -128,\n          66,\n          80,\n          55,\n          122,\n          66,\n          103,\n          73,\n          -127,\n          66,\n          -115,\n          -98,\n          38,\n          66,\n  
        -63,\n          -88,\n          -65,\n          66,\n          -102,\n          16,\n          123,\n          66,\n          -117,\n          17,\n          -42,\n          66,\n          -110,\n          -46,\n          21,\n          66,\n          -70,\n          -40,\n          94,\n          66,\n          103,\n          60,\n          -38,\n          66,\n          -117,\n          -79,\n          62,\n          66,\n          93,\n          -119,\n          -43,\n          66,\n          -66,\n          -45,\n          110,\n          66,\n          -107,\n          -66,\n          -69,\n          66,\n          84,\n          81,\n          106,\n          66,\n          115,\n          63,\n          63,\n          66,\n          -99,\n          15,\n          -97,\n          66,\n          101,\n          -105,\n          12,\n          66,\n          -79,\n          68,\n          117,\n          66,\n          83,\n          -121,\n          24,\n          66,\n          -106,\n          -103,\n          -7,\n          66,\n          104,\n          77,\n          -76,\n          66,\n          -112,\n          -98,\n          -70,\n          66,\n          -117,\n          -8,\n          42,\n          66,\n          -104,\n          6,\n          120,\n          66,\n          -70,\n          -97,\n          38,\n          66,\n          -99,\n          63,\n          28,\n          66,\n          -127,\n          -21,\n          -30,\n          66,\n          -110,\n          102,\n          21,\n          66,\n          -85,\n          110,\n          56,\n          66,\n          -62,\n          123,\n          -3,\n          66,\n          -59,\n          111,\n          -119,\n          66,\n          108,\n          -99,\n          115,\n          66,\n          -104,\n          -106,\n          -117,\n          66,\n          99,\n          104,\n          101,\n          66,\n          -107,\n          -109,\n          -39,\n         
 66,\n          69,\n          -7,\n          -47,\n          66,\n          118,\n          -96,\n          26,\n          66,\n          100,\n          -53,\n          -68,\n          66,\n          84,\n          100,\n          -107,\n          66,\n          -64,\n          92,\n          -87,\n          66,\n          104,\n          -40,\n          62,\n          66,\n          -64,\n          -101,\n          16,\n          66,\n          -99,\n          -23,\n          -91,\n          66,\n          -60,\n          -61,\n          -57,\n          66,\n          -100,\n          -80,\n          67,\n          66,\n          -76,\n          -94,\n          92,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n      
    0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 228,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          729935225,\n          774110515,\n          644020442,\n          602647442,\n          724628632,\n          1160470031,\n          1102654741,\n          975699148,\n          711885020,\n          597139601,\n          624895870,\n          597251228,\n          0,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          643928587,\n          774602257,\n          774832121,\n          1032937514,\n          1155686359,\n          984562334,\n          730133567,\n          1013920862,\n          1141526167,\n          581151478,\n          624197408,\n          625949042,\n          0,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 26,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 26,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -326098280807737610,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"2.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"2.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          31,\n          255,\n          53270838,\n          647031374,\n          119730107,\n          
769242474,\n          81208803,\n          74292330,\n          215862725,\n          487953482,\n          727395159,\n          863477410,\n          347707217,\n          999504309,\n          794159870,\n          345620406,\n          117119418,\n          536214317,\n          240760993,\n          619092426,\n          204531182,\n          743941457,\n          207144049,\n          716884917,\n          358578133,\n          995748417,\n          498445609,\n          792270938,\n          994048331,\n          1041956798,\n          572573245,\n          764632115,\n          769913909,\n          1065293034,\n          799648081,\n          913108409,\n          380348583,\n          330397483,\n          664271558,\n          399767081,\n          21817,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"cutValueData\": [\n          69,\n          76,\n          38,\n          14,\n          66,\n          51,\n          -35,\n          -57,\n          66,\n          -83,\n          36,\n          -80,\n          67,\n          -66,\n          102,\n          -98,\n          68,\n          -80,\n          -128,\n          -68,\n          66,\n          -120,\n          121,\n          76,\n          66,\n          -86,\n          17,\n          97,\n          66,\n          -82,\n          19,\n          82,\n          66,\n          -58,\n          -82,\n          121,\n          66,\n          -117,\n          -8,\n          -73,\n          66,\n          -92,\n          -97,\n          -115,\n          66,\n          101,\n          -74,\n          1,\n          66,\n          94,\n          96,\n          112,\n          66,\n          -118,\n          77,\n          65,\n          66,\n          31,\n          83,\n          127,\n          66,\n          -117,\n          48,\n          73,\n          66,\n          -63,\n          126,\n          -93,\n          65,\n          90,\n          114,\n          75,\n         
 66,\n          -78,\n          -41,\n          113,\n          66,\n          114,\n          -72,\n          -73,\n          66,\n          -92,\n          -103,\n          -122,\n          66,\n          110,\n          -10,\n          30,\n          66,\n          -101,\n          40,\n          47,\n          66,\n          -78,\n          -108,\n          115,\n          66,\n          82,\n          49,\n          -33,\n          66,\n          99,\n          -41,\n          14,\n          66,\n          -96,\n          -68,\n          -111,\n          66,\n          -59,\n          -39,\n          116,\n          66,\n          117,\n          83,\n          96,\n          66,\n          -98,\n          -12,\n          -57,\n          66,\n          -99,\n          26,\n          6,\n          66,\n          100,\n          78,\n          40,\n          66,\n          81,\n          91,\n          -42,\n          66,\n          86,\n          26,\n          110,\n          66,\n          -79,\n          29,\n          90,\n          66,\n          -80,\n          37,\n          -74,\n          66,\n          -99,\n          -112,\n          -68,\n          66,\n          -88,\n          87,\n          19,\n          66,\n          13,\n          16,\n          19,\n          66,\n          68,\n          6,\n          91,\n          66,\n          -111,\n          -115,\n          -98,\n          66,\n          -59,\n          -39,\n          10,\n          66,\n          -80,\n          -96,\n          -95,\n          66,\n          -60,\n          101,\n          77,\n          66,\n          -108,\n          35,\n          98,\n          66,\n          119,\n          23,\n          -16,\n          66,\n          119,\n          -16,\n          97,\n          66,\n          -62,\n          -118,\n          86,\n          66,\n          23,\n          -110,\n          74,\n          66,\n          -66,\n          43,\n          -44,\n          66,\n       
   -88,\n          99,\n          34,\n          66,\n          -67,\n          122,\n          18,\n          66,\n          -105,\n          87,\n          -55,\n          66,\n          -117,\n          -59,\n          -113,\n          66,\n          -71,\n          94,\n          -54,\n          66,\n          -119,\n          -9,\n          -67,\n          66,\n          102,\n          91,\n          14,\n          66,\n          46,\n          83,\n          -125,\n          66,\n          105,\n          -77,\n          92,\n          66,\n          -108,\n          -65,\n          -101,\n          66,\n          -88,\n          28,\n          71,\n          66,\n          -68,\n          114,\n          56,\n          66,\n          -117,\n          -73,\n          -119,\n          66,\n          72,\n          -85,\n          9,\n          66,\n          108,\n          10,\n          -10,\n          66,\n          -79,\n          -110,\n          68,\n          66,\n          -115,\n          -119,\n          -74,\n          66,\n          -117,\n          -90,\n          6,\n          66,\n          -66,\n          -48,\n          -29,\n          66,\n          -84,\n          67,\n          -51,\n          66,\n          -122,\n          91,\n          -46,\n          66,\n          -91,\n          34,\n          119,\n          66,\n          -77,\n          87,\n          -54,\n          66,\n          69,\n          -47,\n          5,\n          66,\n          -71,\n          -52,\n          -48,\n          66,\n          95,\n          -71,\n          67,\n          66,\n          -99,\n          94,\n          -51,\n          66,\n          98,\n          37,\n          -74,\n          66,\n          -70,\n          35,\n          90,\n          66,\n          -115,\n          -109,\n          -31,\n          66,\n          105,\n          -92,\n          31,\n          66,\n          -128,\n          7,\n          -117,\n          66,\n          
-121,\n          -37,\n          93,\n          66,\n          -67,\n          64,\n          -126,\n          66,\n          -72,\n          -122,\n          124,\n          66,\n          -121,\n          105,\n          19,\n          66,\n          -60,\n          24,\n          -55,\n          66,\n          -81,\n          20,\n          42,\n          66,\n          81,\n          90,\n          -111,\n          66,\n          91,\n          -54,\n          8,\n          66,\n          -112,\n          -87,\n          -38,\n          66,\n          -93,\n          -46,\n          -82,\n          66,\n          -62,\n          16,\n          -34,\n          66,\n          110,\n          -55,\n          75,\n          66,\n          83,\n          89,\n          -110,\n          66,\n          90,\n          -101,\n          36,\n          66,\n          -105,\n          54,\n          -83,\n          66,\n          -86,\n          -85,\n          -56,\n          66,\n          -80,\n          -70,\n          -99,\n          66,\n          97,\n          77,\n          -75,\n          66,\n          -99,\n          25,\n          42,\n          66,\n          81,\n          91,\n          34,\n          66,\n          -78,\n          71,\n          -116,\n          66,\n          -70,\n          -58,\n          -3,\n          66,\n          -59,\n          -21,\n          -111,\n          66,\n          -101,\n          121,\n          11,\n          66,\n          -63,\n          101,\n          -114,\n          66,\n          -63,\n          77,\n          87,\n          66,\n          -75,\n          -1,\n          124,\n          66,\n          69,\n          -122,\n          77,\n          66,\n          -112,\n          20,\n          -5,\n          66,\n          -106,\n          17,\n          -36,\n          66,\n          108,\n          -115,\n          -124,\n          66,\n          -72,\n          -24,\n          -86,\n          66,\n          
-101,\n          116,\n          -35,\n          66,\n          -63,\n          44,\n          14,\n          66,\n          -113,\n          36,\n          -117,\n          66,\n          104,\n          52,\n          -44,\n          66,\n          -106,\n          -118,\n          -121,\n          66,\n          -67,\n          -45,\n          -24,\n          66,\n          -110,\n          27,\n          60,\n          66,\n          98,\n          91,\n          -81,\n          66,\n          -116,\n          36,\n          10,\n          66,\n          -114,\n          -111,\n          34,\n          66,\n          -104,\n          -75,\n          29,\n          66,\n          -60,\n          44,\n          -44,\n          66,\n          125,\n          -49,\n          -80,\n          66,\n          -122,\n          -120,\n          -80,\n          66,\n          -60,\n          46,\n          0,\n          66,\n          -127,\n          88,\n          -74,\n          66,\n          84,\n          1,\n          -119,\n          66,\n          -114,\n          -97,\n          6,\n          66,\n          -107,\n          17,\n          27,\n          66,\n          -68,\n          79,\n          43,\n          66,\n          -112,\n          -112,\n          -78,\n          66,\n          -64,\n          27,\n          -47,\n          66,\n          -127,\n          -128,\n          7,\n          66,\n          -82,\n          -81,\n          49,\n          66,\n          -103,\n          -52,\n          -35,\n          66,\n          -70,\n          -95,\n          115,\n          66,\n          -67,\n          -102,\n          -44,\n          66,\n          80,\n          56,\n          -87,\n          66,\n          -123,\n          -76,\n          -14,\n          66,\n          -109,\n          17,\n          -83,\n          66,\n          -103,\n          -22,\n          -49,\n          66,\n          -107,\n          -81,\n          76,\n          66,\n 
         91,\n          -15,\n          -109,\n          66,\n          93,\n          -50,\n          106,\n          66,\n          71,\n          87,\n          -83,\n          66,\n          -65,\n          -115,\n          -104,\n          66,\n          -80,\n          97,\n          61,\n          66,\n          -60,\n          102,\n          38,\n          66,\n          -69,\n          -81,\n          10,\n          66,\n          -58,\n          -51,\n          79,\n          66,\n          70,\n          40,\n          -22,\n          66,\n          96,\n          -88,\n          -72,\n          66,\n          95,\n          -52,\n          105,\n          66,\n          -63,\n          109,\n          15,\n          66,\n          -71,\n          37,\n          -27,\n          66,\n          82,\n          47,\n          61,\n          66,\n          84,\n          58,\n          44,\n          66,\n          -110,\n          64,\n          101,\n          66,\n          -71,\n          -42,\n          5,\n          66,\n          -110,\n          -5,\n          40,\n          66,\n          115,\n          -89,\n          90,\n          66,\n          -109,\n          -113,\n          56,\n          66,\n          -103,\n          43,\n          94,\n          66,\n          94,\n          -115,\n          45,\n          66,\n          -119,\n          -41,\n          88,\n          66,\n          -104,\n          -19,\n          82,\n          66,\n          -98,\n          -50,\n          -62,\n          66,\n          -101,\n          34,\n          -28,\n          66,\n          -63,\n          -114,\n          113,\n          66,\n          -117,\n          66,\n          2,\n          66,\n          75,\n          18,\n          68,\n          66,\n          -114,\n          -104,\n          -9,\n          66,\n          90,\n          -97,\n          -90,\n          66,\n          -59,\n          25,\n          48,\n          66,\n          
-110,\n          12,\n          -48,\n          66,\n          -63,\n          93,\n          -8,\n          66,\n          -103,\n          -124,\n          105,\n          66,\n          -102,\n          -107,\n          -87,\n          66,\n          -104,\n          -104,\n          -32,\n          66,\n          112,\n          -53,\n          -120,\n          66,\n          -73,\n          -8,\n          -127,\n          66,\n          -87,\n          114,\n          21,\n          66,\n          -61,\n          4,\n          -78,\n          66,\n          -128,\n          78,\n          -95,\n          66,\n          -102,\n          91,\n          127,\n          66,\n          -63,\n          105,\n          -15,\n          66,\n          73,\n          113,\n          124,\n          66,\n          84,\n          55,\n          -107,\n          66,\n          -112,\n          101,\n          -48,\n          66,\n          -85,\n          23,\n          -73,\n          66,\n          -61,\n          108,\n          15,\n          66,\n          94,\n          93,\n          -107,\n          66,\n          -74,\n          19,\n          -116,\n          66,\n          -124,\n          -111,\n          -61,\n          66,\n          -96,\n          92,\n          -103,\n          66,\n          -95,\n          76,\n          21,\n          66,\n          104,\n          37,\n          18,\n          66,\n          -109,\n          -7,\n          -43,\n          66,\n          -63,\n          48,\n          43,\n          66,\n          85,\n          104,\n          -23,\n          66,\n          68,\n          -38,\n          104,\n          66,\n          -97,\n          -118,\n          125,\n          66,\n          -74,\n          82,\n          82,\n          66,\n          85,\n          -1,\n          111,\n          66,\n          -66,\n          -91,\n          -98,\n          66,\n          79,\n          -30,\n          -42,\n          66,\n      
    81,\n          100,\n          59,\n          66,\n          -113,\n          25,\n          -47,\n          66,\n          -108,\n          -70,\n          33,\n          66,\n          -70,\n          27,\n          -58,\n          66,\n          68,\n          -68,\n          5,\n          66,\n          -103,\n          -31,\n          59,\n          66,\n          -74,\n          -48,\n          -27,\n          66,\n          -87,\n          -6,\n          12,\n          66,\n          -64,\n          1,\n          -54,\n          66,\n          72,\n          86,\n          -5,\n          66,\n          -110,\n          115,\n          57,\n          66,\n          95,\n          -9,\n          87,\n          66,\n          -106,\n          -58,\n          -30,\n          66,\n          -114,\n          -43,\n          -9,\n          66,\n          -124,\n          -117,\n          -61,\n          66,\n          80,\n          -121,\n          46,\n          66,\n          -105,\n          -104,\n          -9,\n          66,\n          69,\n          -48,\n          -109,\n          66,\n          -116,\n          -62,\n          -126,\n          66,\n          -101,\n          15,\n          -70,\n          66,\n          -100,\n          2,\n          8,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          
0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 231,\n        \"leftIndex\": [\n          -1,\n          1,\n          255,\n          626568200,\n          629499437,\n          753339841,\n          1030788898,\n          726831224,\n          596099411,\n          586113809,\n          975114418,\n          987685622,\n          1142571545,\n          587744221,\n          711000005,\n          13,\n          0\n        ],\n        \"rightIndex\": [\n          -1,\n          1,\n          255,\n          1161723346,\n          987939094,\n          768285566,\n          769871048,\n          588302266,\n          983461084,\n          597634816,\n          985094540,\n          640298897,\n          1118597027,\n          1030729828,\n          1098223177,\n          13,\n          0\n        ],\n        \"nodeFreeIndexes\": [],\n        \"nodeFreeIndexPointer\": 23,\n        \"leafFreeIndexes\": [],\n        \"leafFreeIndexPointer\": 23,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 1885563308252954837,\n      \"id\": 0,\n      \"dimensions\": 32,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    }\n  ],\n  
\"executionContext\": {\n    \"parallelExecutionEnabled\": false,\n    \"threadPoolSize\": 0\n  },\n  \"saveTreeStateEnabled\": true,\n  \"saveSamplerStateEnabled\": true,\n  \"saveCoordinatorStateEnabled\": true\n}"
  },
  {
    "path": "Java/core/src/test/resources/com/amazon/randomcutforest/state/state_3.json",
    "content": "{\n  \"version\": \"3.0\",\n  \"totalUpdates\": 1501,\n  \"timeDecay\": 1.0e-4,\n  \"numberOfTrees\": 30,\n  \"sampleSize\": 256,\n  \"shingleSize\": 8,\n  \"dimensions\": 8,\n  \"outputAfter\": 32,\n  \"compressed\": true,\n  \"partialTreeState\": true,\n  \"boundingBoxCacheFraction\": 0.0,\n  \"storeSequenceIndexesEnabled\": false,\n  \"compact\": true,\n  \"internalShinglingEnabled\": true,\n  \"centerOfMassEnabled\": false,\n  \"precision\": \"FLOAT_32\",\n  \"pointStoreState\": {\n    \"version\": \"3.0\",\n    \"dimensions\": 8,\n    \"capacity\": 7681,\n    \"shingleSize\": 8,\n    \"precision\": \"FLOAT_32\",\n    \"startOfFreeSegment\": 1237,\n    \"pointData\": [\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      
65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      
-112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n   
   64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      
0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      
65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n   
   0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      
65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n     
 0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      
0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      
64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n  
    0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      
0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n  
    0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n  
    65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      
-40,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      
0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n 
     65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n     
 32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n   
   65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n  
    0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      32,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      
48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      48,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      64,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      80,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      96,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      
0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      
0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      
-48,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -72,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      
0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -112,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      112,\n      0,\n      0,\n      65,\n      -128,\n      0,\n      0,\n      65,\n      -120,\n      0,\n      0,\n      65,\n      -104,\n      0,\n      0,\n      65,\n      -96,\n      0,\n      0,\n      65,\n      -88,\n      0,\n      0,\n      65,\n      -80,\n      0,\n      0,\n      65,\n      -64,\n      0,\n      0,\n      65,\n      -56,\n      0,\n      0,\n      65,\n      -48,\n      0,\n      0,\n      65,\n      -40,\n      0,\n      0,\n      65,\n      -32,\n      0,\n      0,\n      65,\n      -16,\n      0,\n      0,\n      65,\n      -8,\n      0,\n      0,\n      66,\n      0,\n      0,\n      0,\n      66,\n      4,\n      0,\n      0,\n      66,\n      8,\n      0,\n      
0,\n      66,\n      16,\n      0,\n      0,\n      66,\n      20,\n      0,\n      0,\n      66,\n      24,\n      0,\n      0,\n      66,\n      28,\n      0,\n      0,\n      66,\n      36,\n      0,\n      0,\n      66,\n      40,\n      0,\n      0,\n      66,\n      44,\n      0,\n      0,\n      66,\n      48,\n      0,\n      0,\n      66,\n      52,\n      0,\n      0,\n      66,\n      60,\n      0,\n      0,\n      66,\n      64,\n      0,\n      0,\n      66,\n      68,\n      0,\n      0,\n      66,\n      72,\n      0,\n      0,\n      66,\n      80,\n      0,\n      0,\n      66,\n      84,\n      0,\n      0,\n      66,\n      88,\n      0,\n      0,\n      66,\n      92,\n      0,\n      0,\n      66,\n      96,\n      0,\n      0,\n      66,\n      104,\n      0,\n      0,\n      66,\n      108,\n      0,\n      0,\n      66,\n      112,\n      0,\n      0,\n      66,\n      116,\n      0,\n      0,\n      66,\n      120,\n      0,\n      0,\n      66,\n      -128,\n      0,\n      0,\n      66,\n      -126,\n      0,\n      0,\n      66,\n      -124,\n      0,\n      0,\n      66,\n      -122,\n      0,\n      0,\n      66,\n      -118,\n      0,\n      0,\n      66,\n      -116,\n      0,\n      0,\n      66,\n      -114,\n      0,\n      0,\n      66,\n      -112,\n      0,\n      0,\n      66,\n      -110,\n      0,\n      0,\n      66,\n      -106,\n      0,\n      0,\n      66,\n      -104,\n      0,\n      0,\n      66,\n      -102,\n      0,\n      0,\n      66,\n      -100,\n      0,\n      0,\n      66,\n      -98,\n      0,\n      0,\n      66,\n      -94,\n      0,\n      0,\n      66,\n      -92,\n      0,\n      0,\n      66,\n      -90,\n      0,\n      0,\n      66,\n      -88,\n      0,\n      0,\n      66,\n      -84,\n      0,\n      0,\n      66,\n      -82,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0,\n      66,\n      -80,\n      0,\n      0\n    ],\n    \"compressed\": true,\n    \"refCount\": [\n      0,\n 
     30,\n      1105,\n      278589342,\n      351115866,\n      228275584,\n      149050893,\n      529489237,\n      361595630,\n      235719363,\n      405823962,\n      329851333,\n      647509378,\n      236608032,\n      149795073,\n      292908125,\n      351067413,\n      120032065,\n      860972706,\n      438142281,\n      177851411,\n      34379842,\n      174576285,\n      175596032,\n      118367711,\n      176575356,\n      389059008,\n      153668093,\n      357743235,\n      61044007,\n      350166645,\n      63992188,\n      32383813,\n      183400379,\n      66709002,\n      41897063,\n      177675884,\n      37154258,\n      37845590,\n      183880780,\n      176608001,\n      148810020,\n      174668513,\n      60125002,\n      146962044,\n      202286688,\n      205355169,\n      289126587,\n      268099108,\n      177444879,\n      178391623,\n      92597318,\n      232820762,\n      234886017,\n      322463930,\n      146963935,\n      58274175,\n      157123657,\n      87800030,\n      59318907,\n      174702243,\n      85177337,\n      175591164,\n      90659910,\n      118428062,\n      120242400,\n      173833437,\n      202317473,\n      90778047,\n      292937137,\n      176634874,\n      116515703,\n      117320990,\n      62919619,\n      61046694,\n      87887422,\n      61910691,\n      206932318,\n      31460354,\n      30542568,\n      59258207,\n      173863254,\n      30569443,\n      117440061,\n      58276064,\n      88689825,\n      116542362,\n      343224220,\n      152477257,\n      312332752,\n      30599393,\n      115508482,\n      30508931,\n      145947104,\n      60125062,\n      206162496,\n      207914367,\n      260647353,\n      204400952,\n      147976862,\n      285583350,\n      173690367,\n      179486903,\n      605058218,\n      33372814,\n      62870609,\n      174637823,\n      30569634,\n      116454044,\n      201451611,\n      173682588,\n      116517531,\n      60209729,\n      86901433,\n      
60208639,\n      59198658,\n      121043814,\n      867285488,\n      887503680,\n      92593080,\n      202467455,\n      148813893,\n      206097275,\n      153372908,\n      87858534,\n      121073824,\n      179373757,\n      180273156,\n      148991646,\n      34351943,\n      64745643,\n      150687746,\n      207024577,\n      121134270,\n      61107268,\n      33399713,\n      147056286,\n      175593088,\n      263327646,\n      119226532,\n      97212873,\n      116487807,\n      147917189,\n      205202554,\n      89674850,\n      231841344,\n      145084219,\n      60120223,\n      118302203,\n      115624763,\n      88810014,\n      119286206,\n      149166483,\n      61044678,\n      137936241,\n      148808073,\n      212565639,\n      147943168,\n      146073027,\n      121044898,\n      33315052,\n      92412865,\n      87769186,\n      59195743,\n      58243427,\n      147855718,\n      290196367,\n      120180963,\n      204254916,\n      208781255,\n      147084092,\n      180182825,\n      179316094,\n      235741243,\n      205175581,\n      5028,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0,\n      0\n    ],\n    \"directLocationMap\": false,\n    \"locationList\": [\n      0,\n      1229,\n      1035,\n      7569420,\n      12111816,\n      16654209,\n      31795519,\n      36337912,\n      51470605,\n      56021615,\n      60564008,\n      71162921,\n      75705318,\n      80247711,\n      84790104,\n      89332497,\n      93874890,\n      98417283,\n      102959676,\n      107502069,\n      112044462,\n      116586855,\n      121129248,\n      125671641,\n      130214034,\n      134756427,\n      139298820,\n      143841213,\n      148383606,\n      152925999,\n      157468392,\n      162010785,\n      177152088,\n      181694488,\n      1859504281,\n      201378184,\n      205920584,\n      211303067,\n      225604280,\n      230146680,\n      245279373,\n      593258683,\n   
   254373163,\n      258915169,\n      263457562,\n      267999955,\n      272542348,\n      277084741,\n      281627134,\n      286169527,\n      290711920,\n      295254313,\n      299796706,\n      304339099,\n      308881492,\n      313423885,\n      317966278,\n      322508671,\n      327051064,\n      331593457,\n      336135850,\n      340678622,\n      345220636,\n      355814629,\n      360361946,\n      364904339,\n      372474994,\n      377017387,\n      381564700,\n      390644566,\n      395186959,\n      399729352,\n      404271745,\n      408814138,\n      413356531,\n      417898924,\n      422441317,\n      426983710,\n      431526103,\n      436068496,\n      440610889,\n      445153282,\n      449695675,\n      454238068,\n      459469261,\n      463322854,\n      467865247,\n      472407640,\n      1600448023,\n      481493334,\n      486034819,\n      490577212,\n      495119605,\n      499661998,\n      504204391,\n      508746784,\n      513289177,\n      517831570,\n      522373963,\n      526916356,\n      531458749,\n      536001142,\n      540543535,\n      545085928,\n      549628321,\n      554170714,\n      1826523307,\n      563255500,\n      567797893,\n      572340286,\n      1474032379,\n      581425072,\n      1537581565,\n      590509858,\n      596566381,\n      601108775,\n      605651168,\n      610193561,\n      614735954,\n      619278347,\n      623820740,\n      628363133,\n      632905526,\n      637447919,\n      641990312,\n      646532705,\n      651075098,\n      655617491,\n      660159884,\n      666216408,\n      670758801,\n      678329454,\n      682871849,\n      688927142,\n      693470766,\n      698013159,\n      702555552,\n      707097945,\n      711640338,\n      716182731,\n      720725124,\n      725267517,\n      729809910,\n      734352303,\n      738894696,\n      743437089,\n      747979482,\n      752521875,\n      757064268,\n      761606661,\n      766149054,\n      770691447,\n      775233840,\n 
     779961963,\n      784318626,\n      1394021019,\n      793403468,\n      797945805,\n      802488198,\n      1253339784,\n      816115542,\n      1519617570,\n      829742787,\n      834284949,\n      840341472,\n      844883866,\n      860016559,\n      856997112,\n      863053437,\n      867595831,\n      872138224,\n      878194747,\n      882737141,\n      890307794,\n      908477361,\n      914533891,\n      919076285,\n      935723108,\n      940274119,\n      944816512,\n      951030475,\n      956939399,\n      961471953,\n      970556793,\n      1397588142,\n      981155656,\n      985698049,\n      990240442,\n      994782835,\n      999325228,\n      1003867621,\n      1009924144,\n      1014466538,\n      1019008931,\n      1023551324,\n      1028093717,\n      1032636110,\n      1037178503,\n      1041720896,\n      1046263289,\n      1052319812,\n      1056862206,\n      1061404599,\n      1067461123,\n      1072003516,\n      1076545909,\n      1081088302,\n      1085630695,\n      1090173204,\n      1094715481,\n      1099257874,\n      1105599757,\n      1109856791,\n      1117426214,\n      1123484050,\n      1476482903,\n      1132568756,\n      1137111149,\n      1141653542,\n      1148052006,\n      1152252459,\n      1158308983,\n      1162851376,\n      1167393769,\n      1171936162,\n      1177991455,\n      1182535079,\n      1187077472,\n      1193132765,\n      1568336889,\n      1202218782,\n      1206761175,\n      1211303568,\n      1215845961,\n      1220388354,\n      1240063438,\n      1247642712,\n      1252185105,\n      1258241629,\n      1264296922,\n      1268840546,\n      1273382939,\n      1277925332,\n      1282467725,\n      1287010118,\n      1291552511,\n      1296094904,\n      1300637297,\n      1306692590,\n      1311236214,\n      1315778607,\n      1320321000,\n      1324863393,\n      1329405786,\n      1333948179,\n      1338490572,\n      1343032965,\n      1347575358,\n      1352117751,\n      1356660144,\n 
     1361202537,\n      1365744930,\n      1370287323,\n      1374829716,\n      1379372109,\n      1383914502,\n      1388456895,\n      1392999288,\n      1400569942,\n      1405112336,\n      1409654729,\n      1414197122,\n      1418739515,\n      1423281908,\n      1430850101,\n      1435394956,\n      1439937349,\n      1458106919,\n      1473239614,\n      1480818886,\n      1495960189,\n      1500502589,\n      1505044982,\n      1512615637,\n      1517158030,\n      1523214553,\n      1527756947,\n      1532299468,\n      1536841733,\n      1542898257,\n      1547440650,\n      1551983043,\n      1556525436,\n      1561067829,\n      1565610222,\n      1573180877,\n      1577723270,\n      1582265663,\n      1586808056,\n      1591350449,\n      1595892842,\n      1601948135,\n      1606491759,\n      1618601112,\n      1623147200,\n      1627689593,\n      1632231986,\n      1650392939,\n      1659486344,\n      1664028737,\n      1668571130,\n      1677654685,\n      1685226571,\n      1689768964,\n      1694311404,\n      1698853750,\n      1703396143,\n      1707938536,\n      1712480929,\n      1718646922,\n      1724592746,\n      1730649270,\n      1736707024,\n      1741249418,\n      1745791811,\n      1750334204,\n      1754876597,\n      1759418990,\n      1766989645,\n      1771532038,\n      1776074431,\n      1780616824,\n      1785159217,\n      1789701610,\n      1794244003,\n      1798786396,\n      1803328789,\n      1807871182,\n      1812413575,\n      1816955968,\n      1821498361,\n      1826040754,\n      1832097278,\n      1836639671,\n      1841182064,\n      1845724457,\n      1854805550,\n      1859351636\n    ],\n    \"reverseAvailable\": false,\n    \"internalShinglingEnabled\": true,\n    \"internalShingle\": [\n      81.0,\n      82.0,\n      83.0,\n      84.0,\n      86.0,\n      87.0,\n      88.0,\n      88.0\n    ],\n    \"lastTimeStamp\": 1502,\n    \"rotationEnabled\": false,\n    \"dynamicResizingEnabled\": true,\n    
\"currentStoreCapacity\": 512,\n    \"indexCapacity\": 1105,\n    \"duplicateRefs\": [\n      0,\n      695,\n      44,\n      3897600,\n      24716394,\n      42153264,\n      110513029,\n      50402328,\n      2086095,\n      241742728,\n      3738950,\n      283887772,\n      17368684,\n      334297841,\n      40203129,\n      335750396,\n      7264855,\n      23663\n    ]\n  },\n  \"compactSamplerStates\": [\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.726248,\n        -1.7356311,\n        -1.7449484,\n        -1.7510741,\n        -1.7539912,\n        -1.7473811,\n        -1.8241102,\n        -1.7670848,\n        -1.7573545,\n        -1.7978079,\n        -1.7825805,\n        -1.7723746,\n        -1.8323271,\n        -1.8414215,\n        -1.8303815,\n        -1.792648,\n        -1.9015952,\n        -1.8188479,\n        -1.9806261,\n        -1.8121938,\n        -1.8129233,\n        -1.8650898,\n        -1.8366604,\n        -1.8418276,\n        -1.999935,\n        -2.032135,\n        -1.8586072,\n        -1.8423811,\n        -1.9055475,\n        -1.8451024,\n        -1.9508317,\n        -1.8134956,\n        -2.2847633,\n        -1.9073308,\n        -2.0071895,\n        -1.8895093,\n        -1.956416,\n        -1.9973236,\n        -2.0586295,\n        -1.8672371,\n        -2.0988955,\n        -2.0428073,\n        -2.011875,\n        -1.8849101,\n        -1.8685876,\n        -2.1907096,\n        -1.9462093,\n        -1.9988756,\n        -2.299218,\n        -2.0017252,\n        -2.0773284,\n        -2.4523854,\n        -2.0663824,\n        -2.023498,\n        -2.0612655,\n        -2.042152,\n        -2.069748,\n        -1.9578686,\n        -1.9781349,\n        -1.8621529,\n        -1.9471117,\n        -2.0265074,\n        -2.2279465,\n        -1.8210775,\n        -1.9795929,\n        -2.5953238,\n        -2.390732,\n        -3.1138978,\n        -2.2910502,\n        -2.0865312,\n        -2.5766997,\n        -2.0562227,\n        -1.9138722,\n    
    -2.0392447,\n        -2.026909,\n        -2.2205603,\n        -2.091928,\n        -2.1294122,\n        -2.2178159,\n        -2.4366877,\n        -1.8943694,\n        -2.101471,\n        -2.9819884,\n        -2.290241,\n        -2.763693,\n        -2.3244293,\n        -2.0386095,\n        -1.9151877,\n        -2.83677,\n        -1.8807462,\n        -1.9139498,\n        -2.685066,\n        -2.4755366,\n        -2.173793,\n        -2.3330193,\n        -2.6528668,\n        -2.5381188,\n        -2.371536,\n        -2.4438212,\n        -2.0802977,\n        -2.0486376,\n        -2.2020848,\n        -2.5800166,\n        -2.7924101,\n        -2.8116932,\n        -2.2266011,\n        -3.0653925,\n        -2.0397663,\n        -3.11812,\n        -2.2300634,\n        -2.2714894,\n        -2.946862,\n        -2.7537735,\n        -3.161233,\n        -2.7489417,\n        -2.331509,\n        -2.2418895,\n        -2.1874921,\n        -2.0681648,\n        -2.03847,\n        -2.2435958,\n        -2.573769,\n        -2.7047837,\n        -2.9036868,\n        -3.026818,\n        -2.4197178,\n        -2.6823406,\n        -1.8570595,\n        -2.4379678,\n        -5.5215054,\n        -2.5416145,\n        -4.6656966,\n        -3.5039902,\n        -3.2943099,\n        -6.0341554,\n        -3.254804,\n        -3.7077918,\n        -2.739763,\n        -2.9579237,\n        -2.1496873,\n        -3.1787102,\n        -4.4074664,\n        -4.4549766,\n        -2.0833125,\n        -2.603608,\n        -2.2164037,\n        -2.7361348,\n        -2.1582174,\n        -3.0311465,\n        -2.3569057,\n        -3.3770554,\n        -4.742624,\n        -2.7280037,\n        -3.7714725,\n        -2.1618302,\n        -3.2844663,\n        -2.8226726,\n        -2.964899,\n        -2.6862307,\n        -2.9615252,\n        -3.0366094,\n        -2.017697,\n        -2.7177863,\n        -2.8497036,\n        -2.2568204,\n        -3.1616893,\n        -3.207093,\n        -2.3343463,\n        -3.028608,\n        
-3.143178,\n        -5.623062,\n        -4.608758,\n        -3.986712,\n        -2.2927513,\n        -3.102124,\n        -2.5337071,\n        -1.9950272,\n        -3.897555,\n        -3.3334637,\n        -5.482882,\n        -3.3254464,\n        -2.1957855,\n        -2.2119808,\n        -2.6956422,\n        -3.8892157,\n        -5.5334697,\n        -2.6207926,\n        -2.2275114,\n        -8.324413,\n        -2.5119126,\n        -4.330357,\n        -3.1112654,\n        -2.7946742,\n        -7.297978,\n        -3.5470343,\n        -3.7771158,\n        -2.9075985,\n        -3.7523994,\n        -3.228332,\n        -3.2314463,\n        -2.6365817,\n        -3.0304646,\n        -3.4130404,\n        -2.3348067,\n        -7.749205,\n        -2.958699,\n        -4.405752,\n        -6.593999,\n        -3.897356,\n        -3.2318664,\n        -4.5506983,\n        -2.412623,\n        -2.7457352,\n        -3.1259267,\n        -4.2919693,\n        -2.3542695,\n        -2.9822729,\n        -3.1331265,\n        -5.3454256,\n        -5.604323,\n        -2.8822865,\n        -2.706761,\n        -2.957169,\n        -3.433357,\n        -3.4868684,\n        -3.9239047,\n        -2.89275,\n        -4.7114544,\n        -3.7985191,\n        -4.4912863,\n        -3.0240886,\n        -2.6035159,\n        -2.5994782,\n        -3.1590285,\n        -2.9561524,\n        -2.869075,\n        -2.4873657,\n        -4.53508,\n        -2.073862,\n        -2.8812068,\n        -2.063298,\n        -2.9101927,\n        -2.3357852,\n        -4.731912,\n        -5.609206,\n        -3.0264845,\n        -3.423773,\n        -4.0586,\n        -3.8452144,\n        -4.7615886,\n        -4.6334796,\n        -3.06386,\n        -3.0389128,\n        -5.4126983,\n        -3.9077544,\n        -2.7038245\n      ],\n      \"pointIndex\": [\n        0,\n        1031,\n        256,\n        397456478,\n        214846029,\n        110866961,\n        212465124,\n        695229024,\n        815817630,\n        771788473,\n  
      263909741,\n        283686995,\n        397779630,\n        935678783,\n        827059140,\n        734743071,\n        1072286907,\n        1057549205,\n        1047782471,\n        102038452,\n        51219701,\n        606710213,\n        949330085,\n        101215925,\n        127901944,\n        110861471,\n        51646989,\n        733846016,\n        101714706,\n        102119372,\n        8618248,\n        101744357,\n        130218449,\n        531093095,\n        734605467,\n        177505127,\n        675225791,\n        764895436,\n        798523936,\n        287503034,\n        554113481,\n        273357881,\n        264224,\n        497012656,\n        627639407,\n        943555316,\n        851625071,\n        542937296,\n        852597411,\n        936944648,\n        138489344,\n        37456332,\n        734512748,\n        118316393,\n        46507635,\n        101275415,\n        101385401,\n        101234607,\n        583731260,\n        156824879,\n        547783552,\n        1067043140,\n        733899671,\n        17958068,\n        57387459,\n        101328252,\n        149154706,\n        227012240,\n        971327176,\n        433651570,\n        24490785,\n        264119134,\n        101228425,\n        694621196,\n        455309207,\n        305722871,\n        665218317,\n        535731840,\n        328078255,\n        27798641,\n        359011423,\n        93027933,\n        1045200399,\n        712600122,\n        667222745,\n        971894136,\n        529415657,\n        93423453,\n        1031\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -3710461305226654598\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6987427,\n        
-1.6988138,\n        -1.7114749,\n        -1.6990972,\n        -1.7235111,\n        -1.7235203,\n        -1.7158189,\n        -1.7037977,\n        -1.7455696,\n        -1.7451863,\n        -1.7315372,\n        -1.7678101,\n        -1.7410578,\n        -1.7423337,\n        -1.7279032,\n        -1.7055162,\n        -1.7699987,\n        -1.7759233,\n        -1.7575607,\n        -1.7969104,\n        -1.7797482,\n        -1.7767636,\n        -1.8502752,\n        -1.8698097,\n        -2.1332664,\n        -1.802178,\n        -1.7759057,\n        -1.84717,\n        -1.791979,\n        -1.880251,\n        -2.137471,\n        -1.7164084,\n        -1.8215646,\n        -1.8540864,\n        -1.8427064,\n        -1.7827498,\n        -1.8662268,\n        -2.0459082,\n        -1.8306061,\n        -2.025883,\n        -1.8439078,\n        -1.8316021,\n        -1.8311063,\n        -1.9857459,\n        -1.7843397,\n        -1.9243836,\n        -1.9946369,\n        -1.9436802,\n        -1.9301946,\n        -2.361733,\n        -2.1552134,\n        -1.9145958,\n        -1.8832469,\n        -1.9697897,\n        -2.5311952,\n        -1.8784039,\n        -2.0148246,\n        -1.8317643,\n        -1.9458715,\n        -2.2802289,\n        -2.193603,\n        -2.2077315,\n        -2.1669204,\n        -1.7186235,\n        -2.1428325,\n        -2.2886713,\n        -2.6043172,\n        -1.8573753,\n        -2.1323564,\n        -2.2403038,\n        -2.0174465,\n        -1.9318246,\n        -1.9016538,\n        -1.9055052,\n        -2.0054536,\n        -2.4183135,\n        -2.0512388,\n        -2.1760705,\n        -2.0273197,\n        -2.271114,\n        -2.0847688,\n        -1.9064558,\n        -1.9554824,\n        -2.1253724,\n        -1.98131,\n        -2.3002048,\n        -2.094602,\n        -2.3036559,\n        -2.5028017,\n        -2.6302726,\n        -1.8619525,\n        -1.938369,\n        -1.9711773,\n        -3.1919992,\n        -2.157826,\n        -2.8051956,\n        -2.0006428,\n       
 -2.3635347,\n        -3.124144,\n        -3.1220577,\n        -2.376385,\n        -2.287251,\n        -2.3049092,\n        -2.07427,\n        -2.163283,\n        -3.0154366,\n        -3.9707642,\n        -2.0178869,\n        -2.125331,\n        -2.7371986,\n        -2.8076415,\n        -1.978846,\n        -2.6553276,\n        -2.7560446,\n        -2.1825616,\n        -2.060888,\n        -1.8457292,\n        -2.053117,\n        -2.0136225,\n        -2.3343344,\n        -2.3349183,\n        -2.4195023,\n        -2.6442425,\n        -2.4659474,\n        -2.6804495,\n        -2.338013,\n        -2.3729303,\n        -1.7446113,\n        -2.5846603,\n        -2.5249848,\n        -2.5094528,\n        -4.942845,\n        -2.7058222,\n        -3.2194257,\n        -5.138144,\n        -2.381514,\n        -3.5102649,\n        -6.12156,\n        -9.068111,\n        -3.8582501,\n        -6.4702754,\n        -3.025642,\n        -2.0585368,\n        -1.968088,\n        -3.1419234,\n        -1.9947373,\n        -2.905421,\n        -2.1778913,\n        -2.9628203,\n        -2.008788,\n        -2.3595178,\n        -2.4967823,\n        -7.301354,\n        -2.5906043,\n        -2.6280746,\n        -3.429913,\n        -3.6139886,\n        -2.5194135,\n        -2.2956996,\n        -2.4574964,\n        -7.172427,\n        -2.5825264,\n        -2.628515,\n        -2.089512,\n        -3.059124,\n        -2.3417923,\n        -1.9770174,\n        -3.038394,\n        -2.370252,\n        -2.8336518,\n        -2.5989141,\n        -5.2617593,\n        -7.253482,\n        -3.0367568,\n        -3.5020788,\n        -3.5063367,\n        -2.5984735,\n        -3.880897,\n        -2.5269272,\n        -3.3172085,\n        -2.725622,\n        -3.4208205,\n        -2.113782,\n        -3.9765637,\n        -2.0370877,\n        -2.4379008,\n        -2.5530434,\n        -3.2248905,\n        -4.210036,\n        -3.2597442,\n        -2.683376,\n        -3.125675,\n        -2.848652,\n        -2.3545458,\n       
 -3.1987576,\n        -3.697043,\n        -2.5002239,\n        -3.8537326,\n        -4.3223076,\n        -4.366369,\n        -3.2022114,\n        -4.1086054,\n        -3.382559,\n        -4.4218183,\n        -2.5717256,\n        -3.1717417,\n        -3.6525395,\n        -2.8365328,\n        -3.9841425,\n        -3.2895095,\n        -3.5932245,\n        -4.3293204,\n        -4.3896503,\n        -5.628051,\n        -4.3096967,\n        -3.7508967,\n        -3.6594143,\n        -3.3258815,\n        -3.5776339,\n        -5.7001376,\n        -3.542185,\n        -3.0637264,\n        -2.8357048,\n        -2.0344348,\n        -2.1466126,\n        -2.8275092,\n        -8.12309,\n        -3.0167618,\n        -3.660664,\n        -3.2416954,\n        -2.6405907,\n        -2.0970125,\n        -4.1815867,\n        -4.8249936,\n        -2.958169,\n        -3.3893027,\n        -11.295491,\n        -4.4177246,\n        -3.719296,\n        -2.8651376,\n        -3.0947475,\n        -3.5775409,\n        -3.7627654,\n        -2.813284,\n        -3.600927,\n        -3.3611822,\n        -3.1788363,\n        -2.55794,\n        -2.8159835,\n        -3.2947376,\n        -3.492983,\n        -2.9479876,\n        -2.82707,\n        -3.076763,\n        -3.665544,\n        -2.1067598\n      ],\n      \"pointIndex\": [\n        1,\n        1032,\n        256,\n        689353285,\n        358062710,\n        707465627,\n        928798311,\n        751922518,\n        735611480,\n        1058665567,\n        265321217,\n        269679651,\n        735792016,\n        100209954,\n        630601194,\n        16546650,\n        877686192,\n        2227350,\n        761466660,\n        548083317,\n        338882010,\n        735315837,\n        868603257,\n        100421950,\n        697474313,\n        100267563,\n        954840429,\n        753552253,\n        25036277,\n        171566342,\n        1064449270,\n        951461745,\n        76364998,\n        61220426,\n        1213400,\n        
91618020,\n        204165751,\n        2335086,\n        595445634,\n        320319120,\n        765399906,\n        8414288,\n        468060394,\n        540322388,\n        1069996826,\n        378348830,\n        735579022,\n        109958692,\n        891522308,\n        100821343,\n        110160730,\n        101025242,\n        52293162,\n        231117526,\n        16804159,\n        114844563,\n        494015502,\n        243988449,\n        222627313,\n        735579234,\n        38781835,\n        759759526,\n        841227313,\n        131069421,\n        633475750,\n        477843141,\n        802167502,\n        44889059,\n        735338391,\n        16900265,\n        734973354,\n        224266811,\n        318602826,\n        35337,\n        1224263,\n        1090944909,\n        56727070,\n        530697219,\n        953531538,\n        799480296,\n        329471125,\n        132894792,\n        735474415,\n        865480170,\n        697060796,\n        813409491,\n        807276385,\n        987040578,\n        1031\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -1832695825309728817\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7263805,\n        -1.7375551,\n        -1.7313173,\n        -1.7518847,\n        -1.7382256,\n        -1.8148856,\n        -1.7497174,\n        -1.7751008,\n        -1.7682782,\n        -1.7795463,\n        -1.7421398,\n        -1.8266472,\n        -1.8711087,\n        -1.8068098,\n        -1.7756518,\n        -1.7882441,\n        -1.801252,\n        -1.8973166,\n        -1.7912936,\n        -1.874118,\n        -1.838997,\n        -1.901438,\n        -1.8849925,\n        -1.904245,\n        -1.9089594,\n        -1.8950796,\n   
     -1.8757346,\n        -1.815007,\n        -1.8165854,\n        -1.8207376,\n        -1.834664,\n        -1.7970071,\n        -2.0420365,\n        -1.8186653,\n        -1.9001725,\n        -1.9416976,\n        -1.925906,\n        -1.7918729,\n        -2.0645015,\n        -1.9327296,\n        -1.9502766,\n        -1.8531502,\n        -1.8582855,\n        -2.2072883,\n        -2.0137906,\n        -1.9007939,\n        -1.938968,\n        -1.9130193,\n        -2.0598137,\n        -2.2761853,\n        -2.0687282,\n        -2.3987145,\n        -1.9022133,\n        -1.923783,\n        -1.940225,\n        -1.9351351,\n        -2.2480874,\n        -2.0033512,\n        -1.9436657,\n        -2.217973,\n        -1.9852718,\n        -1.9654913,\n        -2.1221738,\n        -1.8002614,\n        -2.0962842,\n        -3.2507203,\n        -2.6174097,\n        -1.9439516,\n        -1.9302056,\n        -2.4891465,\n        -2.0648394,\n        -2.0536103,\n        -2.4781923,\n        -2.57167,\n        -2.2196703,\n        -2.1147714,\n        -1.8358169,\n        -2.47737,\n        -2.2866445,\n        -2.5095856,\n        -3.876913,\n        -2.0000772,\n        -1.9559746,\n        -1.8986975,\n        -2.2375166,\n        -2.2407198,\n        -2.155431,\n        -2.3410838,\n        -2.3333297,\n        -2.352641,\n        -2.094627,\n        -2.0666466,\n        -1.9877744,\n        -2.3980875,\n        -2.3612285,\n        -2.3800156,\n        -2.0489552,\n        -2.1511087,\n        -2.2666194,\n        -2.2914808,\n        -2.493814,\n        -2.131418,\n        -2.1959467,\n        -2.7689898,\n        -2.4005754,\n        -2.4278355,\n        -2.180492,\n        -2.7206013,\n        -2.0130916,\n        -2.5957193,\n        -2.2647026,\n        -2.1309671,\n        -2.3937173,\n        -2.808019,\n        -2.796604,\n        -2.5641506,\n        -2.9082766,\n        -2.1960948,\n        -2.0550525,\n        -3.4902134,\n        -2.2626262,\n        -2.0546598,\n       
 -2.2353933,\n        -2.2925892,\n        -1.9780589,\n        -2.676092,\n        -2.3579953,\n        -1.8052179,\n        -4.061683,\n        -2.124273,\n        -2.243488,\n        -6.8029437,\n        -3.2897651,\n        -4.5654554,\n        -3.5403163,\n        -3.3789957,\n        -2.0621793,\n        -2.7556863,\n        -2.275963,\n        -3.3505375,\n        -3.1508522,\n        -2.7905304,\n        -3.0283778,\n        -4.7304935,\n        -2.2691193,\n        -2.6516175,\n        -5.4103274,\n        -3.0850492,\n        -3.808751,\n        -2.2798278,\n        -2.6754081,\n        -3.649609,\n        -3.43447,\n        -3.3962407,\n        -2.297451,\n        -3.8745487,\n        -2.7456691,\n        -4.346253,\n        -2.8765647,\n        -2.6176047,\n        -3.0547519,\n        -4.024011,\n        -4.148545,\n        -3.075434,\n        -2.3377926,\n        -1.989783,\n        -2.1525402,\n        -2.4560719,\n        -2.035318,\n        -4.531904,\n        -2.4122272,\n        -8.901809,\n        -4.3958845,\n        -4.914483,\n        -4.5451665,\n        -2.666528,\n        -3.0422502,\n        -4.876383,\n        -3.6195297,\n        -2.6072137,\n        -3.276356,\n        -3.1958067,\n        -2.519129,\n        -2.3006656,\n        -2.9947581,\n        -2.6409235,\n        -3.1454105,\n        -2.9574306,\n        -5.6988854,\n        -2.8363302,\n        -6.363191,\n        -4.958148,\n        -2.904896,\n        -3.4956331,\n        -2.8139682,\n        -4.6646104,\n        -2.503981,\n        -2.3116431,\n        -3.7402675,\n        -2.9581175,\n        -2.3116732,\n        -2.837012,\n        -4.2062826,\n        -4.785396,\n        -2.1955554,\n        -2.675852,\n        -2.9000273,\n        -3.276603,\n        -3.3937223,\n        -2.9771085,\n        -3.0301015,\n        -3.150276,\n        -3.587858,\n        -2.2167044,\n        -3.2472196,\n        -2.772113,\n        -3.528512,\n        -2.1612778,\n        -3.5337818,\n     
   -3.165023,\n        -3.3809435,\n        -2.864679,\n        -2.4150205,\n        -2.3029518,\n        -3.820228,\n        -3.9436743,\n        -2.6318831,\n        -2.8081706,\n        -5.3189373,\n        -3.8409364,\n        -3.647043,\n        -6.631731,\n        -4.7184677,\n        -3.422374,\n        -3.1660762,\n        -2.8524973,\n        -2.216437,\n        -3.2280953,\n        -2.7843888,\n        -5.808835,\n        -3.6174486,\n        -2.4131265,\n        -3.79252,\n        -2.1694925,\n        -2.478608,\n        -2.7515159,\n        -7.3641315,\n        -2.8473504,\n        -3.9948292,\n        -4.4298363,\n        -2.7575362,\n        -4.147462,\n        -3.0440564,\n        -2.7988882,\n        -3.5733883,\n        -3.9266362\n      ],\n      \"pointIndex\": [\n        0,\n        1033,\n        256,\n        731349338,\n        14613626,\n        738787189,\n        208612697,\n        872529539,\n        362840922,\n        736194611,\n        85092925,\n        184539319,\n        649673555,\n        689559781,\n        935619044,\n        111255393,\n        386978073,\n        282355853,\n        147543623,\n        217862,\n        663971652,\n        329853833,\n        293228994,\n        739615088,\n        1009879862,\n        376859430,\n        63225070,\n        143365238,\n        84767336,\n        101898424,\n        1053135895,\n        322310899,\n        229560907,\n        733539971,\n        1061815033,\n        152980298,\n        738877174,\n        220340307,\n        31223905,\n        800126435,\n        925351849,\n        111834680,\n        477520247,\n        62617567,\n        854970829,\n        739847432,\n        837248069,\n        68780741,\n        444068599,\n        954926931,\n        476489926,\n        101668129,\n        111595036,\n        1093735905,\n        882851311,\n        113927368,\n        738928460,\n        48728107,\n        111969491,\n        93189843,\n        250157976,\n        
946210476,\n        903589128,\n        886945811,\n        153603969,\n        465215838,\n        21531192,\n        281552136,\n        636349537,\n        193607220,\n        81339610,\n        476765175,\n        216249893,\n        50438177,\n        626619167,\n        565743034,\n        931858666,\n        1050203583,\n        426998869,\n        650137156,\n        738984040,\n        1022887014,\n        14415768,\n        31276962,\n        702039505,\n        739501876,\n        895804636,\n        490455854,\n        1033\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 7114851682173006812\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7213378,\n        -1.7361705,\n        -1.7221712,\n        -1.7483038,\n        -1.7481142,\n        -1.734512,\n        -1.7354586,\n        -1.7692962,\n        -1.7863561,\n        -1.7501098,\n        -1.8153974,\n        -1.7605233,\n        -1.7958629,\n        -1.7541641,\n        -1.7527289,\n        -1.8128883,\n        -1.7716357,\n        -1.8419961,\n        -1.798154,\n        -1.8803853,\n        -1.8073257,\n        -1.8491515,\n        -1.8381054,\n        -1.7999474,\n        -1.834759,\n        -1.8552243,\n        -1.9752426,\n        -1.8100715,\n        -1.8214978,\n        -1.815661,\n        -1.7622541,\n        -1.8608704,\n        -1.937013,\n        -1.8063078,\n        -2.097493,\n        -2.0325112,\n        -2.090001,\n        -2.2030551,\n        -1.880542,\n        -1.8989832,\n        -1.9169465,\n        -2.0454733,\n        -2.3608801,\n        -2.0398042,\n        -1.9210875,\n        -1.9601887,\n        -2.1688564,\n        -1.8921643,\n        -2.1079338,\n        -1.8846809,\n        
-1.9053581,\n        -1.8609388,\n        -1.861419,\n        -1.9769111,\n        -2.0520697,\n        -2.0500374,\n        -2.1605208,\n        -1.8724028,\n        -2.4285762,\n        -1.9565207,\n        -2.0551217,\n        -2.1283052,\n        -1.780987,\n        -2.0475042,\n        -1.8611298,\n        -2.0052965,\n        -2.480902,\n        -2.116227,\n        -3.3740113,\n        -2.248864,\n        -2.608668,\n        -2.3502972,\n        -2.0546894,\n        -2.1364262,\n        -2.493374,\n        -2.5947201,\n        -2.2172499,\n        -1.9032508,\n        -2.2693121,\n        -2.1549084,\n        -2.317325,\n        -2.1600435,\n        -2.6500542,\n        -2.3052373,\n        -2.0892093,\n        -2.7369802,\n        -2.6205173,\n        -2.3193016,\n        -2.1746347,\n        -2.361723,\n        -2.1305943,\n        -2.4848862,\n        -2.1849778,\n        -2.4694436,\n        -2.3367288,\n        -2.1838152,\n        -2.0030391,\n        -3.9612434,\n        -2.7162955,\n        -2.4098318,\n        -2.184592,\n        -2.4032464,\n        -1.9063706,\n        -2.3570921,\n        -1.9482576,\n        -1.9491566,\n        -1.8808714,\n        -2.0300531,\n        -2.1099784,\n        -2.2152898,\n        -2.096946,\n        -2.1582355,\n        -2.911972,\n        -2.8457968,\n        -2.1733398,\n        -2.0358014,\n        -1.8754802,\n        -2.7525644,\n        -2.5826657,\n        -2.1905253,\n        -2.1240547,\n        -2.1860065,\n        -2.3271196,\n        -2.3023238,\n        -2.2108738,\n        -2.3863454,\n        -1.9728581,\n        -2.0932593,\n        -2.722052,\n        -2.7966716,\n        -3.0517302,\n        -2.1796262,\n        -2.0828886,\n        -2.562471,\n        -3.213032,\n        -4.5760326,\n        -2.4981914,\n        -3.4045708,\n        -5.9084907,\n        -2.377274,\n        -2.4985743,\n        -3.1099327,\n        -2.7495134,\n        -2.5695348,\n        -3.187431,\n        -2.2065935,\n        
-2.4225142,\n        -4.160821,\n        -2.4481084,\n        -4.11453,\n        -5.694333,\n        -2.6227708,\n        -4.109512,\n        -4.6349754,\n        -3.980821,\n        -2.2555346,\n        -3.3175068,\n        -2.3780687,\n        -3.4288237,\n        -2.2603228,\n        -3.2764254,\n        -2.3525176,\n        -2.799586,\n        -2.3389485,\n        -3.0396752,\n        -3.6468666,\n        -4.069407,\n        -4.4919386,\n        -2.6284664,\n        -2.120248,\n        -4.5610027,\n        -3.5115538,\n        -3.415773,\n        -2.7309248,\n        -3.5663652,\n        -5.1245418,\n        -3.8632941,\n        -2.4682183,\n        -4.012755,\n        -2.8533666,\n        -3.6104355,\n        -2.9533494,\n        -4.055925,\n        -4.165349,\n        -3.9857836,\n        -2.2085397,\n        -4.4871273,\n        -4.0958743,\n        -2.6484976,\n        -2.529068,\n        -3.3469458,\n        -2.4826763,\n        -2.2764714,\n        -3.134541,\n        -2.082051,\n        -9.624558,\n        -4.521997,\n        -3.217462,\n        -4.2082963,\n        -3.1595595,\n        -4.406593,\n        -5.5054197,\n        -5.135093,\n        -3.4735184,\n        -4.4578037,\n        -3.7940795,\n        -2.4316862,\n        -4.425623,\n        -2.9476283,\n        -2.2842023,\n        -3.7966342,\n        -2.10466,\n        -2.76057,\n        -2.1770823,\n        -2.3118787,\n        -3.3338983,\n        -2.4358966,\n        -2.508122,\n        -2.1342807,\n        -2.881158,\n        -2.2848332,\n        -3.1506746,\n        -3.3806746,\n        -2.2328131,\n        -2.6429768,\n        -4.078485,\n        -3.1872077,\n        -3.1687684,\n        -3.6513085,\n        -2.7235165,\n        -3.5960937,\n        -2.1952925,\n        -2.744159,\n        -2.8644168,\n        -5.7612257,\n        -3.5861068,\n        -4.144304,\n        -4.885823,\n        -2.9428275,\n        -2.2381268,\n        -4.2431483,\n        -2.7445326,\n        -2.2255795,\n   
     -2.9216354,\n        -3.0350933,\n        -3.3693304,\n        -2.9375076,\n        -4.2570996,\n        -2.4565692,\n        -3.0399098,\n        -2.8765717,\n        -3.1432006,\n        -2.9929404,\n        -2.4635708,\n        -3.6228664\n      ],\n      \"pointIndex\": [\n        0,\n        1033,\n        255,\n        102282497,\n        402100990,\n        263115829,\n        713115,\n        1040384128,\n        771063143,\n        768652959,\n        736670769,\n        152013919,\n        713599392,\n        627315414,\n        111936508,\n        102347984,\n        547455531,\n        737257605,\n        534676939,\n        846479071,\n        748973419,\n        209359415,\n        597045158,\n        39458129,\n        108697843,\n        111921523,\n        671538193,\n        32777895,\n        90930055,\n        102374417,\n        228443369,\n        737166609,\n        233174389,\n        737625902,\n        324540835,\n        1012123581,\n        815480795,\n        776409398,\n        547506257,\n        291907161,\n        102283229,\n        56760764,\n        244724518,\n        737177934,\n        1061307958,\n        167589652,\n        17325972,\n        689631081,\n        848289559,\n        324193176,\n        837187061,\n        626573700,\n        278319781,\n        164757709,\n        371895,\n        166495369,\n        637255259,\n        180330729,\n        9325741,\n        781197039,\n        879363921,\n        30034693,\n        93788208,\n        737511564,\n        605241038,\n        699330485,\n        294026927,\n        141260978,\n        152436549,\n        547490228,\n        735083165,\n        153743430,\n        306299780,\n        795663008,\n        269647623,\n        560345643,\n        164680794,\n        736927768,\n        325325629,\n        737264782,\n        300121328,\n        1001897653,\n        131039206,\n        71436045,\n        552214868,\n        25821548,\n        637943456,\n        
1105481408\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -3818791177366136709\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7702423,\n        -1.7741617,\n        -1.7761874,\n        -1.8201277,\n        -1.7830086,\n        -1.7889271,\n        -1.7833676,\n        -1.8924297,\n        -1.874537,\n        -1.7873601,\n        -1.7973388,\n        -1.8079433,\n        -1.8390552,\n        -1.7958614,\n        -1.814892,\n        -1.9106158,\n        -1.9397742,\n        -1.9598393,\n        -2.1195445,\n        -1.8822867,\n        -1.8601811,\n        -1.8263239,\n        -1.8335419,\n        -1.8965925,\n        -1.8371737,\n        -1.8483274,\n        -1.9001577,\n        -1.8762997,\n        -1.8220085,\n        -2.0048501,\n        -1.819184,\n        -1.9703261,\n        -1.9413223,\n        -1.9491875,\n        -2.1189895,\n        -2.1726112,\n        -1.965673,\n        -2.5208008,\n        -2.2000847,\n        -1.9716797,\n        -1.8830699,\n        -1.8652872,\n        -1.898027,\n        -1.9595675,\n        -1.8423129,\n        -1.8456022,\n        -2.1123471,\n        -2.1530201,\n        -2.092249,\n        -1.9827334,\n        -1.8859129,\n        -2.0667198,\n        -1.9365962,\n        -1.9075276,\n        -1.9287502,\n        -1.9576122,\n        -1.8889908,\n        -1.9392532,\n        -1.9073569,\n        -3.0292616,\n        -2.0586112,\n        -2.083684,\n        -2.1338289,\n        -2.0773046,\n        -2.2736607,\n        -2.734866,\n        -2.3995154,\n        -2.7925901,\n        -2.0446103,\n        -2.3112795,\n        -2.466452,\n        -2.227907,\n        -2.4852831,\n        -2.9385371,\n        -2.014702,\n        
-2.7009735,\n        -2.9879744,\n        -2.6110694,\n        -2.2149563,\n        -2.028647,\n        -2.2171886,\n        -2.1633236,\n        -1.9537677,\n        -3.0489364,\n        -2.1664395,\n        -1.9824026,\n        -2.9345503,\n        -2.3468847,\n        -2.2192733,\n        -2.2084255,\n        -2.6179307,\n        -3.1664317,\n        -1.9642246,\n        -2.3182428,\n        -2.3802795,\n        -2.184298,\n        -2.4220433,\n        -2.1763873,\n        -2.5785458,\n        -2.0459166,\n        -2.329075,\n        -2.488287,\n        -2.4209292,\n        -2.289788,\n        -2.432514,\n        -3.602149,\n        -2.2484367,\n        -2.1083975,\n        -1.9968246,\n        -2.41732,\n        -2.0026295,\n        -3.0169873,\n        -2.1224093,\n        -1.9760996,\n        -2.382224,\n        -2.0337443,\n        -2.035265,\n        -2.2563734,\n        -3.0550318,\n        -3.084549,\n        -3.300399,\n        -2.7731838,\n        -2.263299,\n        -2.2811584,\n        -2.718737,\n        -2.45838,\n        -2.9797404,\n        -5.3977304,\n        -2.3861601,\n        -2.578384,\n        -3.7122684,\n        -8.983812,\n        -2.9973774,\n        -4.0429688,\n        -4.6303134,\n        -3.2663412,\n        -3.2816062,\n        -2.323853,\n        -3.3423917,\n        -4.5679555,\n        -3.444484,\n        -3.4950154,\n        -4.2491784,\n        -5.39973,\n        -3.4007435,\n        -7.403896,\n        -2.6254303,\n        -3.2303808,\n        -3.6560638,\n        -4.3585124,\n        -2.105285,\n        -6.5697093,\n        -3.3331714,\n        -3.7785146,\n        -3.531168,\n        -5.574697,\n        -3.2225053,\n        -4.5398073,\n        -2.4795613,\n        -3.6858013,\n        -2.3718863,\n        -2.5371513,\n        -3.7150352,\n        -2.5630503,\n        -2.3375943,\n        -2.902592,\n        -2.5773351,\n        -3.599029,\n        -3.5350518,\n        -6.773025,\n        -3.0676215,\n        -3.2018552,\n 
       -2.5566518,\n        -3.6761866,\n        -4.0427117,\n        -2.5090013,\n        -2.543057,\n        -4.6302094,\n        -3.399911,\n        -3.6339245,\n        -6.434297,\n        -3.2186852,\n        -4.402264,\n        -4.718331,\n        -3.476106,\n        -2.2835002,\n        -4.386716,\n        -2.4162905,\n        -2.8879254,\n        -2.3906515,\n        -2.549378,\n        -3.7428405,\n        -3.4365635,\n        -4.513357,\n        -5.2633185,\n        -3.860512,\n        -2.5353842,\n        -3.062731,\n        -2.9334655,\n        -4.586098,\n        -2.68154,\n        -3.8288786,\n        -2.4388762,\n        -2.887588,\n        -2.8646297,\n        -3.2624207,\n        -3.6507297,\n        -2.5473955,\n        -2.7972898,\n        -2.5632856,\n        -2.526004,\n        -3.6276956,\n        -4.005558,\n        -2.4402428,\n        -2.9025264,\n        -2.750446,\n        -3.081374,\n        -5.848348,\n        -3.2820542,\n        -2.473018,\n        -5.4867163,\n        -2.3861876,\n        -2.1360703,\n        -3.5660334,\n        -4.956147,\n        -3.6622944,\n        -2.6745722,\n        -2.1847558,\n        -4.5681486,\n        -3.0090547,\n        -2.9648213,\n        -3.0344598,\n        -3.042011,\n        -3.9562647,\n        -4.75239,\n        -3.0026288,\n        -5.348022,\n        -6.03404,\n        -3.1813967,\n        -3.0963845,\n        -3.6727166,\n        -3.4077396,\n        -3.925601,\n        -2.984088,\n        -5.1343,\n        -2.455299,\n        -3.2948701,\n        -3.0572507,\n        -2.553569,\n        -3.5304341,\n        -2.7200413,\n        -2.47091,\n        -5.0832996,\n        -7.197777,\n        -5.735646\n      ],\n      \"pointIndex\": [\n        0,\n        1029,\n        255,\n        1070066769,\n        296505090,\n        17082542,\n        35027785,\n        737743212,\n        110350679,\n        658687155,\n        196031728,\n        844566230,\n        632394665,\n        252047322,\n   
     825388892,\n        1054905746,\n        195921458,\n        600571869,\n        30779653,\n        238901959,\n        418264477,\n        208034779,\n        738096546,\n        1082870844,\n        216466534,\n        738161860,\n        110550369,\n        101399425,\n        588454753,\n        222896144,\n        101501350,\n        530370165,\n        581398823,\n        865748093,\n        19339728,\n        581457754,\n        277277224,\n        976110164,\n        111019908,\n        794269074,\n        988807715,\n        227532523,\n        738052345,\n        742673355,\n        633035817,\n        158109115,\n        44656345,\n        118289107,\n        110432056,\n        100835067,\n        529496953,\n        522640781,\n        55513393,\n        152485492,\n        952755174,\n        522060870,\n        530435245,\n        359743779,\n        952786763,\n        613298843,\n        302351015,\n        999375620,\n        745607579,\n        316643295,\n        2446261,\n        737407571,\n        928288389,\n        293593286,\n        755411212,\n        46435184,\n        258011502,\n        101104250,\n        45642214,\n        1068415975,\n        27844964,\n        97070985,\n        491061346,\n        821235145,\n        111238044,\n        84606955,\n        466830318,\n        466249765,\n        783558506,\n        475390916,\n        357468809,\n        55681109,\n        759168698,\n        1092706086\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 6086357924367131302\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7891973,\n        -1.7938184,\n        -1.7990597,\n        -1.794038,\n        -1.8355857,\n        -1.8043972,\n   
     -1.8624703,\n        -1.8014457,\n        -1.9507738,\n        -1.8365257,\n        -1.8568066,\n        -1.8230112,\n        -1.8497584,\n        -1.9112098,\n        -1.9273734,\n        -1.8456591,\n        -1.9012897,\n        -2.051263,\n        -1.9693652,\n        -1.9001044,\n        -1.8517295,\n        -1.9004611,\n        -1.9388436,\n        -1.8347762,\n        -1.8312088,\n        -1.8608724,\n        -1.9499263,\n        -2.015206,\n        -1.9469022,\n        -1.943752,\n        -1.9391099,\n        -1.851484,\n        -1.9697275,\n        -1.9337447,\n        -1.9535925,\n        -2.55787,\n        -2.1713665,\n        -2.162509,\n        -1.9912992,\n        -2.2144854,\n        -1.9345199,\n        -2.4184113,\n        -2.1061249,\n        -1.9422418,\n        -1.9594705,\n        -2.0076475,\n        -2.0332174,\n        -1.9057178,\n        -1.9363347,\n        -1.8374417,\n        -2.1650097,\n        -2.156692,\n        -2.3073993,\n        -1.9596207,\n        -2.1123805,\n        -2.073181,\n        -2.0820956,\n        -2.0289507,\n        -2.0272346,\n        -2.2688544,\n        -1.96197,\n        -2.037199,\n        -1.9644263,\n        -1.9029511,\n        -2.1392868,\n        -2.0010538,\n        -2.0658603,\n        -2.4028983,\n        -2.031586,\n        -2.4779835,\n        -2.55722,\n        -2.6982176,\n        -2.5748029,\n        -2.266887,\n        -2.7431335,\n        -2.3569906,\n        -2.8150468,\n        -2.2750762,\n        -2.5548105,\n        -2.4642677,\n        -2.6854718,\n        -1.9813852,\n        -2.014481,\n        -2.511371,\n        -2.8830795,\n        -2.16451,\n        -2.3447776,\n        -2.065102,\n        -2.4947796,\n        -1.9728829,\n        -2.305431,\n        -2.3086157,\n        -2.2161295,\n        -2.2300034,\n        -2.3863747,\n        -2.309367,\n        -2.6166027,\n        -2.0461657,\n        -2.0063524,\n        -2.2658095,\n        -2.1662562,\n        -2.3095102,\n        
-2.399888,\n        -2.172814,\n        -2.554477,\n        -2.8587255,\n        -2.8032956,\n        -2.4694288,\n        -2.2257938,\n        -2.2674966,\n        -2.5122075,\n        -2.2708306,\n        -2.5574882,\n        -3.0335155,\n        -2.6084294,\n        -2.3969684,\n        -2.2430587,\n        -3.0452266,\n        -2.0837376,\n        -2.7869911,\n        -2.5844896,\n        -2.3934283,\n        -2.7919724,\n        -3.4120383,\n        -2.616348,\n        -2.1856997,\n        -2.629461,\n        -1.9124812,\n        -4.512979,\n        -4.584976,\n        -2.4355335,\n        -2.0747797,\n        -2.308362,\n        -2.4352875,\n        -3.2322314,\n        -2.471324,\n        -2.7473583,\n        -3.5841548,\n        -2.3818414,\n        -2.582833,\n        -7.105225,\n        -2.8461235,\n        -3.6598423,\n        -6.1045427,\n        -3.2373908,\n        -3.3672473,\n        -3.2429414,\n        -3.654256,\n        -3.1720347,\n        -2.7951217,\n        -3.622787,\n        -2.9572363,\n        -2.5096502,\n        -5.2478595,\n        -2.9272563,\n        -5.062048,\n        -2.2758002,\n        -3.0355945,\n        -2.758816,\n        -2.53904,\n        -2.8399243,\n        -2.899185,\n        -2.8000119,\n        -2.6176448,\n        -2.1764967,\n        -2.8477302,\n        -3.8203726,\n        -2.7194335,\n        -3.333414,\n        -3.8880222,\n        -3.387632,\n        -2.832152,\n        -3.8208325,\n        -2.8803353,\n        -3.148308,\n        -2.1635082,\n        -3.6437538,\n        -6.291415,\n        -2.556656,\n        -3.2039547,\n        -2.5613267,\n        -2.8584914,\n        -2.312199,\n        -5.0586023,\n        -2.5204206,\n        -2.5504262,\n        -2.8208148,\n        -2.2910707,\n        -3.1887956,\n        -2.6971226,\n        -2.850193,\n        -4.6429367,\n        -3.400458,\n        -2.8184445,\n        -2.719052,\n        -2.2813413,\n        -2.2796204,\n        -3.3232892,\n        
-3.165229,\n        -3.7672992,\n        -4.649677,\n        -4.32578,\n        -2.2768762,\n        -3.4076962,\n        -6.425753,\n        -3.6577134,\n        -3.4111705,\n        -3.7387614,\n        -2.376335,\n        -5.414337,\n        -3.8767424,\n        -2.938985,\n        -3.1410298,\n        -3.217051,\n        -2.9517841,\n        -2.5554893,\n        -3.1065688,\n        -2.747226,\n        -3.1555014,\n        -2.39086,\n        -4.164318,\n        -3.961989,\n        -2.9115572,\n        -3.7175267,\n        -2.638704,\n        -3.8871746,\n        -4.5865183,\n        -5.3320184,\n        -3.2834165,\n        -4.9169993,\n        -3.4118302,\n        -3.037287,\n        -2.8454387,\n        -2.3995957,\n        -2.5613315,\n        -3.7722256,\n        -4.1652484,\n        -2.6312847,\n        -2.4832075,\n        -6.6289434,\n        -3.5507565,\n        -2.9681396,\n        -3.646959,\n        -4.3364744,\n        -2.6795216,\n        -3.705987,\n        -3.0310488,\n        -3.9621572,\n        -4.4465737,\n        -4.2029114,\n        -2.9274888,\n        -2.4574783,\n        -5.6766067,\n        -3.7883599,\n        -4.292392,\n        -1.9271429\n      ],\n      \"pointIndex\": [\n        0,\n        1020,\n        256,\n        226860082,\n        241512673,\n        1028514,\n        723551144,\n        724162855,\n        949770846,\n        38667373,\n        724188248,\n        766423885,\n        704485949,\n        109106601,\n        357033163,\n        723546981,\n        99095401,\n        182257362,\n        723488220,\n        8610383,\n        1063290393,\n        784982760,\n        919321232,\n        870395921,\n        956194051,\n        301362488,\n        215810088,\n        41501026,\n        99138166,\n        801304245,\n        99074966,\n        68067424,\n        980303516,\n        46445462,\n        241967848,\n        16369592,\n        190912487,\n        757402334,\n        43923004,\n        254452877,\n      
  90790379,\n        899012327,\n        520475393,\n        520975967,\n        710639982,\n        34664749,\n        36582529,\n        414930720,\n        528773858,\n        108502724,\n        824192144,\n        528623875,\n        11399825,\n        85788177,\n        724121779,\n        99435884,\n        39188759,\n        163329947,\n        593182680,\n        988942728,\n        808378465,\n        170087464,\n        72980154,\n        977459099,\n        778872932,\n        288901501,\n        76250440,\n        131723719,\n        8387593,\n        723618749,\n        176587137,\n        413946635,\n        1038566354,\n        238951605,\n        8649378,\n        905562916,\n        723751455,\n        1023415449,\n        41173026,\n        485331826,\n        226684860,\n        99550015,\n        724162830,\n        939031945,\n        950554570,\n        168810069,\n        683416579,\n        921198886,\n        991\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 130347926699401324\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7458738,\n        -1.7459202,\n        -1.7540773,\n        -1.749542,\n        -1.7758787,\n        -1.7663901,\n        -1.7597626,\n        -1.7983912,\n        -1.7967318,\n        -1.790723,\n        -1.7942529,\n        -1.7698159,\n        -1.7901419,\n        -1.7606037,\n        -1.7695391,\n        -1.8286564,\n        -1.8160319,\n        -1.816564,\n        -1.8332781,\n        -1.8665884,\n        -1.8134235,\n        -1.8990643,\n        -1.8318924,\n        -1.8472352,\n        -2.2606862,\n        -1.8384478,\n        -1.805894,\n        -1.8035682,\n        -1.913579,\n        -1.9970407,\n        
-1.7931528,\n        -1.9376734,\n        -1.9632316,\n        -1.837514,\n        -1.9540465,\n        -1.8792814,\n        -1.9826835,\n        -1.8808243,\n        -1.8765384,\n        -1.9517182,\n        -1.8930686,\n        -1.9339737,\n        -1.8993996,\n        -2.2783897,\n        -2.1686013,\n        -1.8984512,\n        -2.0246086,\n        -1.8897066,\n        -1.9112937,\n        -2.2872317,\n        -2.2853355,\n        -2.0051572,\n        -1.8750285,\n        -1.8121555,\n        -1.8786547,\n        -1.8933679,\n        -2.003782,\n        -1.9410443,\n        -1.9946569,\n        -2.4242601,\n        -2.0011337,\n        -2.1233604,\n        -1.9996283,\n        -2.2002804,\n        -2.5613973,\n        -2.0985966,\n        -2.1519492,\n        -1.9718783,\n        -2.069552,\n        -2.0823627,\n        -2.119145,\n        -1.9674317,\n        -2.4302022,\n        -2.5696392,\n        -2.0152953,\n        -1.965902,\n        -3.1185346,\n        -2.0377831,\n        -2.8895268,\n        -2.2058628,\n        -2.2036233,\n        -2.2515984,\n        -2.0698123,\n        -2.926231,\n        -2.3575144,\n        -1.9349142,\n        -1.9187722,\n        -2.3093095,\n        -2.3299356,\n        -2.2274444,\n        -2.3721182,\n        -1.9954238,\n        -2.0040684,\n        -2.1480374,\n        -2.3712957,\n        -1.9408329,\n        -2.2641346,\n        -2.5376055,\n        -2.378402,\n        -2.4461465,\n        -2.72218,\n        -2.9016545,\n        -3.1972055,\n        -3.4462445,\n        -2.1440892,\n        -2.2128904,\n        -2.7813263,\n        -2.4548275,\n        -1.9589072,\n        -2.2507253,\n        -1.9105552,\n        -2.838215,\n        -1.8960181,\n        -2.129582,\n        -2.564832,\n        -1.9558966,\n        -2.315645,\n        -2.0832243,\n        -2.1454003,\n        -2.7185538,\n        -2.744642,\n        -2.3759334,\n        -2.0640194,\n        -2.6081028,\n        -2.7267694,\n        -2.530277,\n       
 -2.1042588,\n        -3.52893,\n        -4.7533956,\n        -6.1599216,\n        -2.7777214,\n        -2.1850455,\n        -5.1596055,\n        -4.6121492,\n        -3.4246397,\n        -3.0896103,\n        -2.8528311,\n        -2.8153925,\n        -2.3002465,\n        -2.5653515,\n        -2.1697829,\n        -2.562335,\n        -2.392597,\n        -2.8613794,\n        -4.6819,\n        -2.705143,\n        -3.0419872,\n        -3.25114,\n        -3.7311366,\n        -2.986425,\n        -3.7844992,\n        -2.8878086,\n        -3.5765872,\n        -4.542649,\n        -3.7292254,\n        -3.1321683,\n        -2.1281826,\n        -4.6114335,\n        -3.605644,\n        -2.778752,\n        -6.076429,\n        -3.567361,\n        -2.657346,\n        -3.359025,\n        -2.8356557,\n        -2.7868803,\n        -2.2494237,\n        -4.4436207,\n        -4.687628,\n        -4.907177,\n        -2.7419994,\n        -3.3269196,\n        -2.4788537,\n        -2.1154923,\n        -2.7898583,\n        -3.3839576,\n        -2.9270916,\n        -4.095572,\n        -3.4070337,\n        -2.5832121,\n        -2.338569,\n        -4.0131354,\n        -4.154775,\n        -2.546357,\n        -2.1281435,\n        -2.2800431,\n        -2.0756383,\n        -2.39434,\n        -2.5916631,\n        -3.218564,\n        -3.0408003,\n        -2.1862967,\n        -2.6716952,\n        -2.347239,\n        -2.2905219,\n        -3.3032372,\n        -2.5469925,\n        -2.4376462,\n        -3.320769,\n        -3.902425,\n        -2.9026523,\n        -2.7472348,\n        -4.1579685,\n        -3.2284787,\n        -3.8274834,\n        -3.5813596,\n        -3.491209,\n        -3.4919388,\n        -4.2455463,\n        -2.287259,\n        -2.2776346,\n        -3.9811766,\n        -2.5823433,\n        -4.076829,\n        -3.4749491,\n        -2.5761414,\n        -2.8577077,\n        -5.204262,\n        -2.8644261,\n        -3.53813,\n        -2.4176593,\n        -2.8726804,\n        -2.9213688,\n      
  -5.10005,\n        -4.142307,\n        -2.0991101,\n        -3.3309941,\n        -5.661359,\n        -2.4053798,\n        -4.96297,\n        -3.2823954,\n        -4.2527103,\n        -4.3435597,\n        -2.5295732,\n        -4.6548147,\n        -2.224739,\n        -3.4724247,\n        -3.3562639,\n        -4.784001,\n        -3.1374204,\n        -2.7418466,\n        -3.4137278,\n        -3.5025773,\n        -2.9059665,\n        -3.780606,\n        -2.6394105,\n        -2.9706342,\n        -3.7509956,\n        -6.758096,\n        -3.0541124,\n        -2.8361156,\n        -4.890127,\n        -2.767828,\n        -4.053451,\n        -3.3795867\n      ],\n      \"pointIndex\": [\n        3,\n        1024,\n        255,\n        185576736,\n        888950489,\n        779888112,\n        741163838,\n        959689728,\n        972420614,\n        90006619,\n        1027785997,\n        718388930,\n        96119507,\n        861024445,\n        380700131,\n        78693319,\n        718166718,\n        726281137,\n        40314926,\n        619702085,\n        9107576,\n        297396600,\n        519616482,\n        267044040,\n        1023621944,\n        718327921,\n        834201033,\n        106427222,\n        49706833,\n        64609982,\n        917838234,\n        609414568,\n        27809213,\n        198416687,\n        228836613,\n        138085226,\n        80462144,\n        675439127,\n        841923879,\n        241446,\n        853438035,\n        416406771,\n        471276313,\n        88331809,\n        718167158,\n        870758307,\n        1053506313,\n        193931755,\n        58565766,\n        105974430,\n        873464539,\n        211279675,\n        106338179,\n        155673097,\n        738631717,\n        32122147,\n        330452441,\n        96178969,\n        96133784,\n        96187348,\n        96122436,\n        96327680,\n        736019852,\n        969365585,\n        452668911,\n        37816136,\n        219023866,\n        
110373651,\n        354571714,\n        367387698,\n        717758924,\n        853153560,\n        5844897,\n        571426959,\n        609705866,\n        319838337,\n        172264702,\n        281239823,\n        733930512,\n        364618981,\n        699516135,\n        1046195557,\n        730336195,\n        29948157,\n        714269810,\n        863378003,\n        519811510,\n        322668571\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -8018290736902480486\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7742779,\n        -1.782927,\n        -1.7935271,\n        -1.8049732,\n        -1.7903272,\n        -1.8037083,\n        -1.811745,\n        -1.817572,\n        -1.8770543,\n        -1.8074085,\n        -1.8207537,\n        -1.8211355,\n        -1.8509297,\n        -1.9178317,\n        -1.8556215,\n        -1.8207356,\n        -1.8654337,\n        -1.8883383,\n        -1.9475877,\n        -1.8493286,\n        -1.8847728,\n        -1.9458281,\n        -1.8740331,\n        -1.8927585,\n        -1.8659846,\n        -1.9389685,\n        -1.9860533,\n        -1.9296442,\n        -1.9357408,\n        -1.863457,\n        -1.8559549,\n        -1.8219192,\n        -2.0432744,\n        -1.870771,\n        -1.953769,\n        -2.301594,\n        -2.4724388,\n        -2.4559312,\n        -2.0348008,\n        -1.9868361,\n        -1.9131081,\n        -2.0091815,\n        -1.8920166,\n        -2.134798,\n        -2.047697,\n        -2.0522218,\n        -1.9108384,\n        -1.8949109,\n        -2.0104938,\n        -1.885111,\n        -1.8947254,\n        -2.0070653,\n        -2.028318,\n        -2.0352836,\n        -2.2318938,\n        -1.9758452,\n        
-1.9652188,\n        -2.0490868,\n        -2.000966,\n        -1.9816724,\n        -1.9216161,\n        -2.1491387,\n        -2.1937797,\n        -1.8549658,\n        -3.3671987,\n        -2.4767044,\n        -2.072395,\n        -2.1930852,\n        -2.2067597,\n        -2.0510044,\n        -2.005744,\n        -2.397834,\n        -2.8201542,\n        -2.6242805,\n        -2.525026,\n        -3.0812702,\n        -2.8736756,\n        -2.804161,\n        -2.1336691,\n        -2.7248974,\n        -2.455642,\n        -2.0994189,\n        -2.0893044,\n        -2.615462,\n        -2.9190938,\n        -2.2559266,\n        -2.191257,\n        -2.1453724,\n        -2.3776965,\n        -2.0627317,\n        -2.4819498,\n        -2.2644243,\n        -2.3848276,\n        -2.7275438,\n        -1.9400868,\n        -2.6268668,\n        -2.1447332,\n        -2.4450848,\n        -2.7891557,\n        -2.0396283,\n        -1.9161348,\n        -1.9080963,\n        -2.0300424,\n        -2.632521,\n        -2.0695016,\n        -2.083452,\n        -2.8567162,\n        -2.1522179,\n        -2.4074516,\n        -2.9479344,\n        -3.371155,\n        -2.0262294,\n        -2.4203074,\n        -2.0013454,\n        -2.356698,\n        -4.088611,\n        -2.5771027,\n        -2.145145,\n        -2.1506948,\n        -2.2148411,\n        -2.1181817,\n        -2.1483655,\n        -2.3675692,\n        -2.5563858,\n        -2.1789782,\n        -2.4433784,\n        -2.882444,\n        -2.0437548,\n        -2.97634,\n        -4.9177,\n        -3.4715421,\n        -4.124766,\n        -4.4633274,\n        -3.3401403,\n        -2.3922188,\n        -4.265721,\n        -3.1980264,\n        -4.0759006,\n        -2.535098,\n        -2.5249221,\n        -3.4394972,\n        -2.6583657,\n        -3.174206,\n        -3.934965,\n        -4.263968,\n        -4.2605233,\n        -3.1849203,\n        -2.8849237,\n        -3.0997977,\n        -3.2478483,\n        -2.5998013,\n        -4.2987695,\n        
-3.332344,\n        -4.200896,\n        -3.704933,\n        -6.0851536,\n        -4.104127,\n        -2.6303682,\n        -2.3432426,\n        -3.2087345,\n        -6.306906,\n        -2.5917318,\n        -4.953048,\n        -4.36103,\n        -3.274854,\n        -3.2982278,\n        -2.1126833,\n        -3.2233055,\n        -2.9062939,\n        -3.29822,\n        -3.2420087,\n        -2.9634907,\n        -4.788699,\n        -7.4372206,\n        -2.8611424,\n        -2.582046,\n        -2.2423937,\n        -3.9410949,\n        -2.9252014,\n        -4.3836813,\n        -2.4027176,\n        -8.114465,\n        -3.0931153,\n        -3.2290485,\n        -2.7975178,\n        -2.4578404,\n        -2.662729,\n        -2.7658195,\n        -5.132716,\n        -2.4653416,\n        -2.5699165,\n        -4.2840476,\n        -6.238168,\n        -3.4399889,\n        -2.8105867,\n        -4.5309677,\n        -3.4960177,\n        -2.994868,\n        -2.9653075,\n        -2.1339335,\n        -2.3873973,\n        -2.1707273,\n        -2.8777766,\n        -1.9274333,\n        -2.5151055,\n        -2.5358534,\n        -2.5167193,\n        -3.9277947,\n        -2.839244,\n        -2.5179868,\n        -3.5413857,\n        -2.103055,\n        -4.5783806,\n        -3.2729514,\n        -3.3976986,\n        -2.7353375,\n        -2.617493,\n        -6.6083703,\n        -2.6767309,\n        -2.9664655,\n        -4.713502,\n        -4.1384845,\n        -3.643049,\n        -2.028004,\n        -2.3718233,\n        -2.9049892,\n        -2.6818995,\n        -3.360933,\n        -5.6230044,\n        -2.793938,\n        -5.8042717,\n        -4.636513,\n        -4.344588,\n        -3.3438268,\n        -4.0825515,\n        -2.307448,\n        -4.179932,\n        -4.9495397,\n        -2.7999353,\n        -2.8246424,\n        -2.3057222,\n        -2.9527013,\n        -2.9480212,\n        -4.1105733,\n        -2.1817973,\n        -3.0926511,\n        -2.449598,\n        -3.0435505,\n        -4.8923807,\n  
      -5.040625,\n        -2.8466733,\n        -2.515366,\n        -3.4752016,\n        -5.507589,\n        -4.8790283,\n        -3.6216578\n      ],\n      \"pointIndex\": [\n        0,\n        1034,\n        256,\n        6454630,\n        228420051,\n        102117739,\n        145331690,\n        717941456,\n        47071363,\n        476793545,\n        966579389,\n        296174573,\n        544604988,\n        102824353,\n        112250204,\n        863461973,\n        259253184,\n        432420108,\n        741546479,\n        207462832,\n        336882107,\n        438512617,\n        1009020122,\n        742047311,\n        93815581,\n        111515620,\n        112422621,\n        152777462,\n        25849220,\n        375947882,\n        179605581,\n        534561526,\n        151061379,\n        6400544,\n        83317513,\n        207005262,\n        657785980,\n        951604438,\n        772742693,\n        376716513,\n        741824878,\n        741386078,\n        628491472,\n        544947358,\n        777602277,\n        1054073684,\n        111797695,\n        111521268,\n        432400235,\n        933067540,\n        96414494,\n        741443604,\n        719962008,\n        286342573,\n        93196954,\n        111765021,\n        61125845,\n        101847200,\n        82633016,\n        101784560,\n        33247368,\n        28536167,\n        142607367,\n        213326518,\n        741460695,\n        479005298,\n        217487759,\n        93205143,\n        86497712,\n        350142065,\n        227244858,\n        742020688,\n        266740190,\n        210815702,\n        784426999,\n        150922760,\n        844471198,\n        102482959,\n        741696763,\n        310118424,\n        456440605,\n        4315620,\n        390545626,\n        534640201,\n        31591804,\n        705477199,\n        742004574,\n        770818026,\n        1034\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      
\"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 7607273630454256775\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6166878,\n        -1.6180277,\n        -1.6216232,\n        -1.6228727,\n        -1.6219813,\n        -1.635756,\n        -1.6265807,\n        -1.6560133,\n        -1.6309868,\n        -1.6393675,\n        -1.6336645,\n        -1.636524,\n        -1.6603762,\n        -1.6327052,\n        -1.6808931,\n        -1.6671044,\n        -1.6852964,\n        -1.7933347,\n        -1.6837842,\n        -1.6893544,\n        -1.6658978,\n        -1.649125,\n        -1.6919701,\n        -1.6879814,\n        -1.6927471,\n        -1.7841454,\n        -1.6966611,\n        -1.6869533,\n        -1.6763427,\n        -1.6893233,\n        -1.8322129,\n        -1.6892469,\n        -2.0100207,\n        -1.7677158,\n        -1.932623,\n        -1.8616197,\n        -1.8876401,\n        -1.8663489,\n        -1.6938057,\n        -1.8469261,\n        -1.977507,\n        -2.0190978,\n        -1.8905826,\n        -1.7598318,\n        -1.6648656,\n        -1.9908979,\n        -1.8212391,\n        -1.9193687,\n        -1.7402436,\n        -1.7644914,\n        -1.8605156,\n        -1.9337134,\n        -2.0262332,\n        -1.8630294,\n        -2.0668433,\n        -1.6899385,\n        -1.8363451,\n        -2.0924673,\n        -1.8828721,\n        -1.7820061,\n        -1.6921434,\n        -1.9934251,\n        -2.4187918,\n        -1.7382892,\n        -2.2235272,\n        -2.7753878,\n        -2.516987,\n        -1.8692182,\n        -1.9802023,\n        -2.5232034,\n        -2.0636158,\n        -2.0971892,\n        -2.4341168,\n        -2.2007446,\n        -2.2460756,\n        -1.999314,\n        -2.2191515,\n        -2.7549808,\n        -1.7425053,\n        -2.4137955,\n        
-1.9388118,\n        -2.80071,\n        -2.292174,\n        -2.1451943,\n        -2.828744,\n        -2.061893,\n        -2.154893,\n        -1.7792556,\n        -2.602723,\n        -2.1476638,\n        -2.471334,\n        -2.0935545,\n        -2.1884823,\n        -2.2556326,\n        -2.6571348,\n        -2.2234051,\n        -2.1578372,\n        -2.855582,\n        -1.8165001,\n        -1.8029543,\n        -1.9425266,\n        -2.646843,\n        -1.9401612,\n        -2.1620598,\n        -2.0720956,\n        -2.7948859,\n        -2.0363657,\n        -2.141345,\n        -1.9503344,\n        -2.6766877,\n        -2.984802,\n        -1.8757877,\n        -1.867727,\n        -2.9219038,\n        -2.28071,\n        -2.2559128,\n        -2.573712,\n        -3.036459,\n        -2.233641,\n        -2.4557755,\n        -2.5671406,\n        -1.7194425,\n        -1.8867804,\n        -2.19423,\n        -2.3825018,\n        -2.6712582,\n        -2.4469564,\n        -1.7476544,\n        -7.425859,\n        -3.150718,\n        -2.2373273,\n        -3.6050756,\n        -6.20976,\n        -7.979375,\n        -5.4883556,\n        -3.5081415,\n        -3.6902416,\n        -1.9828478,\n        -2.2691176,\n        -4.2087407,\n        -3.6642642,\n        -3.1641202,\n        -8.9014225,\n        -2.4093835,\n        -2.8579707,\n        -5.507862,\n        -3.0440426,\n        -3.4115152,\n        -3.6423595,\n        -2.9895892,\n        -3.7129953,\n        -3.0018919,\n        -2.4316092,\n        -5.2371306,\n        -2.9627035,\n        -3.7204707,\n        -3.455863,\n        -2.4724798,\n        -2.5818582,\n        -3.763166,\n        -2.7013094,\n        -2.1444993,\n        -2.0187795,\n        -4.480569,\n        -3.7178774,\n        -2.3083253,\n        -3.8219557,\n        -3.5712507,\n        -2.5033422,\n        -4.075861,\n        -3.814081,\n        -2.2165027,\n        -3.4429348,\n        -4.63539,\n        -3.060742,\n        -2.1189203,\n        -3.6775537,\n     
   -2.802239,\n        -4.579251,\n        -2.1857872,\n        -3.1271665,\n        -3.2500339,\n        -2.6241705,\n        -2.244974,\n        -2.158894,\n        -2.5828085,\n        -2.495642,\n        -2.4378066,\n        -4.4089704,\n        -2.9693635,\n        -2.972198,\n        -2.732911,\n        -4.9379916,\n        -3.1752994,\n        -2.2322066,\n        -3.0193737,\n        -3.6216593,\n        -2.3240645,\n        -4.4654694,\n        -3.4344585,\n        -2.8935435,\n        -2.6081583,\n        -3.1192338,\n        -3.543054,\n        -3.6092315,\n        -3.9698086,\n        -3.2849846,\n        -3.2602642,\n        -2.1961646,\n        -2.1579323,\n        -2.7234974,\n        -3.0247269,\n        -3.6815965,\n        -3.5696614,\n        -3.1362653,\n        -3.4984534,\n        -2.2487803,\n        -2.2461174,\n        -2.0616622,\n        -3.310472,\n        -2.9053435,\n        -4.337758,\n        -3.1315777,\n        -3.5387976,\n        -2.6110525,\n        -6.262158,\n        -1.9734536,\n        -2.9705286,\n        -3.0596435,\n        -3.2770479,\n        -2.4924855,\n        -2.3972082,\n        -3.817022,\n        -4.488544,\n        -4.287587,\n        -3.3106089,\n        -4.35999,\n        -4.208139,\n        -2.5065055,\n        -2.6681383,\n        -4.110587,\n        -2.7485754,\n        -3.1977475,\n        -1.8630259,\n        -1.9024656,\n        -2.824685,\n        -2.4826117,\n        -3.794629,\n        -3.091652,\n        -3.2088513,\n        -2.4113505,\n        -3.5489256,\n        -3.6256285,\n        -2.925018,\n        -2.5376785,\n        -2.987564\n      ],\n      \"pointIndex\": [\n        0,\n        1032,\n        256,\n        739101521,\n        359625618,\n        56663841,\n        189462625,\n        8938549,\n        84363683,\n        102175161,\n        174017939,\n        759976805,\n        738942780,\n        739344262,\n        969673660,\n        758555770,\n        101471606,\n        
500490033,\n        154933567,\n        992269854,\n        17420240,\n        591780533,\n        1016705849,\n        983580939,\n        903564063,\n        212392093,\n        299500303,\n        785494059,\n        542238235,\n        8626715,\n        101890554,\n        322185489,\n        433953024,\n        84407643,\n        21858550,\n        847498684,\n        719400066,\n        1059808564,\n        894986123,\n        294786739,\n        101471993,\n        463565086,\n        738890235,\n        534255403,\n        739141409,\n        102390826,\n        322363837,\n        9251652,\n        155259125,\n        35140253,\n        796156754,\n        111319317,\n        382482816,\n        101471644,\n        52280181,\n        927816936,\n        479464938,\n        102088339,\n        600869544,\n        104218943,\n        34807030,\n        647206627,\n        1090255153,\n        17857989,\n        177006409,\n        178807254,\n        827125293,\n        2772737,\n        711198074,\n        816283371,\n        93781677,\n        106285378,\n        209488292,\n        431628897,\n        386384872,\n        41758398,\n        292157884,\n        413259159,\n        542606875,\n        17523695,\n        455745233,\n        58651042,\n        921613451,\n        534372261,\n        46354817,\n        739124588,\n        105290257,\n        1057593386,\n        1032\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -757304452677969426\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7135965,\n        -1.7136995,\n        -1.7157676,\n        -1.7181004,\n        -1.7154931,\n        -1.8246434,\n        -1.7362574,\n        -1.7346522,\n        
-1.8199588,\n        -1.7231078,\n        -1.720099,\n        -1.8612716,\n        -1.83821,\n        -1.7492942,\n        -1.7446247,\n        -1.8284454,\n        -1.8603145,\n        -1.9138993,\n        -1.831228,\n        -1.8718604,\n        -1.91732,\n        -1.7551297,\n        -2.033309,\n        -1.8765991,\n        -1.9802958,\n        -2.058483,\n        -1.84605,\n        -1.8352607,\n        -1.8584217,\n        -1.8029315,\n        -1.7596902,\n        -1.8959981,\n        -1.9026619,\n        -1.871997,\n        -1.8782265,\n        -2.0252016,\n        -2.358552,\n        -1.8533338,\n        -2.083486,\n        -1.9487244,\n        -1.882554,\n        -2.3023517,\n        -1.9679939,\n        -1.8223289,\n        -1.8072971,\n        -2.100339,\n        -2.0707767,\n        -1.9666268,\n        -1.96567,\n        -2.5006826,\n        -2.1979032,\n        -2.529154,\n        -2.0866454,\n        -1.954549,\n        -1.8969742,\n        -1.8679923,\n        -1.9180048,\n        -1.9437361,\n        -2.1521046,\n        -2.1750622,\n        -1.9755299,\n        -1.9878112,\n        -1.8535962,\n        -2.0658393,\n        -2.0703077,\n        -2.6077945,\n        -2.06919,\n        -1.9849111,\n        -3.7793386,\n        -2.493756,\n        -1.9866674,\n        -2.0299413,\n        -3.1066453,\n        -2.6105788,\n        -2.399347,\n        -1.9853977,\n        -2.2428186,\n        -2.2038195,\n        -2.1202793,\n        -2.2240715,\n        -2.861552,\n        -1.891804,\n        -1.9685422,\n        -2.3588803,\n        -2.4016817,\n        -2.257354,\n        -2.43017,\n        -1.8820353,\n        -1.89598,\n        -2.3582041,\n        -1.9059237,\n        -2.4794056,\n        -2.6336129,\n        -2.449261,\n        -2.1423671,\n        -2.46767,\n        -2.4063032,\n        -3.253103,\n        -2.4521403,\n        -2.572619,\n        -2.8532908,\n        -2.4926558,\n        -2.2619705,\n        -2.8127205,\n        -2.6316361,\n      
  -2.3038146,\n        -2.5326352,\n        -2.7956316,\n        -2.110337,\n        -1.9526602,\n        -2.311502,\n        -2.0347152,\n        -2.957697,\n        -2.0455315,\n        -2.0883517,\n        -2.1302292,\n        -2.7572336,\n        -2.6276178,\n        -2.4254637,\n        -2.5230994,\n        -2.1851058,\n        -2.0715158,\n        -2.0062604,\n        -2.202653,\n        -2.5688324,\n        -2.925744,\n        -2.4261808,\n        -2.4870608,\n        -2.187646,\n        -3.0609841,\n        -2.6472135,\n        -3.3474867,\n        -3.581345,\n        -2.7716699,\n        -2.7266572,\n        -4.3082376,\n        -3.310241,\n        -5.471515,\n        -4.33562,\n        -2.650184,\n        -4.791706,\n        -2.9953864,\n        -5.1024785,\n        -2.8273878,\n        -2.5138736,\n        -4.5667977,\n        -4.2295265,\n        -3.2456486,\n        -2.8843367,\n        -2.5892117,\n        -2.611454,\n        -2.31589,\n        -2.3485153,\n        -5.4121265,\n        -3.8516903,\n        -2.8110795,\n        -2.3436418,\n        -2.582545,\n        -3.97944,\n        -4.651603,\n        -2.411318,\n        -3.0528884,\n        -3.0907474,\n        -3.0097985,\n        -4.7342324,\n        -3.5315208,\n        -2.1657097,\n        -4.2390723,\n        -2.936228,\n        -2.5539465,\n        -4.192869,\n        -3.0271382,\n        -2.7899885,\n        -5.885918,\n        -3.638292,\n        -4.598609,\n        -2.6090565,\n        -2.0222142,\n        -3.514531,\n        -2.9101071,\n        -2.6641734,\n        -1.9962803,\n        -2.7080035,\n        -3.011154,\n        -2.6983337,\n        -3.5960157,\n        -3.275243,\n        -4.408549,\n        -2.4606354,\n        -3.5107026,\n        -3.00748,\n        -6.500345,\n        -3.3213196,\n        -7.8639097,\n        -3.4727492,\n        -4.6648107,\n        -3.6666577,\n        -7.5058265,\n        -3.0931523,\n        -2.670792,\n        -3.4180133,\n        -4.5167036,\n   
     -3.7396464,\n        -3.338267,\n        -2.600346,\n        -2.5254362,\n        -3.317571,\n        -4.302277,\n        -3.4191096,\n        -2.7749753,\n        -4.1880503,\n        -2.3452837,\n        -3.5135424,\n        -4.338403,\n        -5.9206514,\n        -4.549452,\n        -3.1331377,\n        -5.1705585,\n        -2.442981,\n        -4.7312174,\n        -2.3868985,\n        -2.524929,\n        -3.433922,\n        -4.5218687,\n        -2.8124268,\n        -3.0652313,\n        -3.77391,\n        -3.3301105,\n        -2.085214,\n        -2.547295,\n        -2.3550396,\n        -2.8430412,\n        -2.4300187,\n        -3.0907922,\n        -4.117058,\n        -2.9792786,\n        -2.9986253,\n        -5.410131,\n        -4.5892816,\n        -3.133783,\n        -3.771516,\n        -2.7256246,\n        -2.3179505,\n        -2.8882284,\n        -2.3008482,\n        -2.0544724,\n        -5.2797623,\n        -2.7960348,\n        -3.1948164,\n        -3.7833238,\n        -4.181805,\n        -4.8220587,\n        -3.06365,\n        -3.503508,\n        -2.8300083\n      ],\n      \"pointIndex\": [\n        0,\n        1023,\n        255,\n        419951453,\n        96956636,\n        445365168,\n        848769,\n        624321631,\n        296844304,\n        845975460,\n        86206595,\n        261096359,\n        774555207,\n        582347094,\n        470468273,\n        3146417,\n        967960060,\n        722597372,\n        929042572,\n        25896987,\n        95569830,\n        99639565,\n        107498041,\n        1005199081,\n        148716648,\n        595698045,\n        846168,\n        723052043,\n        91259954,\n        306273711,\n        423919356,\n        107725919,\n        4992630,\n        92104369,\n        132129363,\n        275864745,\n        27370141,\n        624707289,\n        109142797,\n        722737018,\n        193644209,\n        394639409,\n        860441624,\n        548465859,\n        400518711,\n        
193207638,\n        388007331,\n        147947184,\n        682816607,\n        27369995,\n        722548468,\n        993098847,\n        532818024,\n        109149472,\n        378158300,\n        100320351,\n        222193406,\n        814885078,\n        99712897,\n        99870350,\n        722566833,\n        150043946,\n        132308992,\n        715793696,\n        204614223,\n        25211,\n        173673659,\n        100181587,\n        70274174,\n        548940810,\n        718668443,\n        814452517,\n        723516440,\n        674958431,\n        100224979,\n        752969893,\n        285918601,\n        292581326,\n        794307651,\n        123023730,\n        386237782,\n        624298398,\n        3640080,\n        582665851,\n        100132215,\n        723055281,\n        901433053,\n        995876529\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -1443679510462455370\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7347459,\n        -1.7394735,\n        -1.7381483,\n        -1.754231,\n        -1.7740259,\n        -1.7581607,\n        -1.7471995,\n        -1.7623876,\n        -1.7909236,\n        -1.9176275,\n        -1.7906872,\n        -1.7900074,\n        -1.8092201,\n        -1.7612339,\n        -1.7569292,\n        -2.0437467,\n        -1.775545,\n        -1.9971812,\n        -1.8742638,\n        -1.9384868,\n        -1.9833477,\n        -1.8229709,\n        -1.9066781,\n        -1.8882413,\n        -1.8450756,\n        -1.8620235,\n        -2.068681,\n        -1.8370917,\n        -2.0627403,\n        -1.8765723,\n        -1.7724979,\n        -2.0590239,\n        -2.449461,\n        -1.7891802,\n        -1.7974008,\n        -2.2016551,\n  
      -2.1197975,\n        -1.8853545,\n        -1.9215225,\n        -2.0432634,\n        -1.9771065,\n        -2.0160198,\n        -2.2218409,\n        -1.9348078,\n        -1.8426956,\n        -1.9509114,\n        -2.038082,\n        -2.5543044,\n        -2.152087,\n        -2.648513,\n        -2.133948,\n        -1.916019,\n        -2.000006,\n        -2.0770962,\n        -2.1443796,\n        -1.9983968,\n        -2.1011918,\n        -2.0810418,\n        -2.1482136,\n        -1.9335482,\n        -1.9012893,\n        -1.824644,\n        -1.938661,\n        -2.2046938,\n        -2.2691963,\n        -3.6068864,\n        -2.8661828,\n        -2.1628983,\n        -1.9279547,\n        -1.833832,\n        -2.0551453,\n        -2.713674,\n        -2.795415,\n        -2.462656,\n        -2.2616754,\n        -1.9144489,\n        -2.1650782,\n        -1.997512,\n        -2.782421,\n        -2.4250057,\n        -2.214602,\n        -2.3261878,\n        -2.34799,\n        -2.2042305,\n        -2.126051,\n        -2.268383,\n        -2.750201,\n        -2.0563984,\n        -2.671494,\n        -2.0033872,\n        -2.6995451,\n        -1.9782327,\n        -2.4117901,\n        -2.925236,\n        -2.1892033,\n        -2.6363072,\n        -2.9420497,\n        -2.8249805,\n        -2.6455832,\n        -2.725805,\n        -3.0097766,\n        -2.5371299,\n        -2.717369,\n        -2.7761137,\n        -2.1064897,\n        -2.2279794,\n        -2.0551527,\n        -2.2883182,\n        -2.7291057,\n        -2.5638142,\n        -3.1108234,\n        -3.1955476,\n        -2.0476542,\n        -2.485119,\n        -2.1437218,\n        -2.220851,\n        -2.4143832,\n        -2.8322825,\n        -2.46383,\n        -2.1007354,\n        -2.3364544,\n        -2.2614713,\n        -2.0341384,\n        -1.912449,\n        -2.8362796,\n        -2.3952615,\n        -2.6028316,\n        -2.668597,\n        -3.8440006,\n        -3.5483372,\n        -2.652846,\n        -3.705314,\n        
-4.7733226,\n        -4.5258327,\n        -5.084285,\n        -4.82561,\n        -2.233787,\n        -2.7597992,\n        -2.1867697,\n        -2.7386017,\n        -2.0815248,\n        -4.6069117,\n        -3.7802503,\n        -3.1006753,\n        -4.680951,\n        -6.1302423,\n        -4.324174,\n        -7.00268,\n        -4.050277,\n        -2.5169914,\n        -2.504396,\n        -2.6829255,\n        -3.2965174,\n        -3.1454434,\n        -2.7643566,\n        -5.549664,\n        -2.0992694,\n        -2.8647306,\n        -2.9470987,\n        -2.5179236,\n        -3.5217705,\n        -2.493775,\n        -3.2341912,\n        -3.0145593,\n        -3.587833,\n        -2.447179,\n        -2.5390737,\n        -2.601489,\n        -2.771792,\n        -2.5648513,\n        -2.1629953,\n        -2.683225,\n        -4.4290395,\n        -2.8727617,\n        -4.1634107,\n        -7.609809,\n        -3.982252,\n        -3.9179842,\n        -3.3660645,\n        -5.2745104,\n        -2.1295073,\n        -3.0498695,\n        -4.7773256,\n        -3.5502975,\n        -2.9065478,\n        -4.306385,\n        -2.8694503,\n        -3.4804726,\n        -3.3991513,\n        -2.7444286,\n        -2.5565257,\n        -3.3066907,\n        -4.623295,\n        -3.410556,\n        -4.410689,\n        -3.9592216,\n        -3.3761177,\n        -2.822778,\n        -2.7244134,\n        -2.7992866,\n        -5.9734287,\n        -4.5867805,\n        -3.2222269,\n        -2.874869,\n        -4.0116825,\n        -4.6141043,\n        -3.812028,\n        -3.4712565,\n        -4.0439873,\n        -4.130561,\n        -3.120064,\n        -2.7037315,\n        -2.42152,\n        -3.3963015,\n        -3.260484,\n        -3.5456893,\n        -3.0255744,\n        -2.8895788,\n        -2.8674421,\n        -6.5274324,\n        -3.7662706,\n        -3.1172621,\n        -3.491285,\n        -3.7791576,\n        -5.5442123,\n        -2.525858,\n        -3.3795898,\n        -8.791986,\n        -3.3659992,\n     
   -2.8019388,\n        -4.1974254,\n        -2.8413677,\n        -2.26471,\n        -2.8140738,\n        -2.4340503,\n        -3.7309914,\n        -3.7927318,\n        -4.864571,\n        -2.794539,\n        -3.5314653,\n        -2.785886,\n        -2.6446779,\n        -3.092817,\n        -2.6836123,\n        -5.2115974,\n        -4.941303,\n        -3.84799,\n        -2.0850093,\n        -3.6065555,\n        -3.431646,\n        -3.6578736,\n        -2.778165,\n        -2.8372567,\n        -3.7021728,\n        -2.6941583\n      ],\n      \"pointIndex\": [\n        0,\n        1031,\n        255,\n        121510804,\n        735934784,\n        516644950,\n        152201424,\n        36215733,\n        735940435,\n        542315149,\n        903854308,\n        736615363,\n        258746231,\n        34794797,\n        782551256,\n        319268912,\n        149559599,\n        693352500,\n        585322202,\n        147749702,\n        215650085,\n        50769623,\n        757945700,\n        1065035478,\n        353623269,\n        349435891,\n        256493971,\n        235019083,\n        663660283,\n        3212567,\n        736833756,\n        336190540,\n        16202432,\n        531534714,\n        106151486,\n        531904117,\n        596274883,\n        315199618,\n        786685146,\n        923527607,\n        955621339,\n        349788009,\n        974505360,\n        234733889,\n        158138714,\n        1091199620,\n        736142803,\n        101284997,\n        101405390,\n        353686018,\n        531962048,\n        706200395,\n        151538621,\n        776073587,\n        736716106,\n        101275348,\n        147811068,\n        367450797,\n        101885690,\n        16074429,\n        319605335,\n        52186228,\n        69939700,\n        102111105,\n        102130640,\n        983102903,\n        736035865,\n        398422989,\n        1081458622,\n        602836680,\n        300481917,\n        277106593,\n        
1098574487,\n        92681970,\n        616747125,\n        227483847,\n        735965216,\n        1039111769,\n        532147965,\n        1025186457,\n        1006096533,\n        542328344,\n        736318141,\n        736364878,\n        829785401,\n        476649019,\n        532245839,\n        101890891\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -5814447565348109662\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7511228,\n        -1.7547671,\n        -1.7649827,\n        -1.7602155,\n        -1.7644068,\n        -1.7974786,\n        -1.8111378,\n        -1.76645,\n        -1.7629541,\n        -1.7887746,\n        -1.8424741,\n        -1.8604313,\n        -1.825724,\n        -1.813784,\n        -1.8164799,\n        -1.7996159,\n        -1.9427739,\n        -1.92378,\n        -1.9825747,\n        -1.9355977,\n        -1.929168,\n        -1.9899186,\n        -1.8879479,\n        -1.9810914,\n        -2.139502,\n        -1.8925554,\n        -1.8323439,\n        -1.9457619,\n        -1.9108175,\n        -1.8209031,\n        -1.8621061,\n        -1.8155944,\n        -2.0194373,\n        -2.2031505,\n        -2.2139351,\n        -1.9271352,\n        -2.0317855,\n        -2.103354,\n        -2.068229,\n        -1.9535385,\n        -1.9589801,\n        -2.0048394,\n        -2.141987,\n        -2.1395955,\n        -2.0265355,\n        -1.890522,\n        -1.8963068,\n        -2.1620572,\n        -2.1312373,\n        -2.3032746,\n        -2.2189019,\n        -1.9480989,\n        -2.142841,\n        -1.8506192,\n        -1.9154953,\n        -2.0063877,\n        -1.9699386,\n        -2.2777627,\n        -2.3113956,\n        -2.062702,\n        -2.4268925,\n        
-1.8760568,\n        -2.0476923,\n        -1.971264,\n        -2.4704957,\n        -2.1776824,\n        -2.1748862,\n        -2.3667104,\n        -2.265379,\n        -2.649047,\n        -2.4815402,\n        -1.9881247,\n        -2.339695,\n        -2.1896973,\n        -2.3558364,\n        -2.3274946,\n        -2.1282146,\n        -2.111352,\n        -2.153737,\n        -1.9565834,\n        -2.4722,\n        -2.1751769,\n        -2.5557127,\n        -2.5569649,\n        -2.4326591,\n        -2.9176764,\n        -2.2936957,\n        -2.1785498,\n        -2.1606998,\n        -2.1816666,\n        -2.1546662,\n        -2.2656825,\n        -2.3489978,\n        -1.9178797,\n        -2.2084754,\n        -2.425072,\n        -2.697172,\n        -2.2067614,\n        -2.4354026,\n        -3.3640983,\n        -2.5109417,\n        -2.4522586,\n        -2.6146648,\n        -2.590165,\n        -2.4847088,\n        -3.1080565,\n        -2.4759765,\n        -2.1774573,\n        -1.9996259,\n        -2.6539307,\n        -2.0263243,\n        -2.041786,\n        -2.6337276,\n        -2.7405884,\n        -2.14849,\n        -2.297524,\n        -3.4519875,\n        -2.9706388,\n        -2.5154743,\n        -2.9371212,\n        -2.1149318,\n        -2.8048918,\n        -2.8034158,\n        -2.0286958,\n        -2.3253965,\n        -2.4457614,\n        -2.1075993,\n        -2.1707664,\n        -2.4698887,\n        -6.3985457,\n        -5.4553065,\n        -3.0534954,\n        -3.7535856,\n        -3.2119567,\n        -5.0649343,\n        -3.1517365,\n        -2.4478056,\n        -5.2392926,\n        -2.2751443,\n        -2.8633776,\n        -2.672378,\n        -3.5623927,\n        -4.931572,\n        -7.0922966,\n        -3.4948246,\n        -2.7835896,\n        -2.8300126,\n        -6.69674,\n        -2.2213356,\n        -4.852264,\n        -2.5520232,\n        -3.6996858,\n        -3.5319383,\n        -2.7030215,\n        -2.4836402,\n        -3.2883487,\n        -4.276891,\n        
-2.2965822,\n        -3.7260692,\n        -3.054534,\n        -2.256999,\n        -3.9361992,\n        -2.6429262,\n        -2.685454,\n        -3.2607734,\n        -3.1706688,\n        -2.6984189,\n        -4.9599323,\n        -4.66685,\n        -2.8531756,\n        -5.126991,\n        -4.127395,\n        -4.217008,\n        -2.5120008,\n        -3.669386,\n        -2.5234091,\n        -2.249207,\n        -2.4890218,\n        -3.3276532,\n        -2.9966183,\n        -4.327049,\n        -4.2945223,\n        -2.4592247,\n        -3.5943713,\n        -3.735938,\n        -2.5562365,\n        -2.998724,\n        -2.0148306,\n        -6.272867,\n        -2.3961935,\n        -3.9867659,\n        -4.4374504,\n        -3.3698826,\n        -3.496857,\n        -3.086979,\n        -2.6779304,\n        -3.9720068,\n        -4.0344386,\n        -2.590073,\n        -8.39037,\n        -6.465327,\n        -3.459859,\n        -2.994035,\n        -2.6803858,\n        -3.9088588,\n        -6.345051,\n        -2.745923,\n        -3.633128,\n        -3.0604568,\n        -3.1205533,\n        -2.825281,\n        -6.732635,\n        -3.4789882,\n        -3.7997115,\n        -2.7432237,\n        -3.4335766,\n        -4.691795,\n        -2.347267,\n        -4.2432585,\n        -4.254174,\n        -4.5696106,\n        -4.059252,\n        -3.132693,\n        -2.108108,\n        -2.608798,\n        -3.0443778,\n        -2.771694,\n        -3.182069,\n        -3.4979742,\n        -2.8097785,\n        -4.098891,\n        -2.8197036,\n        -3.3921201,\n        -3.7390442,\n        -4.041276,\n        -3.0251033,\n        -3.1271384,\n        -4.8476815,\n        -2.759677,\n        -3.203345,\n        -2.9378736,\n        -2.2823048,\n        -2.356367,\n        -3.997121,\n        -3.2126071,\n        -3.6619453,\n        -3.7652857,\n        -2.7961602,\n        -2.204932,\n        -3.3174736,\n        -2.674685,\n        -3.7326925,\n        -3.7288668,\n        -2.671264,\n        
-3.8200424,\n        -2.8722172\n      ],\n      \"pointIndex\": [\n        0,\n        1034,\n        256,\n        658378273,\n        272611859,\n        628255945,\n        603116971,\n        637527626,\n        651346795,\n        881443268,\n        147910250,\n        44829463,\n        49340583,\n        1035125951,\n        742467179,\n        388383245,\n        101864873,\n        813158695,\n        111498040,\n        1067105795,\n        600418683,\n        428136723,\n        984148151,\n        675960328,\n        234153215,\n        967506175,\n        112176063,\n        77723278,\n        102483830,\n        43476314,\n        276541196,\n        535711148,\n        218602949,\n        295304823,\n        932595614,\n        52532765,\n        216033529,\n        886620574,\n        1051026440,\n        722086920,\n        342897228,\n        142999594,\n        966642102,\n        206144551,\n        544190789,\n        314220980,\n        545201433,\n        628122974,\n        111747165,\n        17247933,\n        278565179,\n        106714368,\n        223878028,\n        101819255,\n        526601,\n        96025368,\n        28377725,\n        68289406,\n        66433649,\n        275442575,\n        20451783,\n        475766536,\n        743076873,\n        93914778,\n        93319406,\n        896615515,\n        749806872,\n        535766469,\n        2351178,\n        176407629,\n        535838823,\n        321299206,\n        369671275,\n        166461715,\n        25710530,\n        191293970,\n        853746211,\n        215527865,\n        98870745,\n        233634706,\n        109252870,\n        112348063,\n        743076508,\n        743332664,\n        101955932,\n        696848625,\n        637904759,\n        867338788,\n        1034\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      
\"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 9075576264812164155\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7621267,\n        -1.7643011,\n        -1.763683,\n        -1.7669947,\n        -1.7705675,\n        -1.7852368,\n        -1.7799686,\n        -1.787016,\n        -1.7860427,\n        -1.7906151,\n        -1.804153,\n        -1.7959678,\n        -1.7886349,\n        -1.780712,\n        -1.8689903,\n        -1.8409675,\n        -1.8390886,\n        -1.7871509,\n        -1.7943671,\n        -1.8191289,\n        -1.8323922,\n        -2.0644372,\n        -1.811986,\n        -1.9270225,\n        -1.8859811,\n        -1.8729037,\n        -1.7891858,\n        -1.912327,\n        -1.8494794,\n        -2.0217195,\n        -2.0321834,\n        -1.8470848,\n        -1.8650146,\n        -1.8897678,\n        -2.2105854,\n        -1.8611658,\n        -2.2318077,\n        -1.9293029,\n        -1.9301625,\n        -2.0512986,\n        -1.9385352,\n        -1.8857056,\n        -2.0632272,\n        -2.0671606,\n        -3.1690257,\n        -1.845041,\n        -2.0162363,\n        -1.9533851,\n        -2.3013062,\n        -1.942272,\n        -2.048776,\n        -2.0071447,\n        -1.9014169,\n        -1.8215562,\n        -1.8878288,\n        -2.084124,\n        -2.105544,\n        -1.930875,\n        -1.8534234,\n        -2.4047348,\n        -2.0737162,\n        -2.0852304,\n        -2.2555728,\n        -1.9201483,\n        -2.3723779,\n        -2.4816203,\n        -2.2572203,\n        -2.00026,\n        -2.2352657,\n        -3.4017093,\n        -2.3812375,\n        -2.3600125,\n        -1.948214,\n        -2.378624,\n        -5.116084,\n        -2.0990605,\n        -2.6211152,\n        -2.379424,\n        -2.3524148,\n        -3.2209916,\n        -2.104442,\n        -2.2540069,\n        -3.4046257,\n        -2.0181348,\n        -2.247497,\n        
-2.21532,\n        -2.3385,\n        -2.1694512,\n        -2.1761441,\n        -3.4597387,\n        -3.4778054,\n        -3.3390796,\n        -2.1121626,\n        -2.5394504,\n        -2.52151,\n        -2.0399206,\n        -2.916308,\n        -2.3191895,\n        -2.467902,\n        -2.1683786,\n        -2.0806942,\n        -2.1553402,\n        -2.9445858,\n        -2.1864026,\n        -2.1571403,\n        -2.1272745,\n        -2.2371612,\n        -1.9967877,\n        -2.1002073,\n        -1.8886058,\n        -1.9997722,\n        -2.4290895,\n        -2.2380104,\n        -2.5270011,\n        -2.1228848,\n        -1.9394033,\n        -2.0246792,\n        -2.047522,\n        -1.9095999,\n        -2.7972586,\n        -2.8287325,\n        -2.2579675,\n        -2.2667637,\n        -2.340637,\n        -2.446997,\n        -2.7029297,\n        -2.5301113,\n        -2.0188444,\n        -3.9801939,\n        -4.3221707,\n        -3.7231228,\n        -3.0825982,\n        -3.1652906,\n        -2.4247813,\n        -3.682891,\n        -2.4476895,\n        -3.1326087,\n        -3.161309,\n        -3.8905435,\n        -4.791954,\n        -3.681021,\n        -3.9389684,\n        -2.7608206,\n        -5.541943,\n        -2.8197322,\n        -4.649828,\n        -2.5776565,\n        -7.7122736,\n        -2.6602604,\n        -6.0711694,\n        -5.9011617,\n        -2.451023,\n        -2.4370933,\n        -3.926926,\n        -3.1887536,\n        -3.626978,\n        -3.3805752,\n        -2.938607,\n        -2.6062891,\n        -3.9242072,\n        -4.9757347,\n        -3.2677634,\n        -3.495387,\n        -3.2367918,\n        -4.1905117,\n        -5.1742563,\n        -4.6259055,\n        -6.2827764,\n        -3.8574424,\n        -2.5101068,\n        -3.5722072,\n        -2.9579268,\n        -3.013874,\n        -3.0330877,\n        -2.6339154,\n        -2.8156097,\n        -7.825379,\n        -2.2691917,\n        -2.188605,\n        -3.525295,\n        -3.6323483,\n        
-4.5217543,\n        -3.9747667,\n        -3.508919,\n        -4.025803,\n        -3.7148688,\n        -2.4592361,\n        -7.4555864,\n        -3.52768,\n        -6.5292764,\n        -3.0102146,\n        -2.1134815,\n        -3.1245093,\n        -4.395163,\n        -6.3417835,\n        -2.6456265,\n        -3.9936397,\n        -2.5499356,\n        -4.0562053,\n        -4.370556,\n        -2.4170387,\n        -4.2614927,\n        -2.9061453,\n        -2.6378443,\n        -2.8298671,\n        -3.002344,\n        -3.434466,\n        -2.5555396,\n        -2.2291453,\n        -2.8412664,\n        -3.4274383,\n        -3.688502,\n        -2.2401543,\n        -2.257583,\n        -2.260312,\n        -5.796482,\n        -2.4867,\n        -5.135162,\n        -2.5456197,\n        -2.6322596,\n        -2.2805068,\n        -3.0550194,\n        -2.5237148,\n        -3.818549,\n        -3.6654675,\n        -2.6598873,\n        -2.7249722,\n        -3.9809496,\n        -2.8234189,\n        -2.1623554,\n        -2.6842587,\n        -2.3207495,\n        -3.400828,\n        -2.1300373,\n        -2.0394132,\n        -2.4269812,\n        -3.234777,\n        -2.1932037,\n        -2.0368474,\n        -4.900823,\n        -4.0642343,\n        -2.8681023,\n        -4.077743,\n        -4.435043,\n        -3.6128042,\n        -2.295886,\n        -2.7263608,\n        -2.4196534,\n        -3.7086027,\n        -3.7876077,\n        -4.5064445,\n        -3.5139484,\n        -4.9897804,\n        -2.8504028,\n        -3.0639005,\n        -4.883213\n      ],\n      \"pointIndex\": [\n        4,\n        1030,\n        256,\n        729477161,\n        666722929,\n        867092667,\n        15851955,\n        199407070,\n        728920587,\n        105566440,\n        672057594,\n        304475970,\n        96243294,\n        10170045,\n        803807104,\n        140289208,\n        729748292,\n        72926334,\n        309953025,\n        1047057688,\n        963181714,\n        58597307,\n      
  229197315,\n        729472995,\n        88170001,\n        546851916,\n        729277820,\n        106065483,\n        968337439,\n        763717344,\n        96905421,\n        800893290,\n        800260166,\n        728888411,\n        22150304,\n        1048984053,\n        154146676,\n        992363062,\n        353550576,\n        869454101,\n        976773150,\n        856137810,\n        729104404,\n        367962993,\n        968829758,\n        1028357464,\n        915599229,\n        951370357,\n        893578912,\n        7468862,\n        367999715,\n        105780275,\n        729527444,\n        294239272,\n        316073828,\n        96071757,\n        59596822,\n        95982658,\n        644094438,\n        296472609,\n        5367309,\n        96690087,\n        367066252,\n        96911156,\n        73831936,\n        956087054,\n        611836366,\n        284826207,\n        88163963,\n        367394684,\n        728817819,\n        96690251,\n        35892540,\n        243004824,\n        25130781,\n        242741397,\n        12972296,\n        218635775,\n        729012748,\n        222536880,\n        956465631,\n        172678074,\n        729284679,\n        528951533,\n        729376091,\n        546882251,\n        969053348,\n        956303298,\n        1026\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 7626925126556980011\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.6453152,\n        -1.6614803,\n        -1.6473477,\n        -1.6637434,\n        -1.6657788,\n        -1.6709785,\n        -1.6593904,\n        -1.7031668,\n        -1.6661329,\n        -1.7637069,\n        -1.6732891,\n        -1.6751361,\n        -1.6830605,\n        
-1.7353961,\n        -1.7056031,\n        -1.7816361,\n        -1.7211266,\n        -1.6736615,\n        -1.6712811,\n        -1.7788934,\n        -1.7689418,\n        -1.98051,\n        -1.7674062,\n        -1.6919633,\n        -1.726131,\n        -1.6992354,\n        -1.7500296,\n        -1.8240614,\n        -1.7669265,\n        -1.7340643,\n        -1.7259476,\n        -1.7856888,\n        -2.0210254,\n        -1.7659334,\n        -1.9669611,\n        -1.7746406,\n        -1.7213353,\n        -1.8095107,\n        -1.7860314,\n        -1.7844567,\n        -1.8628109,\n        -1.8234218,\n        -1.7996061,\n        -2.0139098,\n        -2.4851983,\n        -1.8392967,\n        -1.8668737,\n        -1.7904509,\n        -1.7440853,\n        -1.7646767,\n        -1.7269187,\n        -2.0644567,\n        -1.8533282,\n        -1.826338,\n        -1.8013415,\n        -2.2637458,\n        -2.1390707,\n        -1.7990445,\n        -2.1773386,\n        -1.8273649,\n        -1.8796239,\n        -2.0236528,\n        -1.7728702,\n        -1.796254,\n        -2.532183,\n        -2.2640858,\n        -2.4402175,\n        -3.0296679,\n        -2.314462,\n        -2.0515208,\n        -1.9945575,\n        -2.1140153,\n        -2.1377769,\n        -2.3789532,\n        -1.8386341,\n        -2.9078696,\n        -2.1706057,\n        -1.9561146,\n        -2.509588,\n        -2.4163966,\n        -1.8126116,\n        -2.8045378,\n        -2.3591607,\n        -1.9112797,\n        -2.2252865,\n        -2.481348,\n        -1.8035455,\n        -2.317629,\n        -2.1645021,\n        -2.5073428,\n        -3.436823,\n        -2.3951838,\n        -2.470066,\n        -1.8905722,\n        -2.4852116,\n        -2.6534238,\n        -1.9617916,\n        -2.4085793,\n        -1.7725836,\n        -2.2931128,\n        -2.3572016,\n        -1.8851287,\n        -1.7457126,\n        -2.2169604,\n        -2.1736746,\n        -2.0225914,\n        -2.203509,\n        -2.3658428,\n        -2.001331,\n      
  -2.306623,\n        -1.9306526,\n        -3.3003302,\n        -2.761781,\n        -2.2639189,\n        -2.63765,\n        -1.8915013,\n        -2.467299,\n        -2.368252,\n        -2.4827886,\n        -2.3959568,\n        -1.862398,\n        -2.3019955,\n        -2.168439,\n        -2.248333,\n        -2.3444448,\n        -2.6407728,\n        -1.7951647,\n        -2.3113253,\n        -2.0520954,\n        -3.3269024,\n        -2.776521,\n        -2.9649208,\n        -3.2885637,\n        -3.7380702,\n        -2.5704145,\n        -4.2792087,\n        -3.7526665,\n        -4.164063,\n        -2.572014,\n        -2.4035897,\n        -2.801594,\n        -3.9647415,\n        -2.0116417,\n        -3.0158849,\n        -2.9728572,\n        -3.526608,\n        -2.900915,\n        -3.164121,\n        -2.682977,\n        -2.2862837,\n        -2.2686222,\n        -3.430326,\n        -4.1960926,\n        -3.1282237,\n        -3.2611117,\n        -3.4850717,\n        -3.356468,\n        -3.7435386,\n        -4.6425886,\n        -2.7259989,\n        -4.021318,\n        -2.699796,\n        -2.2573965,\n        -2.9755998,\n        -3.3193903,\n        -2.7120194,\n        -2.8888395,\n        -2.9086554,\n        -2.017178,\n        -3.199633,\n        -2.9147167,\n        -4.1172676,\n        -5.221965,\n        -1.826881,\n        -3.5926323,\n        -2.884518,\n        -2.638278,\n        -4.167975,\n        -3.7219253,\n        -3.134112,\n        -3.9213047,\n        -3.4879425,\n        -5.1565504,\n        -3.8892734,\n        -2.8100936,\n        -3.8140543,\n        -3.069654,\n        -2.1012354,\n        -2.0253437,\n        -3.95567,\n        -2.640066,\n        -2.6701024,\n        -3.9289021,\n        -2.226513,\n        -2.8312628,\n        -4.4321938,\n        -2.4696536,\n        -3.551235,\n        -1.954873,\n        -4.96489,\n        -3.6705532,\n        -2.6112492,\n        -4.298699,\n        -1.9479235,\n        -2.187532,\n        -2.2313077,\n        
-2.9024248,\n        -3.2534902,\n        -2.7297554,\n        -2.3533392,\n        -4.280109,\n        -3.1994207,\n        -2.2891314,\n        -3.4899526,\n        -3.4446754,\n        -2.743274,\n        -2.7455976,\n        -3.549796,\n        -3.896597,\n        -3.74461,\n        -3.1503246,\n        -1.9323839,\n        -3.2648163,\n        -3.516856,\n        -4.318212,\n        -2.9047408,\n        -2.7972558,\n        -3.5502274,\n        -6.969227,\n        -4.3229594,\n        -2.974017,\n        -4.5718613,\n        -2.1149542,\n        -3.5208528,\n        -2.5356367,\n        -5.1357164,\n        -2.7164354,\n        -3.223692,\n        -2.9092143,\n        -8.328084,\n        -3.312988,\n        -1.9736284,\n        -2.565976,\n        -2.4032283,\n        -5.665805,\n        -2.4105263,\n        -5.0699058,\n        -3.0245457,\n        -3.0754614,\n        -2.4222598,\n        -2.4483094,\n        -4.354423,\n        -2.9659104,\n        -6.277456,\n        -1.8456411\n      ],\n      \"pointIndex\": [\n        0,\n        1033,\n        255,\n        737909804,\n        264081900,\n        102740904,\n        41043409,\n        1092182004,\n        111961536,\n        93844074,\n        164839321,\n        985981554,\n        738131265,\n        111808790,\n        627635953,\n        102750122,\n        62725430,\n        34221341,\n        151864513,\n        543494778,\n        556015113,\n        826005706,\n        203773138,\n        1090008334,\n        568827872,\n        135932481,\n        48637673,\n        25836568,\n        665053228,\n        556675487,\n        387133972,\n        34352709,\n        11775348,\n        1072431840,\n        152979362,\n        4990641,\n        985989549,\n        1083155150,\n        271570003,\n        310343043,\n        556915269,\n        430146251,\n        366244972,\n        896197824,\n        103353170,\n        103531376,\n        602574436,\n        111405311,\n        236030666,\n       
 285109047,\n        767762052,\n        854458550,\n        418148486,\n        103352540,\n        103493096,\n        910349206,\n        3178002,\n        608561031,\n        738462145,\n        927044910,\n        103237758,\n        103418766,\n        102738444,\n        567463329,\n        196405325,\n        310837944,\n        48905156,\n        46123288,\n        789070312,\n        145308117,\n        41405525,\n        167550102,\n        229613352,\n        949425728,\n        772107533,\n        109468212,\n        111489490,\n        1044589884,\n        727054296,\n        55625362,\n        819190664,\n        556060471,\n        772046948,\n        107684267,\n        1018028166,\n        601744516,\n        738205241,\n        1104533179\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": 7002002977686977193\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7950889,\n        -1.8024329,\n        -1.7982606,\n        -1.8035696,\n        -1.8281398,\n        -1.8639559,\n        -1.8087004,\n        -1.804131,\n        -1.8129811,\n        -1.8773781,\n        -1.8434898,\n        -1.8707309,\n        -1.8924979,\n        -1.8152995,\n        -1.8171902,\n        -1.8398887,\n        -1.8526347,\n        -1.8247981,\n        -2.1454496,\n        -1.8797507,\n        -1.9207648,\n        -1.8511003,\n        -1.8830664,\n        -1.941836,\n        -2.0017285,\n        -2.0601752,\n        -2.2392924,\n        -1.8281189,\n        -1.9432348,\n        -2.0512037,\n        -1.8189921,\n        -2.1194618,\n        -1.8805789,\n        -1.9361976,\n        -2.0394275,\n        -1.9197465,\n        -1.9410212,\n        -2.3605905,\n        -2.348308,\n        
-1.9919758,\n        -2.149609,\n        -1.9918503,\n        -2.4071586,\n        -2.0133476,\n        -2.017764,\n        -1.9526561,\n        -1.8889744,\n        -1.9908056,\n        -1.9667652,\n        -2.3325937,\n        -2.0894902,\n        -2.076378,\n        -2.1836076,\n        -2.3076813,\n        -2.2943592,\n        -1.8839735,\n        -1.9707458,\n        -1.983549,\n        -1.995676,\n        -2.0890472,\n        -2.862559,\n        -1.8193679,\n        -1.8380246,\n        -2.2423315,\n        -2.468462,\n        -2.0481381,\n        -1.9301186,\n        -2.1135626,\n        -2.0003445,\n        -3.339482,\n        -2.0420012,\n        -2.6025243,\n        -1.9849141,\n        -2.2839487,\n        -2.1166253,\n        -3.2656379,\n        -2.4801033,\n        -3.483298,\n        -2.6237442,\n        -2.5392437,\n        -2.0321038,\n        -2.7748365,\n        -2.5770319,\n        -2.2059803,\n        -2.0158105,\n        -2.8712418,\n        -3.0587385,\n        -2.089214,\n        -2.257485,\n        -2.7786255,\n        -2.223792,\n        -2.0820284,\n        -2.5444684,\n        -1.955107,\n        -1.9992361,\n        -2.177288,\n        -2.8146691,\n        -2.2187133,\n        -2.0178208,\n        -2.4498284,\n        -2.379427,\n        -2.415771,\n        -2.6705427,\n        -2.2836223,\n        -2.1893816,\n        -2.6651056,\n        -2.4998262,\n        -3.0688515,\n        -2.328383,\n        -2.576427,\n        -2.8966498,\n        -2.1519067,\n        -2.0669699,\n        -2.2762961,\n        -2.376951,\n        -2.0982225,\n        -2.240475,\n        -2.5130389,\n        -2.325388,\n        -3.9591777,\n        -2.5050554,\n        -3.4666228,\n        -3.394599,\n        -1.8523328,\n        -1.8810489,\n        -2.2190344,\n        -2.1856875,\n        -2.3854992,\n        -3.3166938,\n        -3.4789212,\n        -2.932038,\n        -2.1035392,\n        -2.2995434,\n        -2.8314764,\n        -2.635736,\n        
-4.288215,\n        -3.381147,\n        -2.484196,\n        -2.5066197,\n        -5.4097366,\n        -3.6838872,\n        -3.7094543,\n        -2.7782228,\n        -2.649833,\n        -2.6238856,\n        -2.0065053,\n        -2.1537538,\n        -2.9406908,\n        -4.200969,\n        -2.1703742,\n        -3.143672,\n        -4.5232615,\n        -4.398172,\n        -3.290937,\n        -5.564251,\n        -5.6624002,\n        -3.6470442,\n        -3.0140178,\n        -4.956365,\n        -4.4648337,\n        -4.55664,\n        -5.5627747,\n        -2.6607583,\n        -3.151384,\n        -5.553583,\n        -4.391885,\n        -3.7576783,\n        -2.8761003,\n        -2.5753236,\n        -3.6718168,\n        -4.6668377,\n        -4.191763,\n        -3.9836898,\n        -3.803054,\n        -4.1707954,\n        -2.4958916,\n        -2.9946797,\n        -2.7124336,\n        -4.257737,\n        -3.2523026,\n        -4.0422,\n        -5.5625987,\n        -6.711552,\n        -2.9596972,\n        -2.676174,\n        -2.935767,\n        -2.7992232,\n        -2.1417933,\n        -3.6941488,\n        -2.061812,\n        -3.19682,\n        -2.2330732,\n        -2.647088,\n        -3.0428147,\n        -3.0830877,\n        -5.725817,\n        -2.8337967,\n        -3.7987382,\n        -3.9266934,\n        -3.8552983,\n        -2.9410214,\n        -3.116069,\n        -2.8893232,\n        -2.753992,\n        -3.271512,\n        -5.0558834,\n        -3.365363,\n        -2.5611825,\n        -3.649718,\n        -2.613497,\n        -2.3420799,\n        -4.52002,\n        -6.0026717,\n        -2.8498583,\n        -2.837464,\n        -3.9614494,\n        -3.5146484,\n        -2.8133976,\n        -3.1637628,\n        -5.4409914,\n        -3.1536539,\n        -2.9955816,\n        -3.5310907,\n        -2.2398264,\n        -2.1533244,\n        -3.271155,\n        -2.6116178,\n        -2.973396,\n        -3.0415344,\n        -2.4453475,\n        -3.3096204,\n        -5.7699924,\n        
-3.7320168,\n        -3.1326647,\n        -3.3863149,\n        -3.453471,\n        -2.6393962,\n        -4.4104514,\n        -5.6062555,\n        -7.18547,\n        -4.151678,\n        -6.0377564,\n        -2.6741362,\n        -3.4682512,\n        -6.467457,\n        -5.2168,\n        -3.5244913,\n        -3.5629482,\n        -2.1165059,\n        -4.5981874,\n        -2.5121496,\n        -2.3814912,\n        -2.918366,\n        -6.7975726\n      ],\n      \"pointIndex\": [\n        0,\n        1031,\n        254,\n        15808997,\n        31395584,\n        544325994,\n        205012329,\n        1007602689,\n        110781380,\n        1056602209,\n        787366551,\n        299630002,\n        501655347,\n        555405793,\n        658572095,\n        329207652,\n        439883859,\n        330256018,\n        555078540,\n        356945520,\n        993130394,\n        393202098,\n        461868252,\n        1073954910,\n        757216739,\n        565415956,\n        827566055,\n        283526957,\n        69841290,\n        111142829,\n        17111873,\n        63549186,\n        633788108,\n        955804368,\n        8627587,\n        944323890,\n        466112183,\n        30299,\n        12940510,\n        847685513,\n        983116125,\n        165168896,\n        808547922,\n        664665450,\n        1071175664,\n        955495765,\n        544229432,\n        138450399,\n        495508604,\n        17748440,\n        101276153,\n        4109341,\n        1049999802,\n        84883502,\n        73497080,\n        1068317207,\n        101480069,\n        849438381,\n        554984832,\n        101981303,\n        271927530,\n        691435967,\n        517016733,\n        279748431,\n        101669033,\n        861421531,\n        735102160,\n        88934859,\n        101889977,\n        111475607,\n        494559182,\n        25668594,\n        1028971096,\n        1043241290,\n        697229083,\n        422864340,\n        101978623,\n        
999704685,\n        704689540,\n        15396512,\n        735766122,\n        554971642,\n        55886345,\n        735144659,\n        894649641,\n        711489373,\n        99958146,\n        582143\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 254,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1494,\n      \"compressed\": true,\n      \"randomSeed\": -2136158392537716479\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.8389049,\n        -1.8582478,\n        -1.8621566,\n        -1.8643055,\n        -1.8693254,\n        -1.8780724,\n        -1.8710219,\n        -1.8679392,\n        -1.9071573,\n        -1.8911194,\n        -1.874181,\n        -1.9101987,\n        -1.9005471,\n        -1.9019557,\n        -1.8884401,\n        -1.8702489,\n        -1.9078918,\n        -2.0959382,\n        -1.9267707,\n        -1.9354508,\n        -1.9006048,\n        -1.9771824,\n        -2.0172932,\n        -1.9246653,\n        -1.9861399,\n        -1.9785607,\n        -2.008981,\n        -1.9311252,\n        -2.0913882,\n        -1.8953483,\n        -1.9956613,\n        -1.9409268,\n        -1.9159292,\n        -2.1305833,\n        -2.1965997,\n        -2.1932933,\n        -2.1346116,\n        -2.2781348,\n        -2.4737878,\n        -2.0728786,\n        -1.9427085,\n        -1.921938,\n        -1.9417433,\n        -2.0542886,\n        -2.1985457,\n        -2.2231855,\n        -2.027962,\n        -2.358813,\n        -2.2925675,\n        -2.1171737,\n        -2.3344688,\n        -2.149847,\n        -2.000718,\n        -2.2742512,\n        -2.07202,\n        -1.970888,\n        -2.2694538,\n        -2.481769,\n        -2.4127634,\n        -2.04462,\n        -2.1020927,\n        -2.046081,\n        -2.0036356,\n        -2.0248015,\n        -2.5876567,\n        -2.2180665,\n        
-2.2848003,\n        -2.1774225,\n        -2.695223,\n        -2.599535,\n        -2.4118931,\n        -2.378754,\n        -3.6088274,\n        -2.1606565,\n        -2.4938564,\n        -2.9503899,\n        -3.5877342,\n        -2.6274383,\n        -2.642765,\n        -2.1210396,\n        -2.399021,\n        -1.9635254,\n        -1.9627998,\n        -2.1645207,\n        -2.595981,\n        -2.034358,\n        -2.6089687,\n        -2.1189146,\n        -2.514278,\n        -2.2882314,\n        -2.616787,\n        -2.2300506,\n        -2.3724105,\n        -2.0632086,\n        -2.0844212,\n        -2.4226224,\n        -2.424359,\n        -2.4508584,\n        -2.377973,\n        -2.2268398,\n        -2.174555,\n        -2.3871496,\n        -2.4086947,\n        -4.8962784,\n        -2.5416145,\n        -2.8147495,\n        -2.7882042,\n        -3.0068736,\n        -2.8295286,\n        -2.977355,\n        -2.2350152,\n        -2.3157363,\n        -2.116972,\n        -2.3414912,\n        -2.4307325,\n        -2.5298557,\n        -2.9733257,\n        -2.7213256,\n        -2.7864554,\n        -2.0854812,\n        -2.061156,\n        -2.5432658,\n        -2.4814675,\n        -2.3671818,\n        -2.050107,\n        -2.8674266,\n        -2.1412132,\n        -2.115003,\n        -2.309362,\n        -2.9748385,\n        -3.1750696,\n        -6.117957,\n        -3.2993228,\n        -2.4326904,\n        -2.7656958,\n        -3.6295707,\n        -3.7796872,\n        -3.4501362,\n        -3.5745273,\n        -2.862744,\n        -3.1402147,\n        -3.124184,\n        -4.393241,\n        -2.6333466,\n        -2.4891274,\n        -6.58714,\n        -6.1481028,\n        -2.1976051,\n        -3.243949,\n        -2.6412618,\n        -2.675868,\n        -3.130527,\n        -3.8998673,\n        -4.5808806,\n        -3.8964477,\n        -2.8419526,\n        -4.0908203,\n        -3.4633973,\n        -2.8850899,\n        -2.430864,\n        -4.3311377,\n        -3.332464,\n        
-3.5289812,\n        -3.8982253,\n        -3.044324,\n        -3.3283892,\n        -2.657505,\n        -5.225944,\n        -2.4993463,\n        -2.7446127,\n        -3.8239253,\n        -2.9740188,\n        -3.3479397,\n        -2.9222534,\n        -5.0559607,\n        -4.0083375,\n        -4.7264814,\n        -2.9090033,\n        -4.7571225,\n        -3.8021138,\n        -3.5721817,\n        -2.670062,\n        -4.261798,\n        -2.711046,\n        -2.59482,\n        -2.9176433,\n        -10.787248,\n        -2.744291,\n        -2.708359,\n        -2.5521755,\n        -3.1740572,\n        -4.420747,\n        -2.8186436,\n        -2.6948342,\n        -3.2279184,\n        -3.211412,\n        -2.950143,\n        -2.4618986,\n        -2.6309628,\n        -3.0331423,\n        -4.045979,\n        -2.6429436,\n        -3.946502,\n        -2.7471793,\n        -2.5223243,\n        -2.8541257,\n        -4.1439,\n        -5.1998134,\n        -6.639591,\n        -4.3845534,\n        -2.8370016,\n        -4.676921,\n        -3.8033836,\n        -4.114506,\n        -4.5905237,\n        -3.1949565,\n        -3.0241742,\n        -3.3643944,\n        -5.335859,\n        -9.016249,\n        -4.3608932,\n        -2.8401525,\n        -3.3107944,\n        -3.051773,\n        -2.9649436,\n        -3.1472259,\n        -2.704784,\n        -2.5415337,\n        -2.8733041,\n        -2.9558587,\n        -2.8383915,\n        -4.776263,\n        -2.5792568,\n        -3.0381863,\n        -3.0412335,\n        -3.6932375,\n        -3.6586297,\n        -4.4209785,\n        -3.0423696,\n        -2.1743321,\n        -3.8594322,\n        -3.0191092,\n        -3.0187564,\n        -2.97037,\n        -5.26951,\n        -3.5140324,\n        -3.0615833,\n        -4.0525646,\n        -7.478893,\n        -3.0442631,\n        -2.15131,\n        -5.027723,\n        -4.1371365,\n        -2.7515888,\n        -3.10321,\n        -2.9851701\n      ],\n      \"pointIndex\": [\n        8,\n        1033,\n        
256,\n        274834802,\n        722840473,\n        231678069,\n        795912372,\n        815986087,\n        274399687,\n        91672173,\n        723028016,\n        746495215,\n        722424190,\n        668321911,\n        450435642,\n        959782935,\n        814222401,\n        91643433,\n        431947113,\n        640329714,\n        440239316,\n        803290989,\n        407064496,\n        948964996,\n        736532039,\n        89436982,\n        102027248,\n        774952204,\n        906155678,\n        42942890,\n        722225094,\n        150976907,\n        705382868,\n        791125,\n        738621164,\n        723101289,\n        92181983,\n        309633540,\n        181631932,\n        214993954,\n        673129959,\n        607445380,\n        386690518,\n        158514299,\n        697817580,\n        135764299,\n        98727903,\n        929611432,\n        373798555,\n        526785268,\n        187422052,\n        101146420,\n        101266241,\n        195809237,\n        523883877,\n        739602867,\n        87932408,\n        625076074,\n        648537731,\n        429581157,\n        60706746,\n        56887256,\n        92390361,\n        96658348,\n        91668470,\n        13828540,\n        413818852,\n        128077719,\n        75864152,\n        722531227,\n        142816003,\n        156383949,\n        517013490,\n        573720696,\n        83774327,\n        722413657,\n        722293852,\n        649538714,\n        890518968,\n        76366437,\n        527262720,\n        299555085,\n        324557643,\n        17994668,\n        491599935,\n        629151794,\n        194396952,\n        877713305,\n        1025\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n    
  \"randomSeed\": -4741493720461802206\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.752243,\n        -1.7533684,\n        -1.7537396,\n        -1.7677537,\n        -1.7604302,\n        -1.7603213,\n        -1.7693717,\n        -1.8080763,\n        -1.7989587,\n        -1.7701063,\n        -1.7832211,\n        -1.8612016,\n        -1.8499945,\n        -1.8546313,\n        -1.7741475,\n        -1.8801541,\n        -1.8818759,\n        -1.8566812,\n        -1.8662964,\n        -1.9340073,\n        -1.919586,\n        -1.9013982,\n        -2.041985,\n        -2.0190444,\n        -1.879311,\n        -1.942772,\n        -1.8506889,\n        -1.8971379,\n        -1.8959066,\n        -1.8148371,\n        -1.7917287,\n        -1.944895,\n        -1.8909316,\n        -2.082204,\n        -2.2887707,\n        -1.9522427,\n        -2.1629555,\n        -2.2777903,\n        -2.1407144,\n        -1.9476439,\n        -2.603377,\n        -2.0637445,\n        -2.1055667,\n        -1.9408816,\n        -2.0634787,\n        -2.3307023,\n        -2.4172614,\n        -2.4749308,\n        -2.122187,\n        -1.9501388,\n        -1.9178616,\n        -2.0424654,\n        -2.30427,\n        -2.0592194,\n        -2.0665574,\n        -1.9108924,\n        -1.9543146,\n        -2.02071,\n        -2.1656165,\n        -1.8619995,\n        -1.9695305,\n        -1.8394514,\n        -1.832046,\n        -2.5664601,\n        -2.0323565,\n        -1.9110042,\n        -2.181856,\n        -3.7353666,\n        -2.5042226,\n        -2.4701302,\n        -2.5479352,\n        -2.082757,\n        -2.0454297,\n        -4.2367597,\n        -2.5641592,\n        -2.2863944,\n        -2.2945366,\n        -2.7220585,\n        -2.2455914,\n        -2.1216009,\n        -2.223087,\n        -3.6326237,\n        -2.7867792,\n        -2.5062628,\n        -2.1318126,\n        -3.077885,\n        -2.2636397,\n        -3.8174121,\n        -2.0398188,\n        -2.417976,\n        -2.0693085,\n   
     -2.4093754,\n        -2.6121898,\n        -2.7665045,\n        -2.5095088,\n        -2.542665,\n        -3.0018454,\n        -2.467339,\n        -2.4261658,\n        -2.0704176,\n        -2.5217953,\n        -1.9301082,\n        -2.1728778,\n        -2.2834375,\n        -2.3522706,\n        -2.3934624,\n        -2.7616513,\n        -2.2527175,\n        -2.216753,\n        -2.2046478,\n        -2.2435293,\n        -2.096313,\n        -2.1982265,\n        -2.4378765,\n        -2.0432262,\n        -2.274692,\n        -2.4702756,\n        -2.8248513,\n        -2.4346766,\n        -2.127786,\n        -2.6401625,\n        -3.0516562,\n        -2.354917,\n        -1.8942295,\n        -3.3560169,\n        -3.3887403,\n        -1.8469124,\n        -4.950463,\n        -2.9408238,\n        -2.9539542,\n        -2.16367,\n        -3.6794176,\n        -4.468372,\n        -3.0370595,\n        -2.718664,\n        -3.8908901,\n        -5.214369,\n        -4.506366,\n        -2.604456,\n        -3.2428453,\n        -5.4598346,\n        -4.3525577,\n        -3.7062507,\n        -4.2400312,\n        -2.6525626,\n        -7.7891884,\n        -2.1886504,\n        -4.5189743,\n        -4.713356,\n        -7.961303,\n        -2.636962,\n        -2.9795358,\n        -2.3244765,\n        -2.4545517,\n        -4.193894,\n        -4.1652184,\n        -2.9075158,\n        -2.6870363,\n        -3.210856,\n        -5.10711,\n        -2.1955056,\n        -3.7639387,\n        -3.14421,\n        -4.9564524,\n        -3.759317,\n        -4.1807175,\n        -3.5626283,\n        -4.674296,\n        -5.1786294,\n        -2.2147112,\n        -4.037663,\n        -3.3469887,\n        -3.2820742,\n        -4.46251,\n        -2.584488,\n        -6.760511,\n        -4.0670295,\n        -3.897869,\n        -4.645763,\n        -4.12243,\n        -2.8523934,\n        -2.4867897,\n        -4.4655023,\n        -2.838182,\n        -2.8930097,\n        -2.8383503,\n        -3.3800075,\n        -4.1138377,\n  
      -3.0825334,\n        -2.861209,\n        -3.8822439,\n        -4.4328766,\n        -2.6305277,\n        -3.8431482,\n        -5.4775,\n        -3.3559694,\n        -2.9085054,\n        -3.1475658,\n        -3.3428876,\n        -2.573073,\n        -4.376477,\n        -4.332161,\n        -4.128444,\n        -2.3647585,\n        -3.13329,\n        -3.1092715,\n        -2.5185754,\n        -2.960571,\n        -5.9437976,\n        -3.7282207,\n        -2.4549882,\n        -2.7592452,\n        -3.1832874,\n        -4.007347,\n        -3.395472,\n        -3.5422246,\n        -3.5614986,\n        -2.716633,\n        -2.8029552,\n        -2.3256674,\n        -2.3200977,\n        -2.4524872,\n        -3.2619445,\n        -4.8563776,\n        -2.366631,\n        -3.9910896,\n        -4.675242,\n        -2.8386018,\n        -5.1089964,\n        -3.2167192,\n        -2.1725712,\n        -3.0112796,\n        -2.9056556,\n        -2.5540504,\n        -3.3297763,\n        -2.8778107,\n        -3.0756452,\n        -2.8496907,\n        -3.207745,\n        -3.5825412,\n        -2.3615453,\n        -2.6907015,\n        -5.2823467,\n        -3.7028236,\n        -4.088507,\n        -2.8520346,\n        -2.4795697,\n        -3.2107942,\n        -2.532128,\n        -5.2959027,\n        -4.060747,\n        -3.59214,\n        -3.4624677,\n        -4.1062493,\n        -2.3069463\n      ],\n      \"pointIndex\": [\n        0,\n        1030,\n        255,\n        599157497,\n        733163768,\n        430667831,\n        147887729,\n        1010523402,\n        330465266,\n        110646685,\n        881819272,\n        865481917,\n        88752414,\n        775336303,\n        111348581,\n        155252199,\n        8602322,\n        597396805,\n        715235725,\n        779861567,\n        3899346,\n        326648732,\n        542208487,\n        1039223259,\n        108522539,\n        12775810,\n        954706104,\n        1095577735,\n        978022754,\n        733212835,\n     
   348736900,\n        95473788,\n        412276462,\n        678114570,\n        706913169,\n        3604091,\n        749943297,\n        471954771,\n        347427408,\n        991820970,\n        643721315,\n        223456610,\n        732754695,\n        173730375,\n        819244216,\n        101113255,\n        611315637,\n        18169048,\n        1049668325,\n        827494107,\n        421570787,\n        1059333078,\n        192911752,\n        948894261,\n        101042534,\n        100982421,\n        193651786,\n        20516379,\n        64919072,\n        295658493,\n        101692273,\n        1043475195,\n        532477572,\n        136312642,\n        75568263,\n        1012091369,\n        972944514,\n        1040921449,\n        165170326,\n        17869111,\n        199152163,\n        523709110,\n        90337152,\n        101193502,\n        422937033,\n        519944870,\n        19135606,\n        337418591,\n        349593493,\n        1069682181,\n        970819812,\n        542474742,\n        812478370,\n        436972795,\n        560016287,\n        605535668,\n        768020755,\n        274656138\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 4112441456578588848\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.8468616,\n        -1.8577148,\n        -1.8584853,\n        -1.8745211,\n        -1.8839381,\n        -1.94503,\n        -1.8643191,\n        -1.9429883,\n        -1.9136758,\n        -1.887171,\n        -2.0373986,\n        -1.9586806,\n        -1.973279,\n        -1.8782005,\n        -1.9030881,\n        -1.9531076,\n        -1.9696432,\n        -1.9732479,\n        -2.0213916,\n        -1.8983077,\n        -1.8967689,\n        
-2.228283,\n        -2.0511718,\n        -2.143769,\n        -2.0200603,\n        -1.9988045,\n        -2.010393,\n        -2.0065608,\n        -1.9569137,\n        -2.0529115,\n        -1.9493077,\n        -1.972344,\n        -2.003803,\n        -2.0773947,\n        -2.3127484,\n        -2.2136736,\n        -2.3328328,\n        -2.0914764,\n        -2.2352169,\n        -1.9156684,\n        -2.0050547,\n        -1.9736114,\n        -2.0609012,\n        -2.4742713,\n        -2.408185,\n        -2.112794,\n        -2.0947654,\n        -2.252205,\n        -2.1628177,\n        -2.1104808,\n        -2.3102467,\n        -2.0179017,\n        -2.1094272,\n        -2.5425253,\n        -2.1143463,\n        -2.0709038,\n        -2.2949069,\n        -2.1579678,\n        -2.0980825,\n        -2.2455487,\n        -2.1234345,\n        -1.9941081,\n        -2.2426062,\n        -1.9967397,\n        -2.2771735,\n        -2.8543398,\n        -2.0841372,\n        -2.6515484,\n        -2.9777184,\n        -2.4021668,\n        -2.32482,\n        -2.5901673,\n        -2.2633555,\n        -2.6911893,\n        -2.66621,\n        -2.204409,\n        -2.5489252,\n        -2.259339,\n        -2.4003046,\n        -2.4987068,\n        -2.1814759,\n        -2.812882,\n        -2.5994446,\n        -2.30832,\n        -2.6440303,\n        -2.1507392,\n        -2.3437405,\n        -2.7207494,\n        -2.8433661,\n        -2.6356754,\n        -2.711919,\n        -2.1972442,\n        -2.7441664,\n        -2.2961626,\n        -2.968949,\n        -2.461496,\n        -2.7065513,\n        -3.0666206,\n        -2.5042677,\n        -2.1511161,\n        -2.410049,\n        -3.1150498,\n        -3.1232743,\n        -2.4711747,\n        -2.1338673,\n        -2.4432127,\n        -2.1258492,\n        -4.4138207,\n        -2.848164,\n        -2.72116,\n        -2.172272,\n        -2.3880615,\n        -2.2456584,\n        -3.668334,\n        -2.4958708,\n        -2.1932604,\n        -2.4945786,\n        
-3.4778702,\n        -2.1258447,\n        -2.7599905,\n        -2.3769214,\n        -2.2937937,\n        -2.162861,\n        -2.0463183,\n        -2.1354322,\n        -2.2427704,\n        -2.4339166,\n        -2.0415628,\n        -4.943682,\n        -3.4416797,\n        -2.5816982,\n        -5.9348507,\n        -3.8164678,\n        -2.6876583,\n        -5.4150963,\n        -3.630908,\n        -2.7943237,\n        -4.6381755,\n        -3.0852098,\n        -2.9603436,\n        -2.4037251,\n        -2.8560138,\n        -2.5588925,\n        -3.2732491,\n        -4.544583,\n        -3.545748,\n        -3.1403248,\n        -3.2913904,\n        -3.3318906,\n        -5.949521,\n        -3.3513587,\n        -3.6747687,\n        -4.3431773,\n        -2.784656,\n        -2.9858577,\n        -2.2760127,\n        -5.151183,\n        -2.8132288,\n        -2.4708939,\n        -3.2697375,\n        -3.1871397,\n        -3.1962037,\n        -3.3917303,\n        -2.8500671,\n        -3.5735934,\n        -3.4771545,\n        -3.7590485,\n        -4.591124,\n        -3.6463501,\n        -2.7249668,\n        -2.8283684,\n        -2.2954454,\n        -2.239896,\n        -3.2431097,\n        -2.3933356,\n        -3.5373518,\n        -3.6627674,\n        -4.899869,\n        -3.8739188,\n        -4.2079306,\n        -4.7997704,\n        -4.528742,\n        -5.6278872,\n        -2.3141618,\n        -2.4921196,\n        -3.3306365,\n        -2.9292293,\n        -5.6206913,\n        -2.9606137,\n        -3.4221005,\n        -3.5165875,\n        -3.1108592,\n        -2.7344708,\n        -3.2038844,\n        -2.8518286,\n        -4.1912704,\n        -3.926871,\n        -4.0916147,\n        -2.917304,\n        -2.835496,\n        -3.090715,\n        -3.7037914,\n        -5.6778426,\n        -3.4291205,\n        -4.4189024,\n        -4.103101,\n        -4.0025125,\n        -2.9999018,\n        -2.7050889,\n        -2.2683651,\n        -2.1401865,\n        -3.9155257,\n        -2.509768,\n        
-2.7189946,\n        -3.2245197,\n        -4.96693,\n        -5.27048,\n        -2.9756134,\n        -3.0993168,\n        -3.86548,\n        -2.7816083,\n        -2.9752896,\n        -3.4838676,\n        -2.7449431,\n        -2.4590232,\n        -2.763978,\n        -5.3659043,\n        -5.958337,\n        -3.8688467,\n        -2.6120355,\n        -3.0672078,\n        -2.8505383,\n        -3.0771983,\n        -2.9232905,\n        -3.3036036,\n        -4.107447,\n        -4.397995,\n        -2.456479,\n        -2.7572386,\n        -2.7803311,\n        -4.0642967,\n        -2.587039,\n        -2.8032584,\n        -2.5563083,\n        -6.0977263,\n        -2.1731086,\n        -6.140041,\n        -2.2150917,\n        -2.1759338,\n        -3.2463398,\n        -2.650551,\n        -3.2623303,\n        -7.156755,\n        -3.746851,\n        -4.3848667,\n        -2.2472951\n      ],\n      \"pointIndex\": [\n        0,\n        1034,\n        256,\n        680026234,\n        25753038,\n        12322195,\n        158222352,\n        628183527,\n        221921252,\n        102613109,\n        1046167656,\n        420157403,\n        101865424,\n        130688969,\n        867791470,\n        377508014,\n        551236938,\n        102803294,\n        169321924,\n        660760488,\n        17159871,\n        825170103,\n        664320069,\n        766642787,\n        840599011,\n        112123920,\n        741356230,\n        68618471,\n        659963173,\n        164969792,\n        17902792,\n        102050060,\n        65493591,\n        128223140,\n        583803222,\n        317799508,\n        377694450,\n        741395435,\n        157743738,\n        742004031,\n        832370462,\n        669705405,\n        1090208986,\n        557194788,\n        111052416,\n        101995009,\n        377742949,\n        112315196,\n        166604936,\n        159845504,\n        6249425,\n        402426115,\n        111691467,\n        112014602,\n        12402500,\n        
113626412,\n        377079561,\n        4891505,\n        17230538,\n        557671104,\n        350389592,\n        337418299,\n        995193557,\n        154041381,\n        72942473,\n        866821859,\n        734569468,\n        163175720,\n        101829412,\n        647736761,\n        159522,\n        218332428,\n        102385336,\n        380438010,\n        101779620,\n        557387877,\n        196072007,\n        311519757,\n        742211272,\n        1034906183,\n        37137878,\n        868791521,\n        170337170,\n        882633515,\n        742305584,\n        742053317,\n        1030514781,\n        873156198,\n        1002\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 4595323003598604010\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7530217,\n        -1.7586434,\n        -1.773849,\n        -1.762592,\n        -1.8131016,\n        -1.7756336,\n        -1.8009828,\n        -1.7741543,\n        -1.8676512,\n        -1.8646407,\n        -1.8707527,\n        -1.8453454,\n        -1.7917652,\n        -1.8828427,\n        -1.8115611,\n        -1.8559371,\n        -1.8460398,\n        -1.9498363,\n        -1.900532,\n        -1.9233395,\n        -1.894581,\n        -1.8888321,\n        -2.0496674,\n        -1.9423137,\n        -1.8953594,\n        -1.8436334,\n        -1.8194907,\n        -1.9315331,\n        -1.8865117,\n        -1.9552362,\n        -1.8730898,\n        -1.861845,\n        -1.8884866,\n        -1.860233,\n        -2.0214474,\n        -2.1342142,\n        -2.0347815,\n        -2.1565762,\n        -1.9986768,\n        -2.179284,\n        -2.2245903,\n        -1.955952,\n        -2.103774,\n        -2.0903578,\n        
-2.0280917,\n        -2.2951984,\n        -2.1308136,\n        -2.0280867,\n        -2.0078814,\n        -2.2627485,\n        -2.4328034,\n        -1.927272,\n        -2.1103585,\n        -1.9007114,\n        -1.9655051,\n        -2.1686418,\n        -2.275647,\n        -1.9243559,\n        -1.9329118,\n        -2.1062279,\n        -1.9933932,\n        -2.1088676,\n        -1.9385817,\n        -1.8690796,\n        -1.9547449,\n        -2.9583771,\n        -2.4123116,\n        -2.3242242,\n        -3.0487976,\n        -2.2495193,\n        -2.7892995,\n        -2.1749918,\n        -2.1613662,\n        -2.6085825,\n        -2.2442203,\n        -2.296857,\n        -2.157981,\n        -2.1260428,\n        -2.2767558,\n        -2.222487,\n        -2.5803642,\n        -2.616387,\n        -3.002638,\n        -2.5098755,\n        -2.300245,\n        -2.6012204,\n        -2.3810315,\n        -2.7191963,\n        -2.3465118,\n        -2.1526763,\n        -2.180598,\n        -2.3629231,\n        -2.407416,\n        -2.1790383,\n        -2.1630478,\n        -2.5708795,\n        -2.7394047,\n        -2.1088605,\n        -2.1446357,\n        -2.8772933,\n        -2.3019164,\n        -2.7520542,\n        -2.7482584,\n        -2.1248322,\n        -1.9692047,\n        -2.289044,\n        -2.3144379,\n        -2.1382654,\n        -1.9641585,\n        -2.0770772,\n        -2.445008,\n        -2.7688713,\n        -2.2054334,\n        -2.3642776,\n        -2.3608549,\n        -2.1265533,\n        -2.2273238,\n        -1.9348043,\n        -1.9927022,\n        -2.584606,\n        -2.2076051,\n        -2.110738,\n        -2.1699257,\n        -2.2359812,\n        -2.5681713,\n        -2.115875,\n        -2.0355823,\n        -1.9197799,\n        -3.7159243,\n        -2.3769798,\n        -3.2322044,\n        -3.3137698,\n        -2.9943788,\n        -2.7963326,\n        -2.6795058,\n        -2.4802117,\n        -3.4642274,\n        -5.1411734,\n        -3.66784,\n        -3.256203,\n        
-2.312808,\n        -3.182899,\n        -2.9188633,\n        -3.687109,\n        -2.7260873,\n        -3.5744486,\n        -2.2075148,\n        -3.5530431,\n        -3.1810625,\n        -2.6014674,\n        -4.4537644,\n        -2.347711,\n        -2.7858908,\n        -2.2913628,\n        -2.3800812,\n        -2.574793,\n        -2.1803203,\n        -2.487599,\n        -2.4158523,\n        -2.9215314,\n        -2.3955321,\n        -3.1068192,\n        -5.5564446,\n        -6.155976,\n        -4.783527,\n        -3.0313432,\n        -4.6340785,\n        -2.9346814,\n        -3.4056609,\n        -2.9865975,\n        -4.482046,\n        -3.2434604,\n        -3.8362653,\n        -3.1985016,\n        -3.7307,\n        -5.748415,\n        -3.335481,\n        -3.912007,\n        -7.3912263,\n        -2.2853935,\n        -2.7286966,\n        -3.0656602,\n        -2.99954,\n        -4.2920375,\n        -2.8718886,\n        -2.6310093,\n        -2.4226272,\n        -4.126571,\n        -2.6441429,\n        -2.213361,\n        -2.18956,\n        -3.3949723,\n        -6.7583833,\n        -4.7095194,\n        -3.3340955,\n        -4.805968,\n        -2.397384,\n        -2.71102,\n        -2.1774907,\n        -3.360004,\n        -4.7620926,\n        -3.0819888,\n        -2.475726,\n        -3.1784081,\n        -4.8078237,\n        -3.70299,\n        -3.4703214,\n        -2.1942518,\n        -2.2097945,\n        -5.100668,\n        -2.802018,\n        -3.0861547,\n        -2.614707,\n        -3.3183014,\n        -3.8127666,\n        -2.7256925,\n        -2.7711666,\n        -2.5503476,\n        -3.528591,\n        -2.3531575,\n        -2.5413828,\n        -5.374771,\n        -5.0652924,\n        -3.141435,\n        -4.5464096,\n        -3.9200919,\n        -2.547698,\n        -3.4862366,\n        -2.3858218,\n        -3.1564581,\n        -2.7105162,\n        -2.4559617,\n        -2.2118587,\n        -2.4247873,\n        -2.6961098,\n        -2.2285678,\n        -2.5738456,\n       
 -5.701704,\n        -3.4467978,\n        -4.680556,\n        -2.6207056,\n        -2.3404016,\n        -2.4840865,\n        -3.8120284,\n        -2.602705,\n        -5.501521,\n        -2.279263,\n        -2.925833,\n        -2.895566,\n        -2.7750962,\n        -2.5733013,\n        -3.2968647,\n        -6.0132313,\n        -2.511559,\n        -3.8756485,\n        -2.3297174\n      ],\n      \"pointIndex\": [\n        0,\n        1032,\n        256,\n        130316049,\n        182105832,\n        111076132,\n        177234996,\n        997881312,\n        971575849,\n        803574544,\n        108973323,\n        99959135,\n        798281318,\n        1058098,\n        1054391543,\n        114761459,\n        101574028,\n        873951083,\n        572162546,\n        107933,\n        75000016,\n        327553633,\n        698303492,\n        1042033525,\n        327999288,\n        733359851,\n        102139982,\n        971271501,\n        516312555,\n        272018520,\n        101432289,\n        101435011,\n        775923155,\n        101481556,\n        743646378,\n        446065006,\n        23645681,\n        34451246,\n        595379049,\n        288901872,\n        990539428,\n        1080377477,\n        630020555,\n        1016808779,\n        850122508,\n        971730593,\n        241165909,\n        971091193,\n        755150944,\n        17729772,\n        111502861,\n        111546447,\n        288352119,\n        111975727,\n        88585509,\n        115353647,\n        102111069,\n        458115968,\n        542179417,\n        625317949,\n        742927762,\n        101471961,\n        82062374,\n        458554097,\n        743330367,\n        915215041,\n        350764572,\n        278643608,\n        111120402,\n        274324176,\n        63993397,\n        17575794,\n        208122117,\n        299358723,\n        93871000,\n        237336095,\n        101866291,\n        183064427,\n        679387979,\n        337496871,\n        
34464391,\n        690885954,\n        743158299,\n        718346139,\n        549107147,\n        521345269,\n        807182715,\n        1062661369,\n        1032\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 4638085235929102383\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7205982,\n        -1.7462441,\n        -1.7224296,\n        -1.7497537,\n        -1.751028,\n        -1.8456761,\n        -1.7242068,\n        -1.7767053,\n        -1.7673215,\n        -1.784796,\n        -1.8923337,\n        -1.8468895,\n        -1.8458989,\n        -1.7726264,\n        -1.7494782,\n        -1.8162304,\n        -1.8506732,\n        -1.8071682,\n        -1.8505014,\n        -1.8663504,\n        -1.8602628,\n        -1.9369192,\n        -2.0685985,\n        -1.8470418,\n        -1.924243,\n        -1.8627342,\n        -1.896682,\n        -1.805714,\n        -1.7924759,\n        -1.8212261,\n        -1.8413523,\n        -1.8166019,\n        -1.9876587,\n        -1.9688573,\n        -1.9867836,\n        -2.1416805,\n        -1.8227341,\n        -1.984371,\n        -1.8936837,\n        -2.0074413,\n        -1.9986775,\n        -2.0292919,\n        -2.149405,\n        -2.153581,\n        -2.0198772,\n        -2.322012,\n        -2.1020737,\n        -1.8685888,\n        -1.866675,\n        -1.9476718,\n        -1.9762423,\n        -1.9931087,\n        -2.0990224,\n        -1.9319134,\n        -1.9236432,\n        -1.8683274,\n        -1.9440625,\n        -1.981607,\n        -1.8613373,\n        -1.9011766,\n        -2.035621,\n        -2.3928623,\n        -1.8578784,\n        -1.8394058,\n        -1.8834316,\n        -2.4774234,\n        -2.745633,\n        -2.330674,\n        
-2.2214074,\n        -2.007715,\n        -2.6319563,\n        -2.150613,\n        -2.7581625,\n        -1.855395,\n        -2.1326241,\n        -2.4616501,\n        -2.2978334,\n        -2.11462,\n        -2.2043338,\n        -2.2443311,\n        -2.0909631,\n        -2.0210614,\n        -2.1119738,\n        -2.194734,\n        -2.0480645,\n        -2.5312035,\n        -2.4746358,\n        -2.252974,\n        -2.2932506,\n        -2.15691,\n        -2.5898726,\n        -2.5293722,\n        -2.8775036,\n        -2.3297377,\n        -2.2259276,\n        -2.3367941,\n        -2.0050807,\n        -2.1409955,\n        -2.0319598,\n        -2.6143742,\n        -2.217846,\n        -2.2131999,\n        -2.3793766,\n        -2.1143787,\n        -2.2651167,\n        -2.14071,\n        -2.4633844,\n        -2.256193,\n        -2.2779107,\n        -2.337873,\n        -2.2562742,\n        -2.2765844,\n        -2.1672094,\n        -2.2050443,\n        -2.3057387,\n        -2.3357394,\n        -2.6255481,\n        -2.1946425,\n        -1.9281527,\n        -2.233351,\n        -2.1962113,\n        -2.4188945,\n        -2.1262202,\n        -2.428657,\n        -2.8092623,\n        -2.33741,\n        -2.2612424,\n        -2.203239,\n        -3.0023918,\n        -2.4995358,\n        -4.8756423,\n        -3.8188999,\n        -2.8692102,\n        -6.1800284,\n        -3.7025652,\n        -3.0154636,\n        -2.6822631,\n        -2.8073263,\n        -4.2340283,\n        -2.1536407,\n        -2.5218174,\n        -4.1774282,\n        -3.2186656,\n        -3.0366569,\n        -2.383663,\n        -5.6088076,\n        -2.7851872,\n        -3.2519581,\n        -2.5382798,\n        -3.4154825,\n        -7.3163767,\n        -2.4979515,\n        -4.079501,\n        -2.642788,\n        -2.4024146,\n        -5.3088193,\n        -3.2866457,\n        -3.7070508,\n        -3.195286,\n        -2.6170375,\n        -2.6562896,\n        -4.32288,\n        -4.6731935,\n        -4.7537546,\n        
-2.0546222,\n        -3.0340793,\n        -3.7126343,\n        -4.2914305,\n        -2.4861906,\n        -2.346633,\n        -3.5469143,\n        -2.829168,\n        -3.6296227,\n        -2.5642827,\n        -2.7284346,\n        -3.6774755,\n        -3.813522,\n        -2.3384602,\n        -2.86601,\n        -3.2119765,\n        -2.4721215,\n        -4.9027615,\n        -3.1063216,\n        -3.5586183,\n        -2.8718479,\n        -3.2273788,\n        -3.837107,\n        -2.9378967,\n        -2.4388022,\n        -2.7371254,\n        -2.8474655,\n        -7.5629826,\n        -2.6986318,\n        -3.3238146,\n        -2.394846,\n        -2.2454329,\n        -4.489093,\n        -2.320416,\n        -2.4232886,\n        -3.2989602,\n        -4.470059,\n        -3.3212802,\n        -3.0554433,\n        -2.8215988,\n        -2.7696617,\n        -2.5973637,\n        -2.4393094,\n        -2.9112113,\n        -3.0350218,\n        -2.3597577,\n        -2.6048572,\n        -2.319356,\n        -2.2565455,\n        -2.7014203,\n        -5.087077,\n        -3.4070683,\n        -2.2615533,\n        -2.4370053,\n        -5.2219357,\n        -4.003551,\n        -2.6806061,\n        -2.9045835,\n        -2.5617766,\n        -2.5563123,\n        -4.494617,\n        -3.316142,\n        -2.8841674,\n        -6.208882,\n        -4.5145392,\n        -4.1788445,\n        -3.5765295,\n        -3.076847,\n        -3.4893527,\n        -2.654117,\n        -3.4339619,\n        -2.3059309,\n        -2.8830016,\n        -3.1932635,\n        -2.603867,\n        -2.422528,\n        -3.7851768,\n        -3.45282,\n        -3.857833,\n        -2.928278,\n        -2.8723693,\n        -2.3710341,\n        -2.141756,\n        -2.6236925,\n        -3.822677,\n        -3.6973562,\n        -2.8623853,\n        -3.3728542,\n        -2.470519,\n        -2.7904298,\n        -3.4461827,\n        -2.6518717\n      ],\n      \"pointIndex\": [\n        0,\n        1031,\n        256,\n        636394200,\n        
540065279,\n        399278101,\n        499280,\n        606713065,\n        891442592,\n        1021974215,\n        212538431,\n        393720532,\n        445796427,\n        323044657,\n        51701512,\n        729576531,\n        107782822,\n        34941551,\n        823410191,\n        305112990,\n        763647192,\n        967328688,\n        737338494,\n        398657872,\n        112280788,\n        737004872,\n        8618236,\n        598413488,\n        566043792,\n        533128314,\n        392209829,\n        737266652,\n        603032783,\n        37453917,\n        293619559,\n        101366862,\n        101395175,\n        489560317,\n        737710841,\n        2163210,\n        202671885,\n        17754786,\n        915624963,\n        540430186,\n        37990816,\n        102222641,\n        636034155,\n        101960673,\n        17872387,\n        913257153,\n        540985857,\n        397352648,\n        478910612,\n        332234994,\n        5293089,\n        645386275,\n        1072636657,\n        274765347,\n        512374679,\n        79526015,\n        101701358,\n        15511096,\n        728027530,\n        101327015,\n        838272620,\n        108479865,\n        294691815,\n        980385105,\n        644477492,\n        151252929,\n        804807352,\n        237006048,\n        195546607,\n        389448737,\n        737210924,\n        5481644,\n        4507261,\n        737094663,\n        797262587,\n        895794236,\n        195217606,\n        134716329,\n        234964024,\n        487214283,\n        868397547,\n        827532387,\n        167915189,\n        867652677,\n        1031\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 
-8084622083950763255\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7102325,\n        -1.7106767,\n        -1.7218745,\n        -1.7244233,\n        -1.7484547,\n        -1.7266585,\n        -1.772923,\n        -1.7281314,\n        -1.734391,\n        -1.752405,\n        -1.7837251,\n        -1.7333705,\n        -1.7823449,\n        -1.817378,\n        -1.8416303,\n        -1.7505863,\n        -1.7800984,\n        -1.9186565,\n        -1.8795339,\n        -1.8090374,\n        -1.8483154,\n        -1.9879023,\n        -1.8898373,\n        -1.7887161,\n        -1.8517776,\n        -1.8900756,\n        -1.9099575,\n        -1.845195,\n        -1.854385,\n        -1.8797662,\n        -1.8626833,\n        -1.7688557,\n        -2.2654526,\n        -1.9594679,\n        -1.7884431,\n        -2.1558592,\n        -2.0876853,\n        -2.0811498,\n        -2.0088906,\n        -1.968705,\n        -1.8442656,\n        -1.8887308,\n        -1.8607655,\n        -3.0018377,\n        -2.1193664,\n        -1.9289687,\n        -1.934932,\n        -1.8580946,\n        -1.8834854,\n        -1.8530927,\n        -2.0006964,\n        -2.0897899,\n        -2.0771008,\n        -1.9469224,\n        -2.0084789,\n        -2.2256832,\n        -2.083075,\n        -1.8657632,\n        -1.9859991,\n        -2.0511184,\n        -1.9161814,\n        -2.4712257,\n        -1.9124635,\n        -1.7808094,\n        -2.200315,\n        -2.2816424,\n        -2.2963982,\n        -2.0322251,\n        -2.0530868,\n        -2.19593,\n        -2.399444,\n        -2.2074344,\n        -2.203682,\n        -2.0975952,\n        -2.1148884,\n        -2.646782,\n        -2.401001,\n        -2.039515,\n        -2.1529696,\n        -2.337291,\n        -2.0687835,\n        -2.413439,\n        -3.056558,\n        -2.312271,\n        -2.0148916,\n        -2.5917294,\n        -2.1651769,\n        -3.1119053,\n        -3.8390062,\n        -2.6814091,\n        -2.4874709,\n        -2.0005178,\n  
      -2.5307102,\n        -2.0668283,\n        -2.4102242,\n        -2.5936291,\n        -2.5020297,\n        -2.0634851,\n        -1.9583627,\n        -2.0916836,\n        -2.759579,\n        -2.054291,\n        -2.1813471,\n        -2.1769204,\n        -2.5858107,\n        -2.339425,\n        -2.8946817,\n        -2.5120027,\n        -2.153619,\n        -2.1004753,\n        -2.2984838,\n        -2.282694,\n        -2.397826,\n        -2.256161,\n        -2.505667,\n        -2.0490823,\n        -2.1189764,\n        -2.0119197,\n        -2.5590618,\n        -2.0721855,\n        -2.1660438,\n        -2.1627402,\n        -2.0932841,\n        -2.7577784,\n        -2.6576955,\n        -2.0936344,\n        -2.3629692,\n        -1.837828,\n        -2.7482083,\n        -2.3181837,\n        -3.399002,\n        -2.5708067,\n        -3.2452874,\n        -2.955638,\n        -2.9488106,\n        -2.7362404,\n        -2.7175627,\n        -3.9895449,\n        -4.5948253,\n        -2.4460433,\n        -2.8982131,\n        -3.036289,\n        -2.6314957,\n        -2.8254166,\n        -3.2840562,\n        -3.8990328,\n        -2.2284503,\n        -2.629425,\n        -4.0859966,\n        -3.3323238,\n        -2.3749952,\n        -3.6505778,\n        -4.3730307,\n        -2.5451531,\n        -3.2214994,\n        -4.9090548,\n        -3.3397892,\n        -2.9609098,\n        -2.6038854,\n        -2.8999689,\n        -2.9174776,\n        -2.337947,\n        -5.2149744,\n        -2.909756,\n        -2.886039,\n        -4.881517,\n        -3.1630714,\n        -2.9587462,\n        -2.8257945,\n        -2.2071874,\n        -2.1769478,\n        -3.0848937,\n        -2.9584112,\n        -2.4563642,\n        -2.2238102,\n        -5.1002517,\n        -3.1732137,\n        -5.56727,\n        -4.225469,\n        -2.8651657,\n        -3.1462452,\n        -9.351812,\n        -3.2106128,\n        -3.6531513,\n        -8.315354,\n        -4.451452,\n        -2.5656998,\n        -3.6258245,\n        
-3.6576633,\n        -2.4644089,\n        -2.8058252,\n        -3.717551,\n        -3.3379807,\n        -3.3480532,\n        -2.6379082,\n        -2.1303928,\n        -3.9951026,\n        -2.6979685,\n        -2.5670674,\n        -2.140809,\n        -2.9114952,\n        -3.4694004,\n        -3.273883,\n        -2.554731,\n        -2.128335,\n        -4.286048,\n        -6.0300655,\n        -2.4410648,\n        -2.459763,\n        -2.6265686,\n        -5.183496,\n        -2.838463,\n        -5.6320653,\n        -5.394695,\n        -3.785626,\n        -5.6052732,\n        -3.4936693,\n        -2.6826017,\n        -5.205424,\n        -3.548408,\n        -6.740735,\n        -2.9324296,\n        -2.3768349,\n        -5.805819,\n        -3.1873932,\n        -2.4063437,\n        -2.9075701,\n        -3.0286765,\n        -4.9715776,\n        -2.6412039,\n        -4.5007315,\n        -3.5917234,\n        -2.9010386,\n        -2.401519,\n        -4.756611,\n        -2.0551183,\n        -2.6262233,\n        -3.2444184,\n        -2.6554153,\n        -2.59962,\n        -3.252687,\n        -3.0759692,\n        -6.125144,\n        -3.1548474,\n        -2.172181,\n        -2.4412844,\n        -3.1854272,\n        -3.338207,\n        -3.7814631,\n        -5.218697,\n        -3.292173,\n        -2.7732189,\n        -2.6392634,\n        -2.8407757,\n        -5.678224,\n        -2.1455255\n      ],\n      \"pointIndex\": [\n        0,\n        1033,\n        256,\n        12925232,\n        19860142,\n        18043876,\n        737946932,\n        823923450,\n        565691376,\n        183021252,\n        636417067,\n        51757271,\n        737816363,\n        686449508,\n        861844860,\n        427978892,\n        962954478,\n        534324419,\n        636861970,\n        376576157,\n        660837573,\n        738776017,\n        13232831,\n        1017869695,\n        290656334,\n        102027249,\n        455666323,\n        53302351,\n        102608512,\n        
8690865,\n        102430203,\n        905674303,\n        18691713,\n        295185350,\n        228444615,\n        18363935,\n        590983554,\n        184520304,\n        386941844,\n        774726528,\n        377365903,\n        738735374,\n        47170199,\n        164112874,\n        830353971,\n        653232343,\n        102479584,\n        127943082,\n        26373829,\n        853188653,\n        1065219595,\n        102500515,\n        151820201,\n        49759229,\n        534223220,\n        636861288,\n        57704659,\n        534440039,\n        753399667,\n        624494858,\n        533952662,\n        843738734,\n        788583314,\n        101668504,\n        726055258,\n        835280702,\n        1043594699,\n        534232169,\n        102361968,\n        187845267,\n        962395576,\n        1031624016,\n        130439257,\n        903453516,\n        1095012325,\n        25807281,\n        286351021,\n        838971423,\n        949437683,\n        405169980,\n        375988219,\n        652284322,\n        655425716,\n        761459031,\n        376440460,\n        614799833,\n        738409697,\n        785504736,\n        1033\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 1876375996478369417\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.656019,\n        -1.6580104,\n        -1.6578665,\n        -1.6640373,\n        -1.6712515,\n        -1.6762936,\n        -1.6730125,\n        -1.6662111,\n        -1.6866534,\n        -1.7095369,\n        -1.7216151,\n        -1.7155652,\n        -1.6988926,\n        -1.6777173,\n        -1.6937543,\n        -1.6859257,\n        -1.7018273,\n        -1.6874223,\n        -1.8165721,\n        
-1.7920341,\n        -1.7250608,\n        -1.7262647,\n        -1.7521154,\n        -1.7604737,\n        -1.7186152,\n        -1.8678156,\n        -1.7036458,\n        -1.684932,\n        -1.7272867,\n        -1.7302197,\n        -1.8157284,\n        -1.6931717,\n        -1.8684835,\n        -1.9245646,\n        -1.7443494,\n        -2.3173852,\n        -2.3280199,\n        -1.9060123,\n        -1.885168,\n        -1.9172245,\n        -1.9962476,\n        -1.7415087,\n        -1.804002,\n        -1.7874721,\n        -1.8480979,\n        -1.7777001,\n        -1.7719274,\n        -1.9258261,\n        -1.8681453,\n        -2.0535119,\n        -1.8060087,\n        -2.0503588,\n        -1.9844753,\n        -1.7585043,\n        -2.822942,\n        -1.7392656,\n        -1.7083489,\n        -2.1541893,\n        -1.7901094,\n        -1.7949449,\n        -1.7823777,\n        -1.8484274,\n        -1.8914237,\n        -1.9511005,\n        -1.898297,\n        -2.041157,\n        -2.243955,\n        -2.0601175,\n        -2.067163,\n        -3.0740283,\n        -1.8782701,\n        -2.6605234,\n        -2.3239706,\n        -2.8802342,\n        -2.3282926,\n        -1.9755129,\n        -2.4496348,\n        -1.9982197,\n        -2.0463336,\n        -2.0588064,\n        -2.2567818,\n        -2.8873181,\n        -2.2662315,\n        -1.9776943,\n        -2.3999114,\n        -2.2588086,\n        -1.991948,\n        -1.8267877,\n        -1.8300658,\n        -2.3278456,\n        -1.9874305,\n        -1.9373515,\n        -1.8812344,\n        -1.8328129,\n        -1.9801755,\n        -2.1545746,\n        -2.2382958,\n        -2.1801105,\n        -2.0798466,\n        -2.4324725,\n        -2.375275,\n        -1.8256987,\n        -2.637144,\n        -2.389102,\n        -3.5763822,\n        -2.1500144,\n        -2.226164,\n        -2.3502855,\n        -2.6395602,\n        -2.8662357,\n        -2.8829107,\n        -1.9964328,\n        -1.8523451,\n        -1.7519509,\n        -1.9110743,\n     
   -2.157954,\n        -2.4338145,\n        -2.005445,\n        -1.9867252,\n        -1.9197575,\n        -1.8163303,\n        -1.8098514,\n        -2.0345504,\n        -2.0476131,\n        -2.043342,\n        -2.5049953,\n        -1.9068856,\n        -1.974582,\n        -3.9669302,\n        -2.4419277,\n        -2.7219682,\n        -3.466173,\n        -2.1854036,\n        -2.248525,\n        -2.4205265,\n        -2.1431952,\n        -2.7012627,\n        -2.2175283,\n        -2.1179817,\n        -3.480231,\n        -4.594685,\n        -4.148525,\n        -2.5355668,\n        -2.7218988,\n        -3.107209,\n        -3.0598428,\n        -3.2045312,\n        -3.0200884,\n        -3.7437663,\n        -2.8024817,\n        -2.8117232,\n        -3.225925,\n        -2.039732,\n        -4.758175,\n        -3.326319,\n        -2.503129,\n        -2.1320293,\n        -2.0589864,\n        -4.8441477,\n        -2.5802937,\n        -2.1175923,\n        -2.7892077,\n        -4.0004416,\n        -3.2411966,\n        -4.2669926,\n        -3.4929066,\n        -2.565467,\n        -4.054648,\n        -2.159858,\n        -3.35157,\n        -3.173471,\n        -2.6001463,\n        -2.4695635,\n        -4.582937,\n        -2.2184834,\n        -2.050262,\n        -1.8738642,\n        -5.7333293,\n        -1.9362634,\n        -4.2702174,\n        -3.821907,\n        -3.0997074,\n        -2.6365895,\n        -2.1213505,\n        -3.0730777,\n        -2.0265932,\n        -2.5071113,\n        -2.6132777,\n        -5.2329154,\n        -3.6723025,\n        -2.0291698,\n        -2.535118,\n        -3.3466601,\n        -3.638253,\n        -2.5475395,\n        -2.1876795,\n        -2.3624208,\n        -2.5825639,\n        -4.5250173,\n        -2.9519923,\n        -3.0251846,\n        -3.7545924,\n        -3.4826229,\n        -4.3434815,\n        -2.801646,\n        -3.7608492,\n        -5.958017,\n        -2.4195075,\n        -6.94603,\n        -4.2892704,\n        -4.299722,\n        
-3.2493396,\n        -3.3746479,\n        -3.3338714,\n        -4.1913214,\n        -3.5012274,\n        -6.0780706,\n        -2.761881,\n        -4.0485625,\n        -3.4803429,\n        -4.089557,\n        -3.9174495,\n        -3.7321932,\n        -2.5362492,\n        -2.3141446,\n        -2.4441571,\n        -3.818552,\n        -3.7019074,\n        -2.0417445,\n        -3.7441072,\n        -1.9211413,\n        -3.3088958,\n        -5.695583,\n        -2.4738545,\n        -2.9835706,\n        -3.9554763,\n        -4.294223,\n        -4.011585,\n        -2.8165956,\n        -4.076753,\n        -2.13725,\n        -7.237174,\n        -2.3534398,\n        -3.445057,\n        -2.4009888,\n        -2.042431,\n        -2.2111802,\n        -2.7649636,\n        -2.5871248,\n        -2.6968222,\n        -3.3464444,\n        -3.311163,\n        -4.6309342,\n        -4.032488,\n        -4.330639,\n        -2.9021716\n      ],\n      \"pointIndex\": [\n        0,\n        1033,\n        256,\n        473734442,\n        2664181,\n        112067496,\n        482288034,\n        1039874851,\n        242855522,\n        215912766,\n        359297542,\n        556121297,\n        897529663,\n        729678080,\n        869322643,\n        1253250,\n        1017850521,\n        12928206,\n        745622665,\n        48739830,\n        643858568,\n        194606339,\n        543284606,\n        111913508,\n        50264771,\n        804052480,\n        35729433,\n        757147520,\n        102290598,\n        643731060,\n        93634695,\n        745801115,\n        211791523,\n        141190295,\n        876711343,\n        746008406,\n        225347926,\n        556059445,\n        745878593,\n        302545701,\n        343325408,\n        859987449,\n        291273751,\n        21035170,\n        110844348,\n        243767315,\n        770969719,\n        378589457,\n        481228433,\n        771979702,\n        361435172,\n        93029262,\n        212219082,\n        
101586459,\n        356612281,\n        101668145,\n        844435841,\n        816933684,\n        102297813,\n        501970905,\n        101851348,\n        661906690,\n        297946411,\n        3653217,\n        745331709,\n        150287811,\n        745204308,\n        38443672,\n        422201509,\n        650003963,\n        599857732,\n        218302497,\n        826156409,\n        225364932,\n        543229573,\n        745452197,\n        191193,\n        556259609,\n        326404372,\n        386302759,\n        102348821,\n        640860344,\n        367363669,\n        745727814,\n        638602199,\n        21202352,\n        745923171,\n        111964321,\n        1030\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": -2204909260360194046\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.730323,\n        -1.742828,\n        -1.7478267,\n        -1.7488226,\n        -1.7658932,\n        -1.7979212,\n        -1.7513748,\n        -1.7601001,\n        -1.8694206,\n        -1.7907435,\n        -1.8348733,\n        -1.8080705,\n        -1.9233587,\n        -1.8082566,\n        -1.7582871,\n        -1.76278,\n        -1.8169695,\n        -1.9336684,\n        -1.9069245,\n        -1.8038816,\n        -1.84187,\n        -1.8483424,\n        -1.8486806,\n        -1.8643656,\n        -1.8404105,\n        -1.9839911,\n        -1.9260427,\n        -1.8572724,\n        -1.8246914,\n        -1.798383,\n        -1.9120096,\n        -1.7666714,\n        -2.270041,\n        -1.9683915,\n        -1.9873596,\n        -1.9512979,\n        -2.0728245,\n        -1.9253695,\n        -1.94445,\n        -1.8157637,\n        -1.9546065,\n        -2.1858442,\n        -2.0819132,\n   
     -1.9846267,\n        -1.9278313,\n        -2.015536,\n        -2.1885037,\n        -1.9759471,\n        -2.1634674,\n        -2.0335054,\n        -1.9880213,\n        -2.1104834,\n        -1.9865267,\n        -2.0156908,\n        -2.3391416,\n        -1.9760637,\n        -1.9009969,\n        -1.9003448,\n        -1.9998822,\n        -1.8235197,\n        -1.8613955,\n        -1.9787422,\n        -2.3973725,\n        -1.7952727,\n        -2.3285503,\n        -2.353048,\n        -2.7485626,\n        -2.2356188,\n        -2.834423,\n        -2.1354256,\n        -2.226179,\n        -2.1007419,\n        -2.2443857,\n        -2.1306915,\n        -2.444321,\n        -2.8282566,\n        -1.9531182,\n        -2.0946836,\n        -2.2666774,\n        -2.2573767,\n        -2.1929116,\n        -2.30298,\n        -1.9809619,\n        -2.7987509,\n        -2.550264,\n        -2.283383,\n        -2.3615265,\n        -2.9488018,\n        -2.2610583,\n        -3.1094718,\n        -2.7515755,\n        -2.1720796,\n        -2.2347355,\n        -2.2103302,\n        -2.190134,\n        -2.7173781,\n        -2.33031,\n        -2.616049,\n        -2.5021603,\n        -3.0292487,\n        -2.7652547,\n        -2.2304795,\n        -2.4867032,\n        -2.3336337,\n        -2.1582317,\n        -2.2383635,\n        -2.1751463,\n        -2.2199006,\n        -2.177874,\n        -2.4507465,\n        -2.9135127,\n        -2.0451937,\n        -1.9926541,\n        -2.0187225,\n        -2.2701643,\n        -2.308212,\n        -2.3873026,\n        -2.2092636,\n        -2.752517,\n        -1.9147124,\n        -2.357816,\n        -2.2217934,\n        -2.0571294,\n        -2.2435346,\n        -2.2057357,\n        -3.3210113,\n        -2.5308971,\n        -1.8066989,\n        -4.0470686,\n        -2.378088,\n        -3.5435028,\n        -2.8419998,\n        -3.1035783,\n        -4.3567505,\n        -4.4041286,\n        -4.1313505,\n        -2.3421085,\n        -3.2468865,\n        -3.1362343,\n     
   -2.8429937,\n        -2.435654,\n        -4.2078276,\n        -2.3967023,\n        -2.4916565,\n        -3.2946203,\n        -3.5685098,\n        -2.7417114,\n        -2.1358972,\n        -3.034784,\n        -4.156872,\n        -2.9813933,\n        -3.027067,\n        -4.866516,\n        -2.1356533,\n        -3.0307004,\n        -3.9780993,\n        -5.536298,\n        -3.1343129,\n        -3.4540765,\n        -3.5801847,\n        -2.3000717,\n        -2.5552542,\n        -2.4868872,\n        -2.6451788,\n        -2.9759266,\n        -4.552585,\n        -3.490288,\n        -4.1270084,\n        -3.3920908,\n        -4.825763,\n        -6.669724,\n        -3.847581,\n        -2.6483405,\n        -2.621604,\n        -4.932221,\n        -3.0901523,\n        -3.3913136,\n        -2.4451401,\n        -2.8663428,\n        -8.479628,\n        -3.5417347,\n        -6.19819,\n        -2.781825,\n        -2.8313844,\n        -3.1323414,\n        -2.3094926,\n        -6.6986847,\n        -2.5223255,\n        -3.894261,\n        -2.2034428,\n        -2.7831585,\n        -2.8053238,\n        -3.980969,\n        -2.7004611,\n        -2.430564,\n        -3.1612046,\n        -3.270475,\n        -3.0698972,\n        -3.2594793,\n        -3.1010172,\n        -3.6406608,\n        -4.078909,\n        -6.4668474,\n        -2.279233,\n        -2.6484544,\n        -2.8335543,\n        -3.2978861,\n        -3.8962936,\n        -3.0345144,\n        -3.2081096,\n        -3.04323,\n        -3.4193723,\n        -2.4854243,\n        -2.9113395,\n        -2.3584383,\n        -8.27435,\n        -3.8476827,\n        -2.668799,\n        -5.420402,\n        -3.4819248,\n        -3.6587346,\n        -3.1479921,\n        -3.0343697,\n        -2.5434158,\n        -2.1916368,\n        -2.6067011,\n        -3.1182675,\n        -3.9397695,\n        -2.4570735,\n        -2.7494276,\n        -2.8964815,\n        -2.7550788,\n        -2.770167,\n        -2.6556418,\n        -2.406011,\n        
-4.1283875,\n        -2.3407385,\n        -3.6427588,\n        -3.2966878,\n        -2.1310863,\n        -2.049569,\n        -2.5727491,\n        -2.8036466,\n        -2.7366984,\n        -3.2630098,\n        -2.1418307,\n        -2.421375,\n        -2.4011054,\n        -2.494355,\n        -2.8232613,\n        -3.2020233,\n        -3.3500326,\n        -3.652649,\n        -7.6765313,\n        -4.580076,\n        -2.053722\n      ],\n      \"pointIndex\": [\n        0,\n        1034,\n        256,\n        892856593,\n        83782320,\n        1040366132,\n        39458104,\n        354110855,\n        731111470,\n        297899383,\n        970946101,\n        610707000,\n        434160422,\n        525807418,\n        741603480,\n        658257941,\n        101817190,\n        754317410,\n        109338530,\n        1054501127,\n        741518521,\n        741603610,\n        615599867,\n        930829415,\n        101870795,\n        193933163,\n        608,\n        991204086,\n        544293808,\n        102081741,\n        943758026,\n        445749317,\n        101830553,\n        868007044,\n        79054706,\n        687741632,\n        5016139,\n        38572396,\n        94169663,\n        12879736,\n        93790366,\n        93259240,\n        165672636,\n        838735358,\n        791170729,\n        167433940,\n        17330732,\n        112699772,\n        112928865,\n        17248314,\n        741386624,\n        101812964,\n        52049029,\n        9511314,\n        472328981,\n        1787747,\n        973841945,\n        327664817,\n        93627964,\n        102373904,\n        600260,\n        919492622,\n        742128323,\n        101987016,\n        39972498,\n        102636947,\n        101864795,\n        198046330,\n        1039804545,\n        150548776,\n        545078689,\n        395631936,\n        17238497,\n        324606297,\n        13061357,\n        558199045,\n        1052176024,\n        157759970,\n        264843794,\n    
    313323575,\n        1084386925,\n        741612347,\n        374240190,\n        40027325,\n        497241746,\n        800521821,\n        764095481,\n        348247169,\n        1034\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 5154851018987160\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.8081539,\n        -1.8374668,\n        -1.8200548,\n        -1.8378677,\n        -1.8699274,\n        -1.8204843,\n        -1.837169,\n        -1.8560339,\n        -1.8706534,\n        -1.9132446,\n        -1.9125419,\n        -1.8441378,\n        -1.843584,\n        -1.9976562,\n        -1.8647866,\n        -1.8639615,\n        -1.9602515,\n        -2.0096579,\n        -1.9230106,\n        -2.0387719,\n        -1.9202425,\n        -2.0972016,\n        -1.9226053,\n        -2.0360386,\n        -1.9534422,\n        -2.0847018,\n        -1.8873698,\n        -2.0146236,\n        -2.360392,\n        -1.8668797,\n        -2.2097003,\n        -1.9132732,\n        -1.9552976,\n        -2.076997,\n        -1.9653509,\n        -2.1352947,\n        -2.065349,\n        -2.3188722,\n        -1.9250214,\n        -2.4378948,\n        -2.0432777,\n        -2.142098,\n        -1.9360743,\n        -2.487698,\n        -2.1203842,\n        -2.517966,\n        -1.9550864,\n        -2.0991573,\n        -2.159543,\n        -2.1411753,\n        -2.1662586,\n        -2.385162,\n        -2.2047498,\n        -2.0531802,\n        -1.9113472,\n        -2.2622082,\n        -2.1727529,\n        -2.456896,\n        -2.9784014,\n        -1.9387985,\n        -2.0268776,\n        -2.5803947,\n        -2.2401416,\n        -1.9755834,\n        -2.4991083,\n        -2.5011022,\n        -2.0349967,\n        
-3.7397747,\n        -2.9534667,\n        -1.9944588,\n        -2.037487,\n        -2.8034809,\n        -2.3971744,\n        -3.147875,\n        -2.4144685,\n        -2.3702087,\n        -2.379169,\n        -2.5259547,\n        -2.1148481,\n        -2.552007,\n        -2.7552524,\n        -2.1916394,\n        -2.8888388,\n        -2.3583164,\n        -2.1572099,\n        -2.4837234,\n        -2.0414467,\n        -2.527586,\n        -2.8253407,\n        -2.319006,\n        -3.878649,\n        -2.556014,\n        -2.6850367,\n        -2.0110333,\n        -2.835685,\n        -2.2800486,\n        -2.222945,\n        -2.204285,\n        -2.1956346,\n        -2.7633805,\n        -2.450427,\n        -2.3268688,\n        -2.6188986,\n        -2.5636187,\n        -2.4748957,\n        -2.2742703,\n        -2.3604908,\n        -2.5363846,\n        -2.1213899,\n        -2.207771,\n        -1.984652,\n        -2.5722198,\n        -2.834556,\n        -3.3804405,\n        -2.203345,\n        -2.8947597,\n        -3.4934092,\n        -3.3917785,\n        -3.1407318,\n        -1.968794,\n        -2.0351653,\n        -2.9231207,\n        -2.4528592,\n        -2.8457716,\n        -2.8268237,\n        -2.8932538,\n        -2.2901223,\n        -1.9915406,\n        -3.8448112,\n        -2.6099825,\n        -5.02955,\n        -3.5970562,\n        -3.6908493,\n        -2.3327637,\n        -2.6917741,\n        -3.83888,\n        -4.2903056,\n        -6.846097,\n        -5.440284,\n        -2.6044068,\n        -3.2046525,\n        -3.0699115,\n        -2.0554912,\n        -3.3021948,\n        -6.288108,\n        -3.5863686,\n        -2.4533448,\n        -4.337389,\n        -5.0689197,\n        -3.590733,\n        -2.4593842,\n        -2.7087104,\n        -5.2342978,\n        -3.591647,\n        -3.240329,\n        -3.914902,\n        -3.1409209,\n        -2.2925715,\n        -2.2064683,\n        -4.645542,\n        -2.8061657,\n        -3.4201372,\n        -6.2779756,\n        -2.6473892,\n 
       -2.25159,\n        -3.0004153,\n        -4.3399687,\n        -2.778047,\n        -3.686897,\n        -6.5347357,\n        -4.746953,\n        -3.2072554,\n        -3.0714288,\n        -2.2840912,\n        -2.2471886,\n        -4.857505,\n        -2.7394643,\n        -3.725049,\n        -3.6219473,\n        -3.732815,\n        -4.6330414,\n        -4.9115586,\n        -5.9235597,\n        -2.7386565,\n        -3.1102774,\n        -2.7139144,\n        -3.1139874,\n        -4.584773,\n        -4.4270344,\n        -3.285366,\n        -3.441138,\n        -3.2958968,\n        -4.4018235,\n        -3.7060828,\n        -2.7993503,\n        -7.576483,\n        -6.246723,\n        -2.2072713,\n        -4.2723317,\n        -2.8648398,\n        -3.3212817,\n        -3.0167284,\n        -2.7412393,\n        -2.6174183,\n        -4.830457,\n        -5.2901287,\n        -3.81263,\n        -3.8005095,\n        -3.0297546,\n        -3.3717444,\n        -5.015555,\n        -3.2489576,\n        -7.602393,\n        -2.5329897,\n        -9.401524,\n        -3.324802,\n        -4.047294,\n        -3.743376,\n        -2.448434,\n        -2.5218654,\n        -2.5525658,\n        -2.5849967,\n        -2.642297,\n        -3.920143,\n        -2.6349912,\n        -5.1725287,\n        -5.2311077,\n        -5.918149,\n        -4.343884,\n        -3.5727563,\n        -4.8944764,\n        -3.4397783,\n        -3.5534933,\n        -7.381869,\n        -5.341848,\n        -3.879571,\n        -3.4132533,\n        -5.1299214,\n        -5.8725877,\n        -2.2350998,\n        -3.5410697,\n        -2.562155,\n        -2.9176178,\n        -3.6295671,\n        -3.4202685,\n        -4.111259,\n        -3.0156856,\n        -3.8100078,\n        -3.2862966,\n        -3.1310773,\n        -2.8316627,\n        -3.1592243,\n        -3.6751955,\n        -6.4956803,\n        -7.003652,\n        -2.2157168\n      ],\n      \"pointIndex\": [\n        0,\n        1031,\n        256,\n        730705442,\n       
 101541269,\n        872337104,\n        733828760,\n        979610331,\n        534473934,\n        734528974,\n        813205535,\n        287203103,\n        389823967,\n        111232745,\n        532327301,\n        969391962,\n        664575724,\n        832452191,\n        1065470943,\n        301502293,\n        102230419,\n        325946114,\n        625279169,\n        989065186,\n        101541559,\n        431849792,\n        969883009,\n        891533291,\n        22464106,\n        565533949,\n        370275387,\n        734159992,\n        969270065,\n        734632883,\n        147912931,\n        828569205,\n        887385125,\n        645128931,\n        531554789,\n        803581937,\n        729826545,\n        38785253,\n        760795904,\n        226882793,\n        786958481,\n        414281905,\n        988240819,\n        14948113,\n        10468712,\n        111382807,\n        218315513,\n        111669281,\n        360493175,\n        374912759,\n        48659354,\n        101529269,\n        1094859648,\n        110779103,\n        583507302,\n        13927011,\n        17752342,\n        144463961,\n        198210241,\n        99386786,\n        776156565,\n        8539921,\n        841806657,\n        679562898,\n        101786306,\n        182607929,\n        17226317,\n        199717,\n        92859558,\n        93368488,\n        734037877,\n        428492437,\n        101196893,\n        62051651,\n        612144416,\n        454735625,\n        209071873,\n        926926205,\n        101280947,\n        733900418,\n        644389555,\n        727404667,\n        427786535,\n        1090139886,\n        1031\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 
5217279339685084469\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7402439,\n        -1.7715176,\n        -1.7717422,\n        -1.7729152,\n        -1.7921242,\n        -1.7870976,\n        -1.7758383,\n        -1.7789812,\n        -1.8094795,\n        -1.7930634,\n        -1.8064483,\n        -1.8188128,\n        -1.792601,\n        -1.7994062,\n        -1.7764919,\n        -1.8312615,\n        -1.8498967,\n        -1.8220494,\n        -1.9127506,\n        -1.8363383,\n        -1.8584883,\n        -1.816268,\n        -1.9125257,\n        -1.9043639,\n        -1.9346454,\n        -2.0878932,\n        -1.8999056,\n        -1.8516107,\n        -1.8305874,\n        -1.825061,\n        -1.7851197,\n        -1.8423272,\n        -1.8922483,\n        -1.9946264,\n        -1.8978056,\n        -1.9140006,\n        -1.9510587,\n        -2.0384092,\n        -1.9432021,\n        -1.8557298,\n        -1.8817563,\n        -2.0289328,\n        -1.9969633,\n        -2.0407221,\n        -1.9843104,\n        -2.0930736,\n        -2.0045543,\n        -1.9131368,\n        -1.9693974,\n        -1.9793379,\n        -2.3255649,\n        -2.1001842,\n        -2.1157966,\n        -1.9513168,\n        -2.4928272,\n        -1.9755663,\n        -2.0237503,\n        -1.8789812,\n        -1.8864049,\n        -2.1880121,\n        -1.8265959,\n        -2.554407,\n        -1.7857251,\n        -1.8430191,\n        -1.9230493,\n        -2.6205242,\n        -2.3441565,\n        -2.6843765,\n        -2.3986797,\n        -2.2215936,\n        -2.143354,\n        -2.0351074,\n        -2.0040874,\n        -3.107786,\n        -2.0737038,\n        -2.2980378,\n        -2.2727246,\n        -2.278061,\n        -2.0756097,\n        -1.8627639,\n        -2.027572,\n        -2.1260366,\n        -2.1081305,\n        -2.0530365,\n        -2.0896275,\n        -2.4408777,\n        -3.171343,\n        -2.4679239,\n        -2.0811162,\n        -2.8935795,\n        -2.0410137,\n        
-2.6020117,\n        -2.2546835,\n        -2.7901294,\n        -2.466587,\n        -1.9456893,\n        -2.0967803,\n        -2.079998,\n        -2.105479,\n        -3.0702155,\n        -2.2765713,\n        -2.4188864,\n        -2.5822628,\n        -2.652611,\n        -2.1469536,\n        -2.8105438,\n        -2.305231,\n        -2.0727637,\n        -2.304691,\n        -2.6648202,\n        -2.8879018,\n        -2.3512928,\n        -2.9288738,\n        -2.6278293,\n        -2.3775413,\n        -2.7552533,\n        -2.3438475,\n        -2.17639,\n        -1.8908613,\n        -2.5958147,\n        -2.374652,\n        -2.3235338,\n        -1.93208,\n        -3.1439962,\n        -2.779953,\n        -2.098122,\n        -1.7961223,\n        -2.524623,\n        -6.1634088,\n        -2.234678,\n        -2.4268363,\n        -2.7040474,\n        -3.890868,\n        -2.4822774,\n        -2.463142,\n        -3.2118921,\n        -3.2390893,\n        -2.6532862,\n        -4.385273,\n        -2.744431,\n        -2.9693918,\n        -2.2373397,\n        -2.243682,\n        -3.2172477,\n        -2.9261057,\n        -2.8296132,\n        -2.0616693,\n        -9.601502,\n        -3.495537,\n        -4.6509814,\n        -2.4027858,\n        -3.2622712,\n        -3.134057,\n        -4.5410876,\n        -2.684269,\n        -2.4046478,\n        -3.3885858,\n        -2.761631,\n        -2.1438603,\n        -2.157503,\n        -1.9436723,\n        -2.1001916,\n        -2.5161092,\n        -4.653356,\n        -2.6913075,\n        -7.1517897,\n        -3.5990102,\n        -3.41811,\n        -2.497729,\n        -2.250088,\n        -2.4825282,\n        -5.0283313,\n        -3.0872464,\n        -3.9509413,\n        -4.047483,\n        -2.8081558,\n        -3.23343,\n        -3.2008176,\n        -4.0549593,\n        -3.6211317,\n        -4.5967646,\n        -2.5286436,\n        -2.1516042,\n        -3.1540043,\n        -2.893712,\n        -2.8100188,\n        -2.601966,\n        -3.9216847,\n       
 -3.706026,\n        -3.1440256,\n        -2.6428132,\n        -2.5636818,\n        -2.020567,\n        -2.281868,\n        -3.9018033,\n        -4.2405014,\n        -3.4563286,\n        -2.2985587,\n        -2.3244133,\n        -5.6315384,\n        -3.4825268,\n        -4.190746,\n        -2.4551625,\n        -2.731267,\n        -2.700726,\n        -2.7283032,\n        -5.2848864,\n        -4.379558,\n        -3.20883,\n        -2.2771497,\n        -3.3881977,\n        -3.0294724,\n        -4.051019,\n        -2.4255893,\n        -3.080031,\n        -3.0699701,\n        -4.016986,\n        -2.4882278,\n        -2.342394,\n        -2.9968462,\n        -2.8596075,\n        -3.7575955,\n        -3.3068979,\n        -2.6474195,\n        -4.8887773,\n        -7.5796194,\n        -3.702549,\n        -2.990904,\n        -4.659867,\n        -2.7444,\n        -3.8493836,\n        -3.880225,\n        -3.4993148,\n        -2.7812548,\n        -3.0489693,\n        -3.534438,\n        -3.2690976,\n        -2.97994,\n        -3.840988,\n        -2.8579106,\n        -3.848355,\n        -2.4177978,\n        -2.4319067,\n        -2.8399181,\n        -3.2112718,\n        -2.4927819,\n        -2.4932735,\n        -4.5259833,\n        -3.6803274,\n        -3.7987075,\n        -3.02941,\n        -5.06464,\n        -6.8033524,\n        -3.0422902,\n        -1.8099041,\n        -4.20267\n      ],\n      \"pointIndex\": [\n        0,\n        1032,\n        256,\n        322351781,\n        736453349,\n        21358807,\n        532481954,\n        1062624459,\n        44859889,\n        110987148,\n        101471901,\n        737004487,\n        570387769,\n        653203616,\n        870390445,\n        92999519,\n        617470591,\n        930169121,\n        542155801,\n        578519467,\n        875577705,\n        1044989688,\n        863110088,\n        1099815297,\n        17303150,\n        628833145,\n        44916074,\n        111678178,\n        387406006,\n        
606649764,\n        9336841,\n        978059944,\n        701176298,\n        102086622,\n        140403413,\n        777917117,\n        634967691,\n        93896673,\n        670187970,\n        634816746,\n        658685306,\n        423725050,\n        39619214,\n        186421378,\n        737004706,\n        737058911,\n        858037786,\n        831270595,\n        40897519,\n        349547230,\n        111075652,\n        721877136,\n        51545994,\n        22409289,\n        1050065693,\n        40657401,\n        405732249,\n        101459289,\n        101587623,\n        8944842,\n        101597621,\n        635016669,\n        406018648,\n        1081674617,\n        383183594,\n        736325061,\n        217943044,\n        635219231,\n        68666250,\n        111690245,\n        334376103,\n        593657005,\n        40545350,\n        320351456,\n        67785386,\n        449550085,\n        102408424,\n        291740869,\n        736452662,\n        901496954,\n        737144093,\n        8951158,\n        349305231,\n        703310285,\n        611540236,\n        301477630,\n        701729482,\n        626664039,\n        834\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": -7622741475383530684\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.774023,\n        -1.7915899,\n        -1.7797278,\n        -1.8040701,\n        -1.797157,\n        -1.81414,\n        -1.7931064,\n        -1.8391774,\n        -1.9360573,\n        -1.7999569,\n        -1.860272,\n        -1.8243421,\n        -1.8224331,\n        -1.8005509,\n        -1.9047723,\n        -1.8399519,\n        -1.8803104,\n        -2.0661376,\n        -1.938313,\n        -1.9353746,\n        
-1.8669463,\n        -1.8922094,\n        -1.9107784,\n        -2.0149715,\n        -1.8881586,\n        -1.8292402,\n        -1.8839419,\n        -1.8483727,\n        -2.048407,\n        -2.229058,\n        -1.925212,\n        -1.8424052,\n        -1.8865591,\n        -1.8842703,\n        -1.9844824,\n        -2.1245017,\n        -2.0911503,\n        -1.9543402,\n        -2.0314279,\n        -1.935985,\n        -2.060191,\n        -1.9419324,\n        -1.8944805,\n        -1.9358683,\n        -2.0144799,\n        -2.0392423,\n        -2.1870213,\n        -2.1212554,\n        -2.0225325,\n        -1.9322077,\n        -2.2719066,\n        -1.9704443,\n        -1.9016994,\n        -1.8957658,\n        -1.932177,\n        -1.9371688,\n        -1.8726842,\n        -2.4470332,\n        -2.1778688,\n        -2.242843,\n        -2.386395,\n        -2.1215289,\n        -2.0632644,\n        -1.896643,\n        -1.9706954,\n        -2.3573024,\n        -2.4457123,\n        -2.3162618,\n        -2.1529365,\n        -2.1207964,\n        -2.215731,\n        -2.2784538,\n        -2.1408665,\n        -2.7754986,\n        -3.0355916,\n        -2.2823129,\n        -2.0461767,\n        -2.0364351,\n        -2.25437,\n        -1.9475571,\n        -3.3389747,\n        -2.6050103,\n        -2.2292125,\n        -2.0845304,\n        -2.9060955,\n        -2.1522748,\n        -2.150977,\n        -2.4641476,\n        -2.7047272,\n        -2.4684005,\n        -2.2043037,\n        -2.0582888,\n        -2.089416,\n        -2.650215,\n        -2.380549,\n        -3.1685722,\n        -2.3977356,\n        -2.8262534,\n        -2.6793022,\n        -2.393338,\n        -2.0523756,\n        -3.2790527,\n        -2.5578258,\n        -3.6209726,\n        -2.1033509,\n        -2.0837903,\n        -2.463229,\n        -2.0076504,\n        -2.3548303,\n        -2.7862737,\n        -1.9345337,\n        -4.3442693,\n        -2.0491817,\n        -2.2950623,\n        -2.600202,\n        -2.5076246,\n        
-2.4542227,\n        -2.4990933,\n        -2.511882,\n        -2.5147402,\n        -2.4657295,\n        -2.9325738,\n        -2.552659,\n        -2.4603546,\n        -2.262115,\n        -2.2550583,\n        -2.126276,\n        -2.0902517,\n        -2.7578685,\n        -3.3128507,\n        -2.5182905,\n        -2.8661375,\n        -3.2242167,\n        -4.054849,\n        -3.5046144,\n        -3.8067148,\n        -2.4343145,\n        -4.1870623,\n        -2.4265811,\n        -2.5110447,\n        -2.583645,\n        -3.0129137,\n        -4.2257824,\n        -2.5676336,\n        -3.0421224,\n        -2.5205665,\n        -2.580509,\n        -3.8105268,\n        -4.5849757,\n        -3.0702279,\n        -4.3156548,\n        -5.339658,\n        -2.4640913,\n        -3.1956568,\n        -3.5012345,\n        -2.1150496,\n        -2.41229,\n        -3.3688462,\n        -2.4248898,\n        -2.633352,\n        -2.5163412,\n        -5.524555,\n        -8.271144,\n        -4.23015,\n        -2.625746,\n        -3.109844,\n        -2.6400166,\n        -2.247166,\n        -3.2418237,\n        -4.3864026,\n        -3.2039948,\n        -2.212502,\n        -3.369073,\n        -2.2032907,\n        -2.6539803,\n        -3.2240422,\n        -2.6461356,\n        -3.6769938,\n        -4.2367015,\n        -3.5965903,\n        -3.993267,\n        -2.508745,\n        -2.693874,\n        -4.23602,\n        -3.8297422,\n        -3.0399187,\n        -2.7611825,\n        -2.7981215,\n        -3.327198,\n        -2.5528767,\n        -3.2257109,\n        -4.360021,\n        -4.2763367,\n        -2.8529687,\n        -2.5961845,\n        -3.0175123,\n        -2.844181,\n        -3.5635657,\n        -3.4486747,\n        -3.3586383,\n        -2.439909,\n        -5.841004,\n        -3.0691068,\n        -6.45293,\n        -4.8199387,\n        -2.8189893,\n        -2.73818,\n        -5.8322053,\n        -3.7959895,\n        -2.4074452,\n        -3.7499707,\n        -3.3297813,\n        -2.226307,\n      
  -3.956066,\n        -5.8001723,\n        -5.681697,\n        -2.690155,\n        -2.4887192,\n        -3.4882722,\n        -3.751763,\n        -4.210731,\n        -2.3716323,\n        -4.4690185,\n        -4.7768936,\n        -4.98789,\n        -3.72979,\n        -4.115402,\n        -5.0938144,\n        -6.105075,\n        -3.920368,\n        -2.9314928,\n        -2.9202876,\n        -2.5164769,\n        -3.3145072,\n        -2.9779308,\n        -3.7029696,\n        -5.565864,\n        -4.0892644,\n        -3.9738,\n        -4.5081983,\n        -4.2934494,\n        -4.2340026,\n        -2.7774496,\n        -3.2071543,\n        -3.9750972,\n        -4.6741076,\n        -3.725502,\n        -2.9946032,\n        -4.4441195,\n        -4.248562,\n        -3.0811677,\n        -3.390902,\n        -2.641154,\n        -2.4110358,\n        -3.4044423,\n        -2.811184\n      ],\n      \"pointIndex\": [\n        0,\n        1027,\n        256,\n        795885189,\n        932794105,\n        366304510,\n        302620956,\n        730686999,\n        98322627,\n        324446893,\n        934006890,\n        372306516,\n        549635283,\n        882361931,\n        147958088,\n        101104940,\n        7008949,\n        199760027,\n        1053712108,\n        372334067,\n        290391436,\n        730642959,\n        781867907,\n        833752788,\n        110527534,\n        85534968,\n        482000475,\n        101105239,\n        17006988,\n        302636524,\n        418677249,\n        412110020,\n        8606079,\n        90849690,\n        100584802,\n        751480431,\n        83486627,\n        47802095,\n        23221628,\n        158822718,\n        405743321,\n        292597151,\n        49815305,\n        730917521,\n        849220238,\n        435379570,\n        12779126,\n        110101638,\n        40025284,\n        109949597,\n        110527616,\n        65076474,\n        26444367,\n        629497573,\n        100483916,\n        131993,\n       
 628893666,\n        985067299,\n        217795360,\n        1084043019,\n        992418307,\n        861570920,\n        92095961,\n        730820283,\n        629262542,\n        730374661,\n        927724258,\n        649998783,\n        17107639,\n        195867320,\n        31649727,\n        237391626,\n        82666283,\n        1040376095,\n        301244563,\n        298625690,\n        785725167,\n        595803452,\n        730345084,\n        731101690,\n        100641805,\n        1074847555,\n        372208448,\n        943362196,\n        853426892,\n        730254287,\n        661201039,\n        91968908,\n        1027\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 6914291515535893206\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7116114,\n        -1.7193624,\n        -1.7149334,\n        -1.7765579,\n        -1.7227011,\n        -1.7280554,\n        -1.803988,\n        -1.8192419,\n        -1.880212,\n        -1.738068,\n        -1.726055,\n        -1.7334659,\n        -1.7686765,\n        -1.9413058,\n        -1.8400279,\n        -1.8209994,\n        -1.8396332,\n        -1.918436,\n        -1.9853563,\n        -1.7632383,\n        -1.870952,\n        -1.7960379,\n        -1.8212568,\n        -1.847499,\n        -1.735794,\n        -1.8251816,\n        -1.8882881,\n        -2.0010302,\n        -1.9731063,\n        -1.9634166,\n        -1.8403689,\n        -1.8324114,\n        -1.832549,\n        -1.9132676,\n        -2.0487852,\n        -1.9600496,\n        -1.9369367,\n        -2.0255048,\n        -1.9971377,\n        -1.7876623,\n        -1.9030501,\n        -1.9452527,\n        -1.9870477,\n        -1.8936048,\n        -2.052909,\n        
-2.2554862,\n        -1.916798,\n        -2.0171506,\n        -1.947983,\n        -1.9357775,\n        -1.8765414,\n        -1.9844109,\n        -2.1523173,\n        -1.8931376,\n        -2.2072728,\n        -2.0916834,\n        -2.2469761,\n        -2.0514321,\n        -2.061675,\n        -2.477312,\n        -2.159413,\n        -1.8466673,\n        -1.9504511,\n        -2.0847456,\n        -2.6211605,\n        -2.119909,\n        -1.9779993,\n        -2.0145493,\n        -2.2308486,\n        -2.5133805,\n        -2.3332384,\n        -3.3734694,\n        -2.7167888,\n        -2.2176805,\n        -2.0644975,\n        -3.0167255,\n        -2.115833,\n        -2.5317225,\n        -2.028342,\n        -2.6108594,\n        -3.0894272,\n        -2.1356087,\n        -2.08475,\n        -2.2557914,\n        -2.4471416,\n        -2.0012484,\n        -2.0584621,\n        -2.0478685,\n        -2.1917071,\n        -2.1792703,\n        -2.0852518,\n        -2.4448123,\n        -2.508048,\n        -2.3951724,\n        -2.2761412,\n        -2.202093,\n        -2.1741457,\n        -2.3569362,\n        -2.2688167,\n        -1.9402057,\n        -2.3326848,\n        -1.9554741,\n        -2.4394348,\n        -2.1338592,\n        -2.2859528,\n        -3.0535233,\n        -2.6992383,\n        -2.3668253,\n        -1.9805595,\n        -2.2191916,\n        -2.9119806,\n        -3.0231638,\n        -3.7940264,\n        -3.472415,\n        -2.2586713,\n        -2.8812935,\n        -2.508815,\n        -2.3506222,\n        -2.7313209,\n        -3.2039728,\n        -2.7354372,\n        -2.2849772,\n        -2.4973497,\n        -2.3568068,\n        -2.6124933,\n        -2.632691,\n        -2.060145,\n        -2.2867982,\n        -3.925342,\n        -3.8566859,\n        -4.712603,\n        -2.7829478,\n        -2.603042,\n        -3.408554,\n        -2.3177102,\n        -3.0999222,\n        -4.162783,\n        -3.377817,\n        -2.3031337,\n        -3.2809277,\n        -2.8068426,\n        
-2.7257047,\n        -3.1452985,\n        -3.7654269,\n        -3.3857672,\n        -3.0550027,\n        -3.1665387,\n        -6.631067,\n        -3.0573728,\n        -2.56562,\n        -3.0590615,\n        -4.4657974,\n        -3.4445226,\n        -2.5729485,\n        -3.8201149,\n        -3.057486,\n        -3.3230453,\n        -4.286448,\n        -6.512903,\n        -2.8680158,\n        -2.9603152,\n        -4.0648785,\n        -3.4620616,\n        -3.6702805,\n        -2.4065633,\n        -2.529432,\n        -3.29861,\n        -3.702016,\n        -2.990955,\n        -2.8959627,\n        -2.6874902,\n        -2.3227425,\n        -2.2434037,\n        -2.4430377,\n        -3.0195284,\n        -4.2813425,\n        -3.0892925,\n        -3.2313266,\n        -2.6872704,\n        -4.8481565,\n        -2.2721426,\n        -2.767864,\n        -2.378176,\n        -2.7616863,\n        -3.8259947,\n        -4.6816,\n        -3.7140455,\n        -2.9816554,\n        -2.5336738,\n        -2.459883,\n        -3.1654189,\n        -2.6054814,\n        -3.4753697,\n        -2.4494786,\n        -2.881146,\n        -3.305719,\n        -3.675132,\n        -4.84059,\n        -3.1957328,\n        -2.6364293,\n        -2.0368037,\n        -2.454147,\n        -4.4343657,\n        -2.8810613,\n        -2.626602,\n        -2.4633522,\n        -3.2197614,\n        -4.500364,\n        -3.1722047,\n        -2.9617815,\n        -2.6197367,\n        -3.4670172,\n        -4.1381783,\n        -4.145891,\n        -5.9670277,\n        -2.559469,\n        -2.4521472,\n        -2.6493185,\n        -2.1386209,\n        -3.3228016,\n        -2.2299201,\n        -3.2444708,\n        -5.173487,\n        -3.8103428,\n        -3.3155723,\n        -6.7491,\n        -5.1732445,\n        -4.4776053,\n        -5.708737,\n        -4.027221,\n        -6.0136676,\n        -5.257943,\n        -3.108551,\n        -2.6943,\n        -3.427133,\n        -2.8769026,\n        -3.136073,\n        -3.536758,\n        
-2.8335114,\n        -5.6172113,\n        -3.6632707,\n        -2.8230252,\n        -7.357853,\n        -2.4444187,\n        -4.881888,\n        -2.7099879,\n        -3.6604736,\n        -2.8145733,\n        -3.1300023,\n        -4.95443,\n        -2.9404337,\n        -3.2223191,\n        -3.1461432,\n        -2.5188353,\n        -3.2525144,\n        -6.871737\n      ],\n      \"pointIndex\": [\n        0,\n        1018,\n        256,\n        519300794,\n        146506775,\n        85205723,\n        85851963,\n        33333624,\n        608728875,\n        768456528,\n        12989886,\n        251283549,\n        569589069,\n        109015562,\n        842669925,\n        383253129,\n        99847830,\n        108087464,\n        150587725,\n        16700547,\n        276942475,\n        613190057,\n        526605359,\n        199033721,\n        429988369,\n        895354274,\n        719643584,\n        526507206,\n        638148238,\n        100129074,\n        1023832956,\n        100457226,\n        784201501,\n        608676791,\n        455582248,\n        96558550,\n        100475878,\n        99780633,\n        150651584,\n        526863024,\n        496725642,\n        439743694,\n        231269939,\n        608492494,\n        618537702,\n        46730916,\n        764339704,\n        608577466,\n        720190978,\n        100555791,\n        368632517,\n        519225434,\n        714819425,\n        992735130,\n        987540509,\n        17249233,\n        31717446,\n        148305268,\n        16717006,\n        617825478,\n        617883879,\n        951104530,\n        951679556,\n        102869664,\n        720181002,\n        598194776,\n        759237020,\n        863257752,\n        923809299,\n        178763214,\n        202905107,\n        156826406,\n        201842509,\n        96149979,\n        159108543,\n        816172591,\n        145758374,\n        314526868,\n        952495265,\n        377269834,\n        519281456,\n        
720019725,\n        818326958,\n        224983534,\n        720093826,\n        395904528,\n        840796964,\n        1028295913,\n        1018\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 3787296109052444972\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.7822136,\n        -1.7858903,\n        -1.7831833,\n        -1.8468641,\n        -1.8153635,\n        -1.7894135,\n        -1.7899549,\n        -1.8472897,\n        -1.874336,\n        -1.8338463,\n        -1.8562658,\n        -1.7948748,\n        -1.8154186,\n        -1.8210052,\n        -1.7925607,\n        -1.9889343,\n        -1.9014237,\n        -1.8876861,\n        -1.9601402,\n        -1.8670936,\n        -1.9100761,\n        -2.0564911,\n        -1.9975893,\n        -1.9024395,\n        -1.9173448,\n        -1.8259848,\n        -1.9069486,\n        -1.8745391,\n        -1.845707,\n        -1.8465031,\n        -1.8662122,\n        -1.990147,\n        -2.617406,\n        -2.0310469,\n        -1.9210284,\n        -2.0028937,\n        -1.9912771,\n        -2.1610796,\n        -2.0602884,\n        -1.969253,\n        -1.9323772,\n        -2.0420732,\n        -2.0165038,\n        -2.0973177,\n        -2.3302162,\n        -2.0785978,\n        -2.0696776,\n        -2.146571,\n        -2.0262496,\n        -1.9717865,\n        -2.0699093,\n        -1.9783448,\n        -1.8757489,\n        -1.9281723,\n        -1.9349449,\n        -1.9520977,\n        -1.876881,\n        -1.8879987,\n        -2.0160127,\n        -1.9712034,\n        -2.0479581,\n        -2.0290914,\n        -1.9482405,\n        -2.1074188,\n        -2.1253855,\n        -3.0996,\n        -2.7313483,\n        -2.421764,\n        -2.6459022,\n        
-2.2389348,\n        -2.3732555,\n        -2.0153146,\n        -2.3576233,\n        -2.0678961,\n        -2.3686795,\n        -2.6385045,\n        -2.2504377,\n        -2.6802177,\n        -2.5189142,\n        -2.7589202,\n        -2.4385686,\n        -2.158965,\n        -2.5009072,\n        -2.561385,\n        -2.4129808,\n        -2.0552092,\n        -2.8681686,\n        -2.228679,\n        -2.40116,\n        -2.4174414,\n        -2.507823,\n        -2.4867747,\n        -2.6855984,\n        -2.5417142,\n        -2.5246844,\n        -2.714921,\n        -2.457773,\n        -2.2524397,\n        -3.1546721,\n        -2.350862,\n        -2.2299194,\n        -2.2532384,\n        -2.2387931,\n        -3.050243,\n        -2.3252528,\n        -2.0254424,\n        -2.1375828,\n        -1.981914,\n        -1.9652152,\n        -2.5572667,\n        -2.0133367,\n        -2.806495,\n        -1.9728041,\n        -1.9287292,\n        -1.9306608,\n        -2.4589405,\n        -2.3715656,\n        -2.374967,\n        -2.3084733,\n        -2.3992236,\n        -2.0633545,\n        -2.0626562,\n        -2.3103857,\n        -2.2842586,\n        -2.1463304,\n        -2.821505,\n        -1.9568058,\n        -4.181665,\n        -3.561463,\n        -2.2161896,\n        -5.574698,\n        -4.390722,\n        -8.17298,\n        -3.1846137,\n        -4.6807904,\n        -2.7791724,\n        -3.1926627,\n        -2.6756501,\n        -3.4955966,\n        -3.6733696,\n        -2.2717636,\n        -2.7559743,\n        -3.0809062,\n        -3.874045,\n        -3.0782855,\n        -2.6614878,\n        -5.440899,\n        -2.3082488,\n        -2.3861485,\n        -4.2845254,\n        -4.9419475,\n        -3.2617643,\n        -2.9930928,\n        -2.982296,\n        -3.7816305,\n        -2.7534387,\n        -4.3778343,\n        -5.1580095,\n        -2.6776557,\n        -2.932181,\n        -3.4272132,\n        -2.646206,\n        -4.590671,\n        -2.2563183,\n        -2.7549548,\n        
-3.39904,\n        -3.9190156,\n        -2.6796625,\n        -3.0648444,\n        -2.9834712,\n        -3.099239,\n        -2.346894,\n        -3.2532306,\n        -3.3351984,\n        -3.9248145,\n        -2.4177618,\n        -3.646646,\n        -3.8769495,\n        -3.1397655,\n        -3.476859,\n        -4.1073747,\n        -4.064442,\n        -5.118728,\n        -3.5872788,\n        -2.9099495,\n        -2.8846123,\n        -3.9311929,\n        -3.6287043,\n        -2.6901407,\n        -2.5542114,\n        -3.933894,\n        -6.483869,\n        -3.1626828,\n        -4.0844603,\n        -4.0413003,\n        -2.6275907,\n        -2.607264,\n        -3.3051088,\n        -3.2341933,\n        -3.383726,\n        -2.9389303,\n        -3.2197423,\n        -3.4664629,\n        -2.446136,\n        -4.020691,\n        -4.2334065,\n        -4.3312907,\n        -3.316707,\n        -3.71078,\n        -2.5877986,\n        -3.0791035,\n        -3.800262,\n        -4.5766344,\n        -2.7088625,\n        -2.4924634,\n        -3.4855504,\n        -2.1242063,\n        -5.634541,\n        -2.2405314,\n        -3.9521825,\n        -3.410082,\n        -3.8928645,\n        -2.0707848,\n        -3.598642,\n        -4.171565,\n        -2.0834177,\n        -3.7352693,\n        -2.6254995,\n        -2.2136407,\n        -3.8100197,\n        -3.0341992,\n        -4.7872925,\n        -3.4985518,\n        -2.4324002,\n        -2.3759787,\n        -3.1454952,\n        -2.579984,\n        -3.0597894,\n        -3.0585287,\n        -2.5369391,\n        -6.497095,\n        -2.2841067,\n        -2.8507705,\n        -4.7513022,\n        -4.9640994,\n        -2.3823218,\n        -2.373228,\n        -3.3436816,\n        -3.097494,\n        -3.3242962,\n        -2.5567005,\n        -3.1087434,\n        -3.958174,\n        -4.2728915,\n        -2.1432495\n      ],\n      \"pointIndex\": [\n        4,\n        1034,\n        255,\n        1084224352,\n        612775973,\n        775011877,\n        
14885892,\n        694393224,\n        724387472,\n        16039808,\n        484741238,\n        31984030,\n        136453127,\n        848243867,\n        661869503,\n        98261798,\n        729204304,\n        13155652,\n        20208865,\n        8585069,\n        766293447,\n        98823435,\n        1041024517,\n        1094378143,\n        106654982,\n        729925165,\n        729287122,\n        526260818,\n        564845704,\n        546415658,\n        273691629,\n        963105285,\n        98033810,\n        623036457,\n        142564715,\n        1025132454,\n        729907883,\n        88257394,\n        220961513,\n        729425505,\n        301172538,\n        526260874,\n        797634529,\n        295643722,\n        68737602,\n        852032819,\n        12991394,\n        103148435,\n        97887981,\n        188230236,\n        1025796622,\n        773904579,\n        379029408,\n        249903464,\n        98694588,\n        98319010,\n        1058839226,\n        98070031,\n        12780293,\n        370194563,\n        98598047,\n        97856078,\n        1015206828,\n        396894883,\n        985009838,\n        135930133,\n        41674035,\n        145270276,\n        975948990,\n        963749947,\n        106297018,\n        10881852,\n        641156228,\n        730126360,\n        729488250,\n        1057291153,\n        659342375,\n        98826916,\n        414199327,\n        370617982,\n        526304407,\n        220481755,\n        586276802,\n        31893725,\n        939302324,\n        584880609,\n        686776491,\n        1095865273\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": -5672753689059104532\n    },\n    {\n      \"version\": \"2.0\",\n 
     \"weight\": [\n        -1.6926837,\n        -1.6953135,\n        -1.693962,\n        -1.705493,\n        -1.7032226,\n        -1.6956446,\n        -1.7786808,\n        -1.7313516,\n        -1.7263241,\n        -1.8418415,\n        -1.7423624,\n        -1.7487277,\n        -1.7095972,\n        -1.836097,\n        -1.7796501,\n        -1.7407198,\n        -1.8755876,\n        -1.784155,\n        -1.7682477,\n        -1.8493526,\n        -1.9121087,\n        -1.871401,\n        -1.8240936,\n        -1.7850237,\n        -1.7668632,\n        -1.8328153,\n        -1.8089825,\n        -1.8687636,\n        -1.8616337,\n        -1.8001138,\n        -1.7807441,\n        -1.8213158,\n        -1.9684231,\n        -1.9526222,\n        -2.0338533,\n        -2.1408112,\n        -2.2852738,\n        -1.858178,\n        -1.9813877,\n        -1.9749699,\n        -2.139913,\n        -1.9170501,\n        -2.0204365,\n        -1.9807174,\n        -1.8917884,\n        -2.1505165,\n        -1.8590758,\n        -2.3220747,\n        -1.8225602,\n        -1.8185203,\n        -1.788242,\n        -1.8414049,\n        -1.923939,\n        -1.9286638,\n        -1.8780352,\n        -2.113653,\n        -2.0078194,\n        -2.1109946,\n        -2.1245277,\n        -2.000779,\n        -1.9095522,\n        -2.0611165,\n        -1.7839305,\n        -1.8407682,\n        -2.0298007,\n        -1.9727197,\n        -2.399584,\n        -2.4340546,\n        -2.8725019,\n        -2.3679655,\n        -2.2451673,\n        -2.6139588,\n        -3.44795,\n        -2.4071953,\n        -3.473595,\n        -1.9029214,\n        -3.50474,\n        -2.0787017,\n        -1.9874296,\n        -2.3343253,\n        -2.140817,\n        -2.51489,\n        -2.4835806,\n        -2.2768292,\n        -2.1408467,\n        -2.2224417,\n        -2.0280964,\n        -2.250768,\n        -2.6658762,\n        -1.8936803,\n        -2.19682,\n        -2.5010095,\n        -2.241517,\n        -1.9188976,\n        -2.3375504,\n        
-2.6596344,\n        -2.5181677,\n        -1.9098765,\n        -2.1285794,\n        -3.0819921,\n        -1.9810567,\n        -2.025805,\n        -1.824923,\n        -2.229054,\n        -2.2262502,\n        -2.0635526,\n        -2.0247133,\n        -2.0068202,\n        -2.379216,\n        -2.650716,\n        -2.270745,\n        -2.480365,\n        -2.2698033,\n        -2.1786253,\n        -2.6187828,\n        -2.6349397,\n        -2.4408374,\n        -2.16784,\n        -2.6513734,\n        -2.3868005,\n        -3.7567942,\n        -2.0029392,\n        -2.3018806,\n        -2.119536,\n        -2.4022324,\n        -2.0750139,\n        -1.82465,\n        -1.9100113,\n        -4.3763776,\n        -2.2374268,\n        -2.2899783,\n        -3.27979,\n        -3.7282438,\n        -2.8306334,\n        -2.806642,\n        -3.3831556,\n        -2.5906067,\n        -2.9573925,\n        -3.3446858,\n        -3.4346583,\n        -2.380708,\n        -2.54247,\n        -2.5066462,\n        -6.5671644,\n        -4.9619064,\n        -5.505471,\n        -3.6775048,\n        -5.238884,\n        -2.4483514,\n        -4.7738414,\n        -5.076409,\n        -2.9238966,\n        -2.0590265,\n        -5.9587255,\n        -4.4078083,\n        -3.310751,\n        -2.1367946,\n        -3.657216,\n        -2.1717088,\n        -2.6528625,\n        -4.5085382,\n        -2.6432714,\n        -2.179861,\n        -2.646898,\n        -3.457495,\n        -2.911759,\n        -2.9127052,\n        -2.6047547,\n        -4.2863765,\n        -3.0610344,\n        -3.6705728,\n        -5.0402956,\n        -2.730269,\n        -2.7664797,\n        -2.209829,\n        -2.28492,\n        -2.6362314,\n        -2.744169,\n        -2.679837,\n        -2.1399748,\n        -4.5384364,\n        -6.703098,\n        -2.5642817,\n        -2.6404674,\n        -4.9929314,\n        -5.103037,\n        -3.0537462,\n        -2.4483762,\n        -2.2775574,\n        -2.751555,\n        -2.4706,\n        -5.9384184,\n        
-3.2506516,\n        -2.9461324,\n        -2.9949832,\n        -1.9924194,\n        -2.587697,\n        -2.8196552,\n        -2.4542396,\n        -4.5523725,\n        -3.297143,\n        -2.5533626,\n        -4.3790913,\n        -3.1475134,\n        -5.38378,\n        -2.4848385,\n        -2.6386168,\n        -2.9099476,\n        -3.3272696,\n        -2.4708343,\n        -4.6673064,\n        -4.429226,\n        -2.1829848,\n        -2.2980208,\n        -2.3107493,\n        -3.160515,\n        -2.5365744,\n        -3.2322974,\n        -4.24716,\n        -4.0019116,\n        -2.9284987,\n        -2.5140069,\n        -2.3332412,\n        -4.2852173,\n        -2.7014065,\n        -3.1869707,\n        -4.6838903,\n        -2.4974182,\n        -2.6522963,\n        -4.6085706,\n        -4.223153,\n        -2.8773668,\n        -2.690828,\n        -3.079323,\n        -3.2760134,\n        -2.5726979,\n        -2.3127034,\n        -3.4032867,\n        -2.7532125,\n        -3.2214618,\n        -3.6197226,\n        -4.10335,\n        -4.7822146,\n        -2.63212,\n        -2.7473643,\n        -3.254893,\n        -3.0988417,\n        -2.918465,\n        -3.0267155,\n        -3.5459418,\n        -2.644576,\n        -3.4331813,\n        -2.2789237,\n        -2.186767,\n        -3.0235982,\n        -2.0071416\n      ],\n      \"pointIndex\": [\n        0,\n        1031,\n        256,\n        157718647,\n        731955296,\n        593370763,\n        10755667,\n        735980882,\n        101284650,\n        101194541,\n        621141072,\n        912832920,\n        864309552,\n        1005297019,\n        735936365,\n        592170510,\n        749188869,\n        101194704,\n        101395734,\n        470901845,\n        236753025,\n        327069920,\n        1002660931,\n        971316482,\n        92864529,\n        369172905,\n        501405610,\n        98224,\n        729189570,\n        7168367,\n        756265080,\n        735931688,\n        225883162,\n        
289334653,\n        138585232,\n        110852237,\n        102112011,\n        237598655,\n        456664,\n        314402926,\n        318750991,\n        779055088,\n        411582759,\n        111475703,\n        972015691,\n        836566216,\n        130646318,\n        553902566,\n        458058464,\n        802460600,\n        9533434,\n        736873773,\n        101714220,\n        374896746,\n        258505784,\n        618452152,\n        585782867,\n        1086968543,\n        403115858,\n        736773555,\n        984935064,\n        297424123,\n        211521903,\n        375247243,\n        36916596,\n        374889458,\n        900658965,\n        147081637,\n        787552544,\n        227564148,\n        101799732,\n        1048472888,\n        866953296,\n        314235899,\n        30104660,\n        226930488,\n        932259359,\n        261088710,\n        278470411,\n        353899952,\n        1084589557,\n        903296329,\n        736039268,\n        385972574,\n        375830241,\n        1049862522,\n        736946731,\n        1036225808,\n        1031\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 256,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": 3900708465367051125\n    },\n    {\n      \"version\": \"2.0\",\n      \"weight\": [\n        -1.773475,\n        -1.7750823,\n        -1.8039148,\n        -1.7926276,\n        -1.8222865,\n        -1.845183,\n        -1.8057612,\n        -1.8494796,\n        -1.7959207,\n        -1.8870348,\n        -1.8974857,\n        -1.876898,\n        -1.8554938,\n        -1.8363189,\n        -1.8354127,\n        -1.9268205,\n        -1.8698647,\n        -1.885897,\n        -1.8517307,\n        -1.9742464,\n        -1.8898609,\n        -1.8985574,\n        -2.143419,\n        
-1.8942182,\n        -1.9570466,\n        -1.9082452,\n        -1.8667456,\n        -1.9100269,\n        -1.8808385,\n        -1.9234487,\n        -1.8657014,\n        -2.015931,\n        -2.212913,\n        -2.079569,\n        -1.9724065,\n        -2.1434267,\n        -1.9690714,\n        -1.9440209,\n        -1.9119865,\n        -2.007949,\n        -2.0713964,\n        -1.9026221,\n        -2.139437,\n        -1.9902244,\n        -1.9000142,\n        -2.1980624,\n        -2.1457427,\n        -2.0067115,\n        -2.216963,\n        -2.0078409,\n        -2.0122359,\n        -1.9996728,\n        -1.915653,\n        -2.0284946,\n        -2.0626378,\n        -1.9454858,\n        -2.1004593,\n        -2.1423385,\n        -2.129475,\n        -1.9262174,\n        -1.977599,\n        -1.8705202,\n        -1.9307132,\n        -2.121574,\n        -2.1680737,\n        -2.8059728,\n        -2.4647405,\n        -2.2606895,\n        -2.313719,\n        -2.2319176,\n        -2.0318754,\n        -2.2380648,\n        -2.8375976,\n        -2.01969,\n        -2.7087097,\n        -3.1395059,\n        -2.647385,\n        -1.9943793,\n        -2.3498397,\n        -2.0658169,\n        -2.610816,\n        -2.1383333,\n        -2.4891677,\n        -2.1063142,\n        -1.9341099,\n        -2.4924214,\n        -2.5667586,\n        -2.282426,\n        -2.0006623,\n        -1.9943624,\n        -2.4567404,\n        -2.231521,\n        -2.3374805,\n        -2.4438033,\n        -2.332853,\n        -2.3785813,\n        -2.0585291,\n        -2.3684952,\n        -2.5352397,\n        -2.6059043,\n        -2.9607844,\n        -2.7364335,\n        -2.5759447,\n        -2.209563,\n        -2.3307643,\n        -2.3000574,\n        -2.1801481,\n        -2.1513462,\n        -2.1336575,\n        -2.316492,\n        -2.228415,\n        -2.2936692,\n        -2.0270746,\n        -2.1820061,\n        -2.6000726,\n        -2.6558099,\n        -2.1633737,\n        -2.7425945,\n        -2.7003589,\n        
-2.0453358,\n        -2.0839334,\n        -2.2034597,\n        -2.253998,\n        -2.482663,\n        -1.929147,\n        -3.4676502,\n        -2.129417,\n        -2.6941292,\n        -7.0536947,\n        -3.806295,\n        -2.4286122,\n        -7.0180564,\n        -3.3881392,\n        -5.136512,\n        -3.3981059,\n        -3.8108766,\n        -3.3840854,\n        -4.1985803,\n        -7.441683,\n        -4.710874,\n        -4.6753526,\n        -5.147007,\n        -2.0370023,\n        -2.9905117,\n        -4.3437905,\n        -4.3806868,\n        -3.3249528,\n        -2.386633,\n        -4.0770426,\n        -4.478549,\n        -3.0408673,\n        -3.3467047,\n        -7.755527,\n        -4.897061,\n        -4.6437964,\n        -3.2557454,\n        -2.3448482,\n        -2.6056879,\n        -2.9355764,\n        -2.6539245,\n        -2.3337276,\n        -4.9639745,\n        -3.606498,\n        -2.7616386,\n        -2.3524702,\n        -2.9494593,\n        -3.1875296,\n        -3.3687086,\n        -3.209242,\n        -2.2259765,\n        -3.8973262,\n        -2.767681,\n        -3.0772197,\n        -4.175838,\n        -3.2830734,\n        -2.7101471,\n        -2.3953116,\n        -2.1449072,\n        -3.6896677,\n        -2.0912066,\n        -4.703734,\n        -3.083674,\n        -3.4057825,\n        -2.9368424,\n        -4.7275095,\n        -2.6227207,\n        -2.6972537,\n        -3.3707354,\n        -3.1903024,\n        -3.4124644,\n        -3.5304496,\n        -4.147959,\n        -4.2551765,\n        -2.4099114,\n        -2.739606,\n        -3.1460524,\n        -2.7673385,\n        -3.9829004,\n        -5.0396767,\n        -8.161678,\n        -4.1825,\n        -3.8418117,\n        -3.1704817,\n        -4.044603,\n        -4.431857,\n        -4.0561657,\n        -3.804961,\n        -2.8525105,\n        -3.0746922,\n        -4.928778,\n        -2.9308677,\n        -2.5835655,\n        -2.5127409,\n        -3.5566525,\n        -2.3309612,\n        
-3.8846962,\n        -2.4834597,\n        -2.2897837,\n        -3.1633084,\n        -3.7128065,\n        -3.0293717,\n        -3.5332227,\n        -3.3851402,\n        -2.5752258,\n        -3.3975627,\n        -3.2494516,\n        -2.9095368,\n        -3.657072,\n        -2.277026,\n        -9.3977165,\n        -2.7371886,\n        -2.8658223,\n        -2.9208083,\n        -3.3934886,\n        -2.8629465,\n        -2.7921765,\n        -4.398424,\n        -3.6718414,\n        -4.985429,\n        -3.2450895,\n        -2.1611288,\n        -2.948474,\n        -4.8580427,\n        -4.6616244,\n        -3.1946328,\n        -4.452989,\n        -2.2626927,\n        -3.6593711,\n        -3.1519105,\n        -4.835316,\n        -1.9358096,\n        -5.905126,\n        -4.93197,\n        -4.673987,\n        -4.10079\n      ],\n      \"pointIndex\": [\n        0,\n        1021,\n        255,\n        394238046,\n        170267394,\n        8373199,\n        17358714,\n        1058249458,\n        36850088,\n        892019326,\n        160816191,\n        438959241,\n        726546027,\n        726833720,\n        243399625,\n        212128356,\n        100009038,\n        441913846,\n        726828303,\n        726199270,\n        11636710,\n        368016567,\n        227786778,\n        986704158,\n        509806210,\n        633471384,\n        170358897,\n        553937212,\n        557851753,\n        984184861,\n        99845407,\n        367788974,\n        997148214,\n        140058925,\n        915193933,\n        86729994,\n        710561,\n        937253755,\n        725964970,\n        471159564,\n        372941430,\n        700960540,\n        861110378,\n        1044283242,\n        102025688,\n        91630475,\n        110381205,\n        167225102,\n        55448283,\n        299567365,\n        62334951,\n        406814666,\n        725997473,\n        579786519,\n        223063024,\n        726472639,\n        236069841,\n        433698543,\n        
470115719,\n        726415211,\n        13675477,\n        170381457,\n        669794301,\n        295154386,\n        137527569,\n        1041628532,\n        23196972,\n        622673270,\n        807549675,\n        761847138,\n        622949123,\n        794009092,\n        99383575,\n        851860179,\n        451380848,\n        235079501,\n        726276406,\n        14670191,\n        284459662,\n        36573442,\n        1054928935,\n        895539518,\n        150876656,\n        368018464,\n        100213126,\n        726626683,\n        968342721,\n        1064327134\n      ],\n      \"storeSequenceIndicesEnabled\": false,\n      \"size\": 255,\n      \"capacity\": 256,\n      \"initialAcceptFraction\": 0.125,\n      \"timeDecay\": 1.0e-4,\n      \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n      \"maxSequenceIndex\": 1493,\n      \"compressed\": true,\n      \"randomSeed\": -4321941696720968038\n    }\n  ],\n  \"compactRandomCutTreeStates\": [\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          153,\n          274729449,\n          778177146,\n          792817978,\n          18480504,\n          483695922,\n          201089312,\n          407829175,\n          950310681,\n          803686685,\n          117207760,\n          590894414,\n          104890356,\n          1066032007,\n          1053856949,\n          16690170,\n          49\n        ],\n        \"cutValueData\": [\n          66,\n          33,\n          -38,\n          92,\n          66,\n          12,\n          -52,\n          -25,\n          66,\n          -126,\n          -127,\n          111,\n          65,\n          34,\n          82,\n          -1,\n 
         66,\n          60,\n          -7,\n          -37,\n          66,\n          -120,\n          -48,\n          -117,\n          65,\n          45,\n          -20,\n          -73,\n          65,\n          -123,\n          84,\n          -79,\n          66,\n          66,\n          26,\n          -44,\n          66,\n          -118,\n          -89,\n          -48,\n          66,\n          -101,\n          -117,\n          61,\n          65,\n          39,\n          20,\n          24,\n          65,\n          38,\n          111,\n          -1,\n          65,\n          86,\n          85,\n          -117,\n          65,\n          -128,\n          -104,\n          -50,\n          66,\n          -117,\n          -33,\n          -96,\n          65,\n          68,\n          16,\n          118,\n          65,\n          115,\n          42,\n          34,\n          65,\n          -117,\n          -22,\n          -81,\n          65,\n          -97,\n          93,\n          75,\n          66,\n          -122,\n          -25,\n          -15,\n          65,\n          78,\n          -119,\n          85,\n          65,\n          84,\n          -104,\n          101,\n          65,\n          108,\n          111,\n          -61,\n          65,\n          -115,\n          -39,\n          124,\n          65,\n          -125,\n          -41,\n          -117,\n          65,\n          -107,\n          106,\n          49,\n          65,\n          -41,\n          -96,\n          1,\n          65,\n          50,\n          -22,\n          127,\n          65,\n          64,\n          -19,\n          87,\n          65,\n          96,\n          24,\n          101,\n          65,\n          105,\n          -4,\n          -48,\n          65,\n          118,\n          -18,\n          -85,\n          65,\n          123,\n          -6,\n          -99,\n          65,\n          -103,\n          89,\n          -33,\n          65,\n          -100,\n          -103,\n          
-97,\n          65,\n          -30,\n          -42,\n          -79,\n          65,\n          -36,\n          -68,\n          -85,\n          65,\n          60,\n          22,\n          -96,\n          65,\n          48,\n          44,\n          -29,\n          65,\n          72,\n          40,\n          -53,\n          65,\n          95,\n          -62,\n          0,\n          65,\n          95,\n          -29,\n          -78,\n          65,\n          112,\n          -76,\n          -79,\n          65,\n          117,\n          -13,\n          -103,\n          65,\n          127,\n          105,\n          103,\n          65,\n          121,\n          127,\n          89,\n          65,\n          -128,\n          127,\n          -7,\n          65,\n          -118,\n          -112,\n          -105,\n          65,\n          -110,\n          21,\n          -126,\n          65,\n          -97,\n          -113,\n          93,\n          65,\n          -104,\n          8,\n          43,\n          65,\n          -81,\n          21,\n          41,\n          65,\n          -42,\n          -62,\n          -109,\n          65,\n          33,\n          -96,\n          -121,\n          65,\n          38,\n          -124,\n          71,\n          65,\n          54,\n          58,\n          -22,\n          65,\n          71,\n          33,\n          -11,\n          65,\n          84,\n          107,\n          -117,\n          65,\n          104,\n          -26,\n          60,\n          65,\n          99,\n          -95,\n          125,\n          65,\n          112,\n          123,\n          -22,\n          65,\n          -126,\n          -54,\n          -40,\n          65,\n          -122,\n          -67,\n          127,\n          65,\n          -122,\n          38,\n          -122,\n          65,\n          -107,\n          103,\n          57,\n          65,\n          -105,\n          120,\n          -80,\n          65,\n          -99,\n          38,\n       
   91,\n          65,\n          -78,\n          -26,\n          6,\n          65,\n          -61,\n          28,\n          62,\n          65,\n          -45,\n          62,\n          8,\n          65,\n          -38,\n          -56,\n          -92,\n          65,\n          63,\n          105,\n          50,\n          65,\n          109,\n          126,\n          105,\n          65,\n          -124,\n          -83,\n          -33,\n          65,\n          -120,\n          -23,\n          -89,\n          65,\n          -118,\n          62,\n          -94,\n          65,\n          -119,\n          -103,\n          -27,\n          65,\n          -119,\n          -110,\n          54,\n          65,\n          -120,\n          -27,\n          -105,\n          65,\n          -111,\n          3,\n          -112,\n          65,\n          -86,\n          43,\n          100,\n          65,\n          -62,\n          108,\n          7,\n          65,\n          -57,\n          10,\n          -112,\n          65,\n          -40,\n          -94,\n          92,\n          65,\n          59,\n          8,\n          -88,\n          65,\n          108,\n          -23,\n          -112,\n          65,\n          -124,\n          -47,\n          63,\n          65,\n          126,\n          71,\n          -13,\n          65,\n          -127,\n          31,\n          26,\n          65,\n          -117,\n          -78,\n          -1,\n          65,\n          -113,\n          -87,\n          -53,\n          65,\n          -119,\n          -31,\n          93,\n          65,\n          -106,\n          35,\n          -87,\n          65,\n          -81,\n          -24,\n          24,\n          65,\n          -87,\n          57,\n          -109,\n          65,\n          -52,\n          127,\n          -73,\n          65,\n          -63,\n          24,\n          -1,\n          65,\n          -37,\n          -57,\n          7,\n          65,\n          -128,\n          38,\n      
    88,\n          65,\n          -119,\n          -6,\n          73,\n          65,\n          -96,\n          -93,\n          -57,\n          65,\n          -83,\n          9,\n          -33,\n          65,\n          -82,\n          89,\n          17,\n          65,\n          -87,\n          -80,\n          78,\n          65,\n          -48,\n          125,\n          -23,\n          65,\n          -43,\n          -111,\n          -128,\n          65,\n          -113,\n          -82,\n          -24,\n          65,\n          -91,\n          127,\n          89,\n          65,\n          -93,\n          -67,\n          -122,\n          65,\n          -79,\n          -102,\n          -100,\n          65,\n          -33,\n          -124,\n          -18,\n          65,\n          -48,\n          -92,\n          48,\n          65,\n          -109,\n          4,\n          -10,\n          65,\n          -96,\n          -53,\n          88,\n          65,\n          -86,\n          81,\n          29,\n          65,\n          -76,\n          10,\n          110,\n          65,\n          -75,\n          103,\n          -78,\n          65,\n          -51,\n          -100,\n          48,\n          65,\n          -116,\n          51,\n          -23,\n          65,\n          -101,\n          63,\n          -100,\n          65,\n          -97,\n          -34,\n          116,\n          65,\n          -93,\n          117,\n          -38,\n          65,\n          -90,\n          -8,\n          -105,\n          65,\n          -73,\n          -63,\n          -48,\n          65,\n          -67,\n          75,\n          11,\n          65,\n          -55,\n          35,\n          -68,\n          65,\n          -41,\n          103,\n          10,\n          65,\n          -91,\n          -84,\n          -28,\n          65,\n          -86,\n          85,\n          59,\n          65,\n          -73,\n          95,\n          -65,\n          65,\n          -65,\n          5,\n     
     106,\n          65,\n          -58,\n          -92,\n          35,\n          65,\n          -56,\n          -108,\n          84,\n          65,\n          -77,\n          -38,\n          36,\n          65,\n          -66,\n          9,\n          32,\n          65,\n          -51,\n          73,\n          12,\n          65,\n          -47,\n          -104,\n          78,\n          65,\n          -70,\n          49,\n          -65,\n          65,\n          -67,\n          -114,\n          70,\n          65,\n          -64,\n          -98,\n          36,\n          65,\n          -47,\n          -123,\n          10,\n          65,\n          -51,\n          -127,\n          112,\n          65,\n          -46,\n          96,\n          -46,\n          65,\n          -65,\n          49,\n          -57,\n          65,\n          -65,\n          -31,\n          -52,\n          65,\n          -60,\n          112,\n          27,\n          65,\n          -74,\n          -6,\n          83,\n          65,\n          -72,\n          8,\n          19,\n          65,\n          -65,\n          46,\n          30,\n          65,\n          -68,\n          75,\n          -25,\n          65,\n          -77,\n          -2,\n          122,\n          65,\n          -67,\n          47,\n          -90\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 153,\n        \"leftIndex\": [\n          0,\n          1,\n          153,\n          1038836463,\n          14053374,\n          157416309,\n          389318960,\n          689594608,\n          1\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          153,\n          1020256509,\n          47599678,\n          13320816,\n          463162548,\n          202107944,\n          0\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      
\"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 5011849921490949589,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          164,\n          229393532,\n          859171247,\n          278145025,\n          69103497,\n          257169857,\n          600725088,\n          227890209,\n          509115888,\n          193838906,\n          1058721987,\n          15036982,\n          963529275,\n          47975813,\n          773911137,\n          587104727,\n          935590460,\n          1230\n        ],\n        \"cutValueData\": [\n          66,\n          56,\n          -121,\n          -117,\n          66,\n          50,\n          20,\n          -66,\n          66,\n          48,\n          -71,\n          11,\n          65,\n          -17,\n          -106,\n          -47,\n          66,\n          48,\n          -63,\n          69,\n          66,\n          -126,\n          89,\n          -111,\n          65,\n          -87,\n          -14,\n          14,\n          66,\n          18,\n          112,\n          25,\n          66,\n          -118,\n          33,\n          -68,\n          66,\n          -104,\n          -79,\n          4,\n          65,\n          81,\n          83,\n          -81,\n          65,\n          -58,\n          -103,\n          -87,\n          66,\n          29,\n          26,\n          118,\n          66,\n          -123,\n          79,\n          24,\n          65,\n          63,\n          14,\n          -28,\n          65,\n          
-124,\n          -88,\n          15,\n          65,\n          -81,\n          -23,\n          -97,\n          65,\n          -34,\n          -126,\n          61,\n          66,\n          -127,\n          1,\n          -107,\n          65,\n          40,\n          5,\n          -59,\n          65,\n          106,\n          -120,\n          36,\n          65,\n          75,\n          -53,\n          83,\n          65,\n          -102,\n          -50,\n          -31,\n          65,\n          -82,\n          -71,\n          -62,\n          65,\n          -75,\n          -4,\n          -19,\n          65,\n          -67,\n          -75,\n          113,\n          65,\n          -39,\n          -125,\n          -5,\n          65,\n          42,\n          -98,\n          26,\n          65,\n          56,\n          -43,\n          -66,\n          65,\n          49,\n          90,\n          53,\n          65,\n          79,\n          -89,\n          -84,\n          65,\n          -121,\n          -10,\n          -113,\n          65,\n          -97,\n          -102,\n          -42,\n          65,\n          -100,\n          -81,\n          67,\n          65,\n          -84,\n          -4,\n          46,\n          65,\n          -72,\n          7,\n          -104,\n          65,\n          -46,\n          63,\n          32,\n          65,\n          45,\n          119,\n          -8,\n          65,\n          34,\n          21,\n          36,\n          65,\n          67,\n          99,\n          97,\n          65,\n          53,\n          103,\n          -35,\n          65,\n          108,\n          35,\n          -79,\n          65,\n          116,\n          102,\n          -34,\n          65,\n          -121,\n          46,\n          -71,\n          65,\n          -106,\n          56,\n          31,\n          65,\n          -96,\n          126,\n          115,\n          65,\n          -88,\n          116,\n          -96,\n          65,\n          -86,\n   
       -15,\n          -29,\n          65,\n          -70,\n          -95,\n          37,\n          65,\n          -61,\n          70,\n          -106,\n          65,\n          -50,\n          98,\n          -78,\n          65,\n          -45,\n          78,\n          -83,\n          65,\n          44,\n          113,\n          -25,\n          65,\n          32,\n          84,\n          20,\n          65,\n          64,\n          -66,\n          110,\n          65,\n          101,\n          -112,\n          -6,\n          65,\n          123,\n          113,\n          -124,\n          65,\n          -126,\n          -52,\n          -128,\n          65,\n          -101,\n          -44,\n          12,\n          65,\n          -103,\n          -28,\n          -3,\n          65,\n          -83,\n          -12,\n          78,\n          65,\n          -84,\n          121,\n          -104,\n          65,\n          -87,\n          -16,\n          80,\n          65,\n          -71,\n          -50,\n          -20,\n          65,\n          -66,\n          43,\n          -57,\n          65,\n          -58,\n          87,\n          -114,\n          65,\n          -49,\n          -10,\n          -61,\n          65,\n          -42,\n          -112,\n          -124,\n          65,\n          -37,\n          119,\n          84,\n          65,\n          85,\n          -113,\n          89,\n          65,\n          73,\n          -121,\n          93,\n          65,\n          102,\n          90,\n          -121,\n          65,\n          125,\n          56,\n          -92,\n          65,\n          106,\n          26,\n          -64,\n          65,\n          113,\n          22,\n          -3,\n          65,\n          -119,\n          -90,\n          -81,\n          65,\n          -112,\n          -20,\n          -63,\n          65,\n          -98,\n          -2,\n          120,\n          65,\n          -106,\n          -86,\n          -14,\n          65,\n          
-92,\n          -71,\n          13,\n          65,\n          -84,\n          40,\n          27,\n          65,\n          -88,\n          -76,\n          20,\n          65,\n          -75,\n          -29,\n          -86,\n          65,\n          -77,\n          19,\n          58,\n          65,\n          -70,\n          -11,\n          -76,\n          65,\n          -61,\n          -108,\n          53,\n          65,\n          -59,\n          23,\n          -78,\n          65,\n          -50,\n          -84,\n          25,\n          65,\n          -48,\n          65,\n          -46,\n          65,\n          -41,\n          -21,\n          -73,\n          65,\n          79,\n          124,\n          -110,\n          65,\n          91,\n          114,\n          84,\n          65,\n          90,\n          63,\n          -72,\n          65,\n          127,\n          -102,\n          7,\n          65,\n          123,\n          -64,\n          17,\n          65,\n          113,\n          -45,\n          113,\n          65,\n          -127,\n          17,\n          -128,\n          65,\n          -119,\n          98,\n          67,\n          65,\n          -98,\n          -127,\n          120,\n          65,\n          -95,\n          33,\n          77,\n          65,\n          -89,\n          -44,\n          53,\n          65,\n          -87,\n          -72,\n          -101,\n          65,\n          -67,\n          -103,\n          -4,\n          65,\n          -66,\n          -16,\n          -25,\n          65,\n          -49,\n          93,\n          66,\n          65,\n          -49,\n          36,\n          -109,\n          65,\n          -41,\n          117,\n          54,\n          65,\n          -34,\n          -111,\n          -114,\n          65,\n          79,\n          69,\n          20,\n          65,\n          86,\n          118,\n          -128,\n          65,\n          83,\n          34,\n          81,\n          65,\n          101,\n 
         -114,\n          94,\n          65,\n          116,\n          20,\n          -69,\n          65,\n          -126,\n          -126,\n          -105,\n          65,\n          -116,\n          15,\n          -83,\n          65,\n          -111,\n          43,\n          60,\n          65,\n          -106,\n          -84,\n          60,\n          65,\n          -112,\n          71,\n          -29,\n          65,\n          -100,\n          38,\n          -18,\n          65,\n          -95,\n          55,\n          -76,\n          65,\n          -91,\n          17,\n          43,\n          65,\n          -95,\n          -81,\n          6,\n          65,\n          -77,\n          44,\n          55,\n          65,\n          -74,\n          17,\n          45,\n          65,\n          -60,\n          -26,\n          -60,\n          65,\n          -62,\n          46,\n          -50,\n          65,\n          -60,\n          106,\n          -45,\n          65,\n          -55,\n          -26,\n          66,\n          65,\n          -43,\n          46,\n          3,\n          65,\n          60,\n          49,\n          19,\n          65,\n          68,\n          11,\n          -94,\n          65,\n          87,\n          42,\n          -21,\n          65,\n          81,\n          -16,\n          4,\n          65,\n          118,\n          -101,\n          -89,\n          65,\n          -122,\n          -123,\n          -29,\n          65,\n          -117,\n          13,\n          106,\n          65,\n          -111,\n          -102,\n          -84,\n          65,\n          -119,\n          -19,\n          31,\n          65,\n          -89,\n          126,\n          66,\n          65,\n          -76,\n          -50,\n          20,\n          65,\n          -75,\n          90,\n          123,\n          65,\n          -60,\n          68,\n          -61,\n          65,\n          -57,\n          33,\n          0,\n          65,\n          -43,\n          
-18,\n          82,\n          65,\n          118,\n          91,\n          -49,\n          65,\n          -113,\n          54,\n          54,\n          65,\n          -110,\n          -99,\n          -10,\n          65,\n          -116,\n          -45,\n          45,\n          65,\n          -111,\n          81,\n          -3,\n          65,\n          -111,\n          -77,\n          -90,\n          65,\n          -89,\n          -96,\n          -94,\n          65,\n          -118,\n          83,\n          -26,\n          65,\n          -122,\n          -2,\n          -72,\n          65,\n          -118,\n          -40,\n          -71,\n          65,\n          -116,\n          14,\n          54,\n          65,\n          -120,\n          54,\n          -106,\n          65,\n          -127,\n          16,\n          1,\n          65,\n          -114,\n          -23,\n          -124,\n          65,\n          -127,\n          -123,\n          0,\n          65,\n          -118,\n          75,\n          -107,\n          65,\n          -119,\n          117,\n          -37,\n          65,\n          -108,\n          61,\n          -102,\n          65,\n          -105,\n          28,\n          -102,\n          65,\n          -120,\n          -114,\n          -80\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 164,\n        \"leftIndex\": [\n          0,\n          1,\n          164,\n          427552107,\n          790464622,\n          134835145,\n          128409025,\n          436535316,\n          26\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          164,\n          862702831,\n          523988478,\n          491480381,\n          18143175,\n          302244118,\n          314\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      
\"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 5850655621547167218,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          168,\n          207670007,\n          680779907,\n          251458080,\n          440530700,\n          250906682,\n          90018652,\n          99108922,\n          1059814860,\n          584613312,\n          507303322,\n          179824349,\n          797970334,\n          822612792,\n          434918827,\n          709987523,\n          733793098,\n          8340633\n        ],\n        \"cutValueData\": [\n          66,\n          -120,\n          125,\n          114,\n          65,\n          -44,\n          64,\n          30,\n          66,\n          -113,\n          -27,\n          99,\n          65,\n          76,\n          22,\n          114,\n          65,\n          -36,\n          -105,\n          -59,\n          66,\n          -126,\n          48,\n          83,\n          66,\n          -101,\n          -127,\n          -112,\n          65,\n          65,\n          121,\n          33,\n          65,\n          -80,\n          -34,\n          45,\n          65,\n          -61,\n          85,\n          -90,\n          66,\n          13,\n          -40,\n          -92,\n          66,\n          119,\n          15,\n          46,\n          65,\n          54,\n          14,\n          73,\n          65,\n          90,\n          18,\n          70,\n          65,\n          70,\n          -95,\n          89,\n          65,\n          
-58,\n          -54,\n          108,\n          65,\n          -44,\n          44,\n          127,\n          65,\n          -32,\n          125,\n          40,\n          66,\n          80,\n          -108,\n          -94,\n          65,\n          54,\n          62,\n          -29,\n          65,\n          48,\n          125,\n          -73,\n          65,\n          76,\n          107,\n          -118,\n          65,\n          -91,\n          77,\n          -52,\n          65,\n          -78,\n          -43,\n          84,\n          65,\n          -53,\n          83,\n          -48,\n          65,\n          -51,\n          99,\n          122,\n          65,\n          -33,\n          81,\n          -69,\n          65,\n          -37,\n          -106,\n          -37,\n          66,\n          91,\n          79,\n          13,\n          65,\n          36,\n          115,\n          126,\n          65,\n          62,\n          44,\n          -106,\n          65,\n          51,\n          -34,\n          117,\n          65,\n          57,\n          -83,\n          12,\n          65,\n          -108,\n          100,\n          68,\n          65,\n          -94,\n          126,\n          -123,\n          65,\n          -78,\n          80,\n          29,\n          65,\n          -49,\n          -25,\n          -99,\n          65,\n          -55,\n          -107,\n          71,\n          65,\n          -46,\n          -6,\n          5,\n          65,\n          -48,\n          115,\n          94,\n          65,\n          -35,\n          -90,\n          33,\n          66,\n          99,\n          71,\n          12,\n          65,\n          42,\n          -6,\n          120,\n          65,\n          34,\n          -39,\n          46,\n          65,\n          39,\n          -17,\n          -114,\n          65,\n          74,\n          48,\n          98,\n          65,\n          -115,\n          -122,\n          65,\n          65,\n          -80,\n          
-9,\n          28,\n          65,\n          -90,\n          -16,\n          -12,\n          65,\n          -79,\n          -120,\n          75,\n          65,\n          -60,\n          82,\n          -71,\n          65,\n          -59,\n          -39,\n          92,\n          65,\n          -53,\n          9,\n          -13,\n          65,\n          -56,\n          -126,\n          -25,\n          65,\n          -47,\n          -120,\n          -81,\n          66,\n          90,\n          66,\n          -28,\n          65,\n          41,\n          -106,\n          -125,\n          65,\n          39,\n          -93,\n          27,\n          65,\n          45,\n          105,\n          121,\n          65,\n          69,\n          -18,\n          -116,\n          65,\n          75,\n          -123,\n          42,\n          65,\n          98,\n          121,\n          34,\n          65,\n          -120,\n          41,\n          -99,\n          65,\n          -105,\n          25,\n          115,\n          65,\n          -95,\n          -97,\n          25,\n          65,\n          -78,\n          0,\n          50,\n          65,\n          -80,\n          -95,\n          -32,\n          65,\n          -71,\n          -126,\n          -112,\n          65,\n          -57,\n          -74,\n          -103,\n          65,\n          -57,\n          -41,\n          -30,\n          65,\n          -49,\n          23,\n          -76,\n          65,\n          -46,\n          -114,\n          -90,\n          65,\n          -47,\n          33,\n          -66,\n          65,\n          32,\n          -14,\n          111,\n          65,\n          36,\n          -110,\n          27,\n          65,\n          50,\n          -21,\n          102,\n          65,\n          79,\n          -103,\n          -104,\n          65,\n          109,\n          112,\n          25,\n          65,\n          -109,\n          107,\n          -127,\n          65,\n          -106,\n       
   -42,\n          -74,\n          65,\n          -112,\n          86,\n          -118,\n          65,\n          -84,\n          25,\n          113,\n          65,\n          -76,\n          -69,\n          -62,\n          65,\n          -81,\n          -122,\n          -91,\n          65,\n          -80,\n          80,\n          4,\n          65,\n          -62,\n          -97,\n          -37,\n          65,\n          -64,\n          -65,\n          -54,\n          65,\n          36,\n          -72,\n          38,\n          65,\n          86,\n          -46,\n          46,\n          65,\n          105,\n          -24,\n          57,\n          65,\n          -125,\n          -23,\n          10,\n          65,\n          -115,\n          112,\n          -35,\n          65,\n          -109,\n          -87,\n          31,\n          65,\n          -107,\n          35,\n          -48,\n          65,\n          -106,\n          104,\n          123,\n          65,\n          -109,\n          -49,\n          -109,\n          65,\n          -108,\n          62,\n          -119,\n          65,\n          -82,\n          -58,\n          -87,\n          65,\n          -75,\n          112,\n          -41,\n          65,\n          -75,\n          49,\n          48,\n          65,\n          -80,\n          -116,\n          36,\n          65,\n          -67,\n          -32,\n          75,\n          65,\n          -68,\n          -90,\n          14,\n          65,\n          93,\n          94,\n          111,\n          65,\n          81,\n          -14,\n          44,\n          65,\n          113,\n          70,\n          -25,\n          65,\n          -128,\n          -91,\n          -19,\n          65,\n          -122,\n          87,\n          30,\n          65,\n          -118,\n          -59,\n          75,\n          65,\n          -110,\n          103,\n          -2,\n          65,\n          -106,\n          -60,\n          59,\n          65,\n          -93,\n  
        105,\n          0,\n          65,\n          -88,\n          46,\n          -49,\n          65,\n          -84,\n          116,\n          88,\n          65,\n          -68,\n          66,\n          88,\n          65,\n          -71,\n          98,\n          -4,\n          65,\n          -67,\n          -123,\n          94,\n          65,\n          -70,\n          47,\n          -59,\n          65,\n          75,\n          91,\n          27,\n          65,\n          82,\n          62,\n          69,\n          65,\n          -121,\n          -72,\n          -40,\n          65,\n          -101,\n          18,\n          -110,\n          65,\n          -126,\n          -52,\n          27,\n          65,\n          -117,\n          -70,\n          -72,\n          65,\n          -114,\n          -103,\n          -2,\n          65,\n          -110,\n          37,\n          4,\n          65,\n          -98,\n          111,\n          39,\n          65,\n          -97,\n          -96,\n          35,\n          65,\n          -101,\n          -61,\n          87,\n          65,\n          -81,\n          88,\n          53,\n          65,\n          -83,\n          -44,\n          -4,\n          65,\n          -85,\n          -66,\n          124,\n          65,\n          -78,\n          -73,\n          100,\n          65,\n          -65,\n          33,\n          102,\n          65,\n          -72,\n          72,\n          -3,\n          65,\n          69,\n          19,\n          -121,\n          65,\n          92,\n          -94,\n          -107,\n          65,\n          124,\n          -37,\n          -113,\n          65,\n          -122,\n          -45,\n          -23,\n          65,\n          -126,\n          69,\n          -113,\n          65,\n          -126,\n          -121,\n          31,\n          65,\n          -124,\n          -89,\n          91,\n          65,\n          -117,\n          98,\n          -9,\n          65,\n          -118,\n    
      0,\n          63,\n          65,\n          -102,\n          -70,\n          -21,\n          65,\n          -98,\n          -61,\n          -19,\n          65,\n          -103,\n          -3,\n          45,\n          65,\n          -103,\n          -23,\n          3,\n          65,\n          -92,\n          58,\n          -98,\n          65,\n          -87,\n          -73,\n          115,\n          65,\n          -82,\n          107,\n          11,\n          65,\n          115,\n          -59,\n          89,\n          65,\n          115,\n          -110,\n          -104,\n          65,\n          118,\n          -122,\n          -34,\n          65,\n          -127,\n          -14,\n          74,\n          65,\n          -121,\n          124,\n          99,\n          65,\n          -122,\n          32,\n          -24,\n          65,\n          -112,\n          73,\n          -81,\n          65,\n          -102,\n          16,\n          -126,\n          65,\n          -104,\n          -97,\n          -9,\n          65,\n          -92,\n          101,\n          15,\n          65,\n          126,\n          87,\n          -111,\n          65,\n          112,\n          35,\n          10,\n          65,\n          -117,\n          1,\n          -45,\n          65,\n          -101,\n          69,\n          19,\n          65,\n          -97,\n          58,\n          74,\n          65,\n          -121,\n          -35,\n          11,\n          65,\n          -98,\n          70,\n          -76\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 168,\n        \"leftIndex\": [\n          0,\n          1,\n          168,\n          635409855,\n          10465752,\n          324944038,\n          700152013,\n          23724911,\n          16896\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          168,\n          970586015,\n          901360285,\n          
836698590,\n          293014721,\n          470157193,\n          4678\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6005716496380461659,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          167,\n          827785531,\n          971763877,\n          80945808,\n          796184691,\n          430416111,\n          957161488,\n          977275382,\n          555333116,\n          708089002,\n          654375449,\n          924548635,\n          859414707,\n          533709645,\n          1012117878,\n          592303265,\n          874474215,\n          1854454\n        ],\n        \"cutValueData\": [\n          65,\n          65,\n          -60,\n          -75,\n          65,\n          63,\n          -57,\n          51,\n          65,\n          -88,\n          -64,\n          -30,\n          65,\n          74,\n          -34,\n          33,\n          65,\n          60,\n          -22,\n          106,\n          65,\n          -98,\n          37,\n          40,\n          66,\n          70,\n          -6,\n          -113,\n          65,\n          40,\n          -33,\n          78,\n          65,\n          52,\n          -111,\n          -60,\n          65,\n          73,\n          -4,\n          -87,\n          65,\n          85,\n          68,\n          -51,\n          65,\n          -96,\n          -15,\n          
-127,\n          65,\n          -31,\n          -99,\n          -85,\n          65,\n          46,\n          -128,\n          1,\n          65,\n          34,\n          -74,\n          63,\n          65,\n          70,\n          114,\n          101,\n          65,\n          67,\n          -54,\n          -82,\n          65,\n          92,\n          -16,\n          -2,\n          65,\n          77,\n          -119,\n          -83,\n          65,\n          -107,\n          -123,\n          -80,\n          65,\n          -117,\n          22,\n          -34,\n          65,\n          -96,\n          4,\n          -15,\n          65,\n          -75,\n          -115,\n          -4,\n          66,\n          20,\n          -43,\n          -68,\n          65,\n          60,\n          -102,\n          11,\n          65,\n          51,\n          66,\n          83,\n          65,\n          68,\n          -88,\n          -11,\n          65,\n          65,\n          -112,\n          89,\n          65,\n          -121,\n          120,\n          -8,\n          65,\n          -102,\n          -84,\n          -56,\n          65,\n          -91,\n          66,\n          64,\n          65,\n          -85,\n          -100,\n          -19,\n          65,\n          -83,\n          -32,\n          -79,\n          65,\n          -77,\n          -68,\n          118,\n          65,\n          -38,\n          -96,\n          64,\n          65,\n          61,\n          -11,\n          84,\n          65,\n          54,\n          104,\n          60,\n          65,\n          88,\n          72,\n          106,\n          65,\n          99,\n          111,\n          -80,\n          65,\n          -128,\n          65,\n          33,\n          65,\n          -104,\n          67,\n          -110,\n          65,\n          -102,\n          -29,\n          -40,\n          65,\n          -99,\n          30,\n          -96,\n          65,\n          -83,\n          -63,\n          
-24,\n          65,\n          -81,\n          61,\n          46,\n          65,\n          -65,\n          110,\n          -103,\n          65,\n          -74,\n          116,\n          73,\n          65,\n          -68,\n          -117,\n          51,\n          65,\n          -53,\n          -89,\n          -61,\n          65,\n          62,\n          83,\n          -81,\n          65,\n          88,\n          -112,\n          41,\n          65,\n          109,\n          -83,\n          -67,\n          65,\n          -117,\n          96,\n          49,\n          65,\n          -110,\n          -5,\n          -63,\n          65,\n          -116,\n          -19,\n          66,\n          65,\n          -108,\n          52,\n          -80,\n          65,\n          -98,\n          -4,\n          -121,\n          65,\n          -110,\n          108,\n          31,\n          65,\n          -93,\n          -97,\n          114,\n          65,\n          -94,\n          -70,\n          -18,\n          65,\n          -90,\n          103,\n          -91,\n          65,\n          -74,\n          9,\n          126,\n          65,\n          -81,\n          108,\n          -6,\n          65,\n          -54,\n          -102,\n          75,\n          65,\n          45,\n          -103,\n          -81,\n          65,\n          101,\n          6,\n          45,\n          65,\n          127,\n          35,\n          98,\n          65,\n          103,\n          86,\n          9,\n          65,\n          -118,\n          67,\n          12,\n          65,\n          -115,\n          38,\n          -115,\n          65,\n          -108,\n          -66,\n          -15,\n          65,\n          -92,\n          125,\n          -91,\n          65,\n          -82,\n          86,\n          21,\n          65,\n          -88,\n          125,\n          -73,\n          65,\n          -77,\n          51,\n          -113,\n          65,\n          -80,\n          29,\n          
-52,\n          65,\n          -61,\n          93,\n          53,\n          65,\n          -39,\n          -111,\n          120,\n          65,\n          94,\n          -6,\n          -44,\n          65,\n          110,\n          0,\n          91,\n          65,\n          120,\n          41,\n          -67,\n          65,\n          104,\n          -114,\n          105,\n          65,\n          -126,\n          -23,\n          -32,\n          65,\n          -123,\n          -20,\n          -85,\n          65,\n          -120,\n          -101,\n          103,\n          65,\n          -111,\n          -4,\n          112,\n          65,\n          -111,\n          -44,\n          57,\n          65,\n          -108,\n          -4,\n          60,\n          65,\n          -98,\n          -61,\n          67,\n          65,\n          -89,\n          -25,\n          -72,\n          65,\n          -89,\n          95,\n          -64,\n          65,\n          -84,\n          60,\n          1,\n          65,\n          -84,\n          52,\n          -15,\n          65,\n          -79,\n          -64,\n          -68,\n          65,\n          -79,\n          31,\n          -48,\n          65,\n          -55,\n          3,\n          -24,\n          65,\n          -57,\n          -63,\n          28,\n          65,\n          -53,\n          -22,\n          112,\n          65,\n          -44,\n          -4,\n          102,\n          65,\n          95,\n          -117,\n          -120,\n          65,\n          100,\n          -124,\n          -53,\n          65,\n          98,\n          95,\n          28,\n          65,\n          104,\n          32,\n          -29,\n          65,\n          -121,\n          -128,\n          -93,\n          65,\n          -126,\n          -42,\n          -33,\n          65,\n          -106,\n          -48,\n          -103,\n          65,\n          -108,\n          61,\n          -28,\n          65,\n          -92,\n          -99,\n     
     -4,\n          65,\n          -86,\n          -86,\n          -66,\n          65,\n          -78,\n          -46,\n          -82,\n          65,\n          -66,\n          -16,\n          -76,\n          65,\n          -67,\n          -2,\n          26,\n          65,\n          -62,\n          -63,\n          -7,\n          65,\n          -59,\n          124,\n          84,\n          65,\n          -42,\n          83,\n          50,\n          65,\n          94,\n          9,\n          -34,\n          65,\n          -124,\n          101,\n          -120,\n          65,\n          -113,\n          95,\n          17,\n          65,\n          -120,\n          118,\n          8,\n          65,\n          -116,\n          -95,\n          23,\n          65,\n          -112,\n          -94,\n          -80,\n          65,\n          -97,\n          76,\n          -2,\n          65,\n          -80,\n          -36,\n          58,\n          65,\n          -78,\n          -25,\n          38,\n          65,\n          -65,\n          -114,\n          16,\n          65,\n          -66,\n          -69,\n          -121,\n          65,\n          -59,\n          49,\n          36,\n          65,\n          -55,\n          -11,\n          -113,\n          65,\n          -43,\n          100,\n          22,\n          65,\n          -46,\n          67,\n          -86,\n          65,\n          -41,\n          -89,\n          -85,\n          65,\n          118,\n          4,\n          -29,\n          65,\n          -121,\n          33,\n          102,\n          65,\n          -128,\n          69,\n          -95,\n          65,\n          -117,\n          -33,\n          -91,\n          65,\n          -119,\n          -34,\n          65,\n          65,\n          -109,\n          -128,\n          54,\n          65,\n          -102,\n          73,\n          -12,\n          65,\n          -80,\n          -95,\n          -124,\n          65,\n          -70,\n          34,\n    
      2,\n          65,\n          -70,\n          87,\n          92,\n          65,\n          -57,\n          12,\n          0,\n          65,\n          -53,\n          -40,\n          39,\n          65,\n          -56,\n          -55,\n          19,\n          65,\n          -52,\n          -10,\n          -85,\n          65,\n          -47,\n          -84,\n          13,\n          65,\n          -33,\n          -54,\n          -47,\n          65,\n          127,\n          -66,\n          103,\n          65,\n          115,\n          -126,\n          -83,\n          65,\n          -117,\n          -37,\n          -74,\n          65,\n          -118,\n          7,\n          -122,\n          65,\n          -120,\n          -81,\n          -114,\n          65,\n          -67,\n          0,\n          -125,\n          65,\n          -61,\n          -103,\n          96,\n          65,\n          -45,\n          83,\n          -127,\n          65,\n          -50,\n          112,\n          11,\n          65,\n          -50,\n          22,\n          -101,\n          65,\n          -45,\n          63,\n          104,\n          65,\n          120,\n          -92,\n          -104,\n          65,\n          -113,\n          110,\n          122,\n          65,\n          -115,\n          10,\n          18,\n          65,\n          -43,\n          -25,\n          -112,\n          65,\n          -43,\n          -98,\n          82,\n          65,\n          -114,\n          28,\n          -81,\n          65,\n          -115,\n          90,\n          -7,\n          65,\n          -41,\n          -62,\n          26,\n          65,\n          -40,\n          -106,\n          -58\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 167,\n        \"leftIndex\": [\n          0,\n          1,\n          167,\n          829071359,\n          283835805,\n          72314346,\n          895131888,\n   
       909127501,\n          2048\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          167,\n          981229239,\n          564270940,\n          329498574,\n          636514964,\n          25193761,\n          3745\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 8744577020609081589,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          163,\n          637052076,\n          1023292457,\n          850537944,\n          671124079,\n          256574027,\n          433249855,\n          173906778,\n          560411735,\n          784158733,\n          655620432,\n          1008347984,\n          39312517,\n          384829375,\n          50322981,\n          1008334898,\n          987135068,\n          250\n        ],\n        \"cutValueData\": [\n          66,\n          99,\n          0,\n          12,\n          66,\n          70,\n          125,\n          -86,\n          66,\n          -124,\n          -102,\n          97,\n          65,\n          -127,\n          92,\n          -60,\n          66,\n          80,\n          -99,\n          -109,\n          66,\n          126,\n          48,\n          107,\n          66,\n          -110,\n          19,\n          -79,\n          65,\n          43,\n          -53,\n          -47,\n          66,\n          21,\n          -110,\n          -17,\n       
   66,\n          -127,\n          -110,\n          87,\n          65,\n          37,\n          -37,\n          58,\n          65,\n          -121,\n          -74,\n          9,\n          65,\n          -51,\n          35,\n          -55,\n          66,\n          50,\n          -122,\n          -7,\n          66,\n          113,\n          40,\n          -80,\n          65,\n          34,\n          -7,\n          25,\n          65,\n          65,\n          -68,\n          120,\n          65,\n          -116,\n          111,\n          -124,\n          65,\n          -93,\n          -118,\n          80,\n          66,\n          4,\n          -76,\n          -13,\n          65,\n          85,\n          -88,\n          -60,\n          65,\n          65,\n          109,\n          42,\n          65,\n          -118,\n          120,\n          120,\n          65,\n          -57,\n          113,\n          22,\n          65,\n          -56,\n          -111,\n          49,\n          65,\n          57,\n          -122,\n          -54,\n          65,\n          67,\n          0,\n          112,\n          65,\n          113,\n          16,\n          -8,\n          65,\n          -114,\n          13,\n          -31,\n          65,\n          -102,\n          -88,\n          -125,\n          65,\n          -67,\n          -1,\n          -10,\n          65,\n          -49,\n          65,\n          109,\n          65,\n          -43,\n          -3,\n          -10,\n          65,\n          49,\n          -54,\n          -46,\n          65,\n          34,\n          -53,\n          85,\n          65,\n          50,\n          31,\n          0,\n          65,\n          77,\n          -100,\n          48,\n          65,\n          -121,\n          -37,\n          78,\n          65,\n          -117,\n          -108,\n          -38,\n          65,\n          -113,\n          -47,\n          88,\n          65,\n          -106,\n          -37,\n          5,\n          65,\n 
         -93,\n          39,\n          -1,\n          65,\n          -96,\n          -64,\n          11,\n          65,\n          -79,\n          79,\n          -58,\n          65,\n          -64,\n          125,\n          -95,\n          65,\n          -46,\n          -46,\n          -67,\n          65,\n          -48,\n          47,\n          122,\n          65,\n          -42,\n          -41,\n          -85,\n          65,\n          57,\n          -127,\n          41,\n          65,\n          49,\n          -66,\n          104,\n          65,\n          93,\n          -76,\n          -90,\n          65,\n          125,\n          69,\n          -56,\n          65,\n          105,\n          44,\n          63,\n          65,\n          113,\n          58,\n          -12,\n          65,\n          -126,\n          4,\n          -99,\n          65,\n          -126,\n          -118,\n          47,\n          65,\n          -113,\n          -74,\n          77,\n          65,\n          -107,\n          -22,\n          -64,\n          65,\n          -112,\n          -65,\n          67,\n          65,\n          -103,\n          99,\n          9,\n          65,\n          -90,\n          -108,\n          86,\n          65,\n          -94,\n          112,\n          -85,\n          65,\n          -77,\n          -86,\n          93,\n          65,\n          -68,\n          91,\n          -4,\n          65,\n          -58,\n          110,\n          -24,\n          65,\n          -50,\n          -87,\n          10,\n          65,\n          -46,\n          -72,\n          -50,\n          65,\n          -35,\n          98,\n          59,\n          65,\n          38,\n          81,\n          -37,\n          65,\n          37,\n          65,\n          -83,\n          65,\n          55,\n          113,\n          -80,\n          65,\n          72,\n          -42,\n          121,\n          65,\n          117,\n          -96,\n          -44,\n          65,\n          
122,\n          -100,\n          5,\n          65,\n          -124,\n          77,\n          -43,\n          65,\n          -124,\n          -72,\n          123,\n          65,\n          -117,\n          -14,\n          126,\n          65,\n          -118,\n          31,\n          78,\n          65,\n          -116,\n          -127,\n          -107,\n          65,\n          -110,\n          21,\n          -65,\n          65,\n          -102,\n          -38,\n          -112,\n          65,\n          -101,\n          54,\n          28,\n          65,\n          -87,\n          78,\n          -120,\n          65,\n          -80,\n          -60,\n          88,\n          65,\n          -72,\n          127,\n          -89,\n          65,\n          -58,\n          56,\n          -74,\n          65,\n          -41,\n          -107,\n          -56,\n          65,\n          -51,\n          -43,\n          -22,\n          65,\n          -47,\n          13,\n          -91,\n          65,\n          47,\n          17,\n          -19,\n          65,\n          79,\n          -64,\n          27,\n          65,\n          103,\n          85,\n          -62,\n          65,\n          111,\n          -73,\n          -30,\n          65,\n          125,\n          -10,\n          58,\n          65,\n          115,\n          -27,\n          58,\n          65,\n          120,\n          24,\n          -95,\n          65,\n          -118,\n          -65,\n          56,\n          65,\n          -112,\n          32,\n          21,\n          65,\n          -97,\n          -50,\n          89,\n          65,\n          -97,\n          -80,\n          57,\n          65,\n          -89,\n          -127,\n          -48,\n          65,\n          -97,\n          -116,\n          125,\n          65,\n          -96,\n          -55,\n          11,\n          65,\n          -92,\n          -31,\n          92,\n          65,\n          -82,\n          121,\n          -100,\n          65,\n  
        -80,\n          65,\n          31,\n          65,\n          -80,\n          115,\n          -19,\n          65,\n          -79,\n          99,\n          -4,\n          65,\n          -60,\n          71,\n          58,\n          65,\n          -54,\n          -98,\n          -82,\n          65,\n          -37,\n          -73,\n          29,\n          65,\n          57,\n          -103,\n          -122,\n          65,\n          75,\n          116,\n          3,\n          65,\n          80,\n          -32,\n          56,\n          65,\n          111,\n          84,\n          124,\n          65,\n          -123,\n          -50,\n          -58,\n          65,\n          -113,\n          -95,\n          -47,\n          65,\n          -114,\n          69,\n          -41,\n          65,\n          -112,\n          -13,\n          50,\n          65,\n          -98,\n          63,\n          89,\n          65,\n          -100,\n          -80,\n          116,\n          65,\n          -91,\n          -52,\n          -1,\n          65,\n          -84,\n          -128,\n          -36,\n          65,\n          -78,\n          99,\n          -8,\n          65,\n          -72,\n          95,\n          -98,\n          65,\n          -72,\n          89,\n          -124,\n          65,\n          -66,\n          79,\n          -43,\n          65,\n          -45,\n          92,\n          59,\n          65,\n          -37,\n          69,\n          -33,\n          65,\n          81,\n          -125,\n          88,\n          65,\n          107,\n          -117,\n          4,\n          65,\n          97,\n          8,\n          13,\n          65,\n          -125,\n          82,\n          28,\n          65,\n          -108,\n          -47,\n          -38,\n          65,\n          -106,\n          3,\n          -40,\n          65,\n          -105,\n          68,\n          42,\n          65,\n          -89,\n          -118,\n          -102,\n          65,\n          
-79,\n          72,\n          35,\n          65,\n          -73,\n          60,\n          -5,\n          65,\n          -79,\n          127,\n          -60,\n          65,\n          -79,\n          81,\n          26,\n          65,\n          -69,\n          -95,\n          116,\n          65,\n          99,\n          -96,\n          -26,\n          65,\n          -110,\n          12,\n          95,\n          65,\n          -112,\n          -62,\n          99,\n          65,\n          -101,\n          -94,\n          107,\n          65,\n          -93,\n          41,\n          -22,\n          65,\n          -91,\n          43,\n          55,\n          65,\n          -87,\n          32,\n          109,\n          65,\n          -82,\n          -2,\n          -57,\n          65,\n          -75,\n          90,\n          -69,\n          65,\n          -71,\n          -100,\n          -91,\n          65,\n          105,\n          -35,\n          -67,\n          65,\n          -112,\n          37,\n          -111,\n          65,\n          -84,\n          47,\n          110,\n          65,\n          -83,\n          98,\n          78,\n          65,\n          -88,\n          -43,\n          -113,\n          65,\n          -74,\n          -47,\n          -65,\n          65,\n          -73,\n          22,\n          -49,\n          65,\n          -83,\n          -6,\n          -59,\n          65,\n          -83,\n          -46,\n          34,\n          65,\n          -76,\n          -118,\n          -14,\n          65,\n          -75,\n          57,\n          38\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 163,\n        \"leftIndex\": [\n          0,\n          1,\n          163,\n          1056775087,\n          255180751,\n          16019924,\n          630264387,\n          154025988,\n          161\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n      
    163,\n          1004871055,\n          961428951,\n          469513268,\n          680779979,\n          944357935,\n          288\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -8471452289987912583,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          170,\n          776084743,\n          583096645,\n          151265082,\n          690207239,\n          935333819,\n          253749845,\n          132792853,\n          645977550,\n          656330797,\n          278729455,\n          161533650,\n          500535262,\n          125878692,\n          280642008,\n          800928762,\n          789164372,\n          593626072\n        ],\n        \"cutValueData\": [\n          66,\n          7,\n          122,\n          -41,\n          65,\n          -64,\n          -107,\n          113,\n          66,\n          71,\n          -63,\n          84,\n          65,\n          -122,\n          -29,\n          104,\n          65,\n          -46,\n          83,\n          -34,\n          66,\n          34,\n          98,\n          -98,\n          66,\n          112,\n          -71,\n          87,\n          65,\n          117,\n          5,\n          42,\n          65,\n          -20,\n          17,\n          -70,\n          65,\n          -52,\n          -58,\n          53,\n          65,\n          -43,\n          76,\n    
      41,\n          66,\n          30,\n          -117,\n          -110,\n          66,\n          106,\n          -41,\n          31,\n          65,\n          100,\n          -28,\n          -5,\n          65,\n          -124,\n          -78,\n          -58,\n          65,\n          -75,\n          -50,\n          64,\n          65,\n          -68,\n          -115,\n          -91,\n          65,\n          -55,\n          26,\n          -34,\n          65,\n          -56,\n          -111,\n          47,\n          65,\n          -39,\n          -98,\n          110,\n          66,\n          112,\n          -125,\n          14,\n          65,\n          106,\n          13,\n          91,\n          65,\n          119,\n          -95,\n          -92,\n          65,\n          124,\n          82,\n          -81,\n          65,\n          -127,\n          124,\n          -79,\n          65,\n          -72,\n          53,\n          82,\n          65,\n          -53,\n          21,\n          -109,\n          65,\n          -42,\n          -25,\n          -116,\n          65,\n          -39,\n          -22,\n          27,\n          65,\n          -35,\n          114,\n          -42,\n          65,\n          57,\n          73,\n          94,\n          65,\n          84,\n          -106,\n          -80,\n          65,\n          124,\n          -98,\n          -62,\n          65,\n          125,\n          65,\n          -7,\n          65,\n          -122,\n          1,\n          -114,\n          65,\n          -89,\n          13,\n          90,\n          65,\n          -79,\n          -87,\n          -80,\n          65,\n          -66,\n          124,\n          77,\n          65,\n          -59,\n          -100,\n          85,\n          65,\n          -60,\n          -35,\n          -109,\n          65,\n          -52,\n          81,\n          119,\n          65,\n          -48,\n          90,\n          -103,\n          65,\n          -33,\n          27,\n   
       40,\n          65,\n          -37,\n          12,\n          -21,\n          65,\n          -33,\n          51,\n          125,\n          65,\n          76,\n          -56,\n          -72,\n          65,\n          49,\n          15,\n          -96,\n          65,\n          99,\n          -20,\n          -65,\n          65,\n          101,\n          6,\n          -7,\n          65,\n          98,\n          109,\n          -29,\n          65,\n          116,\n          72,\n          4,\n          65,\n          -128,\n          7,\n          9,\n          65,\n          -113,\n          119,\n          96,\n          65,\n          -87,\n          -16,\n          -16,\n          65,\n          -71,\n          -80,\n          51,\n          65,\n          -80,\n          19,\n          33,\n          65,\n          -66,\n          24,\n          11,\n          65,\n          -56,\n          72,\n          -95,\n          65,\n          -64,\n          112,\n          -46,\n          65,\n          -52,\n          64,\n          -42,\n          65,\n          -48,\n          -99,\n          122,\n          65,\n          -38,\n          -29,\n          65,\n          65,\n          39,\n          106,\n          -96,\n          65,\n          64,\n          34,\n          87,\n          65,\n          54,\n          35,\n          -121,\n          65,\n          79,\n          30,\n          29,\n          65,\n          92,\n          -30,\n          -28,\n          65,\n          119,\n          -107,\n          80,\n          65,\n          -114,\n          -115,\n          -10,\n          65,\n          -114,\n          -111,\n          -94,\n          65,\n          -103,\n          -48,\n          -28,\n          65,\n          -80,\n          50,\n          -6,\n          65,\n          -79,\n          102,\n          -105,\n          65,\n          -69,\n          -62,\n          92,\n          65,\n          -59,\n          -103,\n          -25,\n 
         65,\n          -60,\n          -68,\n          22,\n          65,\n          -52,\n          84,\n          -65,\n          65,\n          -42,\n          -54,\n          109,\n          65,\n          46,\n          -110,\n          99,\n          65,\n          45,\n          62,\n          -72,\n          65,\n          52,\n          101,\n          -88,\n          65,\n          75,\n          -8,\n          -14,\n          65,\n          99,\n          40,\n          85,\n          65,\n          124,\n          -128,\n          24,\n          65,\n          -122,\n          -125,\n          19,\n          65,\n          -120,\n          -39,\n          -122,\n          65,\n          -108,\n          60,\n          -29,\n          65,\n          -98,\n          -14,\n          -59,\n          65,\n          -81,\n          59,\n          110,\n          65,\n          -71,\n          47,\n          -93,\n          65,\n          -50,\n          15,\n          53,\n          65,\n          46,\n          97,\n          -45,\n          65,\n          35,\n          0,\n          29,\n          65,\n          62,\n          -93,\n          96,\n          65,\n          67,\n          -47,\n          43,\n          65,\n          67,\n          -71,\n          -45,\n          65,\n          94,\n          29,\n          -104,\n          65,\n          -122,\n          -125,\n          47,\n          65,\n          -128,\n          61,\n          36,\n          65,\n          -115,\n          12,\n          -17,\n          65,\n          -102,\n          -27,\n          -108,\n          65,\n          -98,\n          49,\n          -76,\n          65,\n          -99,\n          59,\n          73,\n          65,\n          -80,\n          24,\n          -45,\n          65,\n          33,\n          117,\n          -121,\n          65,\n          41,\n          48,\n          -112,\n          65,\n          57,\n          8,\n          108,\n          
65,\n          54,\n          55,\n          20,\n          65,\n          75,\n          97,\n          63,\n          65,\n          97,\n          -59,\n          9,\n          65,\n          -116,\n          99,\n          107,\n          65,\n          -120,\n          76,\n          -49,\n          65,\n          -111,\n          -22,\n          112,\n          65,\n          -125,\n          7,\n          20,\n          65,\n          -110,\n          95,\n          104,\n          65,\n          -98,\n          -1,\n          76,\n          65,\n          -101,\n          2,\n          -108,\n          65,\n          -85,\n          -60,\n          -7,\n          65,\n          -78,\n          -13,\n          -90,\n          65,\n          55,\n          48,\n          -122,\n          65,\n          88,\n          72,\n          14,\n          65,\n          -119,\n          111,\n          -66,\n          65,\n          -108,\n          124,\n          -125,\n          65,\n          -109,\n          36,\n          -62,\n          65,\n          -108,\n          -26,\n          -125,\n          65,\n          -98,\n          121,\n          75,\n          65,\n          -89,\n          83,\n          17,\n          65,\n          -98,\n          -76,\n          -68,\n          65,\n          -91,\n          -61,\n          -5,\n          65,\n          -82,\n          61,\n          81,\n          65,\n          -83,\n          -67,\n          -128,\n          65,\n          -80,\n          40,\n          70,\n          65,\n          70,\n          90,\n          -18,\n          65,\n          75,\n          29,\n          54,\n          65,\n          -117,\n          -118,\n          104,\n          65,\n          -108,\n          -20,\n          -43,\n          65,\n          -105,\n          -8,\n          94,\n          65,\n          -102,\n          -8,\n          -17,\n          65,\n          -96,\n          12,\n          -52,\n          65,\n  
        -93,\n          -84,\n          -59,\n          65,\n          -82,\n          -118,\n          -66,\n          65,\n          -66,\n          50,\n          -58,\n          65,\n          -69,\n          -112,\n          7,\n          65,\n          81,\n          -44,\n          -79,\n          65,\n          93,\n          112,\n          -44,\n          65,\n          -118,\n          -14,\n          58,\n          65,\n          -115,\n          -58,\n          -100,\n          65,\n          -109,\n          38,\n          -64,\n          65,\n          -103,\n          -6,\n          28,\n          65,\n          -92,\n          -1,\n          -117,\n          65,\n          -95,\n          99,\n          28,\n          65,\n          -82,\n          108,\n          -109,\n          65,\n          -82,\n          -113,\n          21,\n          65,\n          74,\n          118,\n          -128,\n          65,\n          -117,\n          117,\n          -61,\n          65,\n          -109,\n          -109,\n          -48,\n          65,\n          -96,\n          -80,\n          -36,\n          65,\n          -82,\n          60,\n          -110,\n          65,\n          -86,\n          -116,\n          1,\n          65,\n          -84,\n          -73,\n          -120,\n          65,\n          -89,\n          107,\n          31,\n          65,\n          -86,\n          40,\n          -116,\n          65,\n          88,\n          103,\n          67,\n          65,\n          -89,\n          49,\n          23,\n          65,\n          -83,\n          86,\n          -124,\n          65,\n          -86,\n          -107,\n          61,\n          65,\n          -92,\n          61,\n          18,\n          65,\n          -95,\n          38,\n          -40,\n          65,\n          -81,\n          9,\n          -23,\n          65,\n          -93,\n          59,\n          96\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n     
   \"canonicalAndNotALeaf\": true,\n        \"size\": 170,\n        \"leftIndex\": [\n          0,\n          1,\n          170,\n          1072358367,\n          23302135,\n          241714996,\n          421674508,\n          555539289,\n          51782\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          170,\n          665630655,\n          667265605,\n          248319269,\n          532949866,\n          638359817,\n          131150\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 8010610685156690122,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          169,\n          371568937,\n          79721695,\n          1035367483,\n          188223398,\n          218372933,\n          985791552,\n          648590122,\n          508429535,\n          1057874985,\n          505508042,\n          1021081623,\n          908236254,\n          849287976,\n          941458535,\n          644388413,\n          893733890,\n          74773754\n        ],\n        \"cutValueData\": [\n          65,\n          -104,\n          24,\n          25,\n          65,\n          -113,\n          92,\n          -36,\n          66,\n          112,\n          -39,\n          -37,\n          65,\n          62,\n          -107,\n          -1,\n          65,\n          -122,\n          -121,\n          62,\n          66,\n        
  2,\n          -80,\n          100,\n          66,\n          -125,\n          -75,\n          -109,\n          65,\n          52,\n          121,\n          74,\n          65,\n          113,\n          69,\n          -51,\n          65,\n          -118,\n          -60,\n          -63,\n          65,\n          -38,\n          -85,\n          -59,\n          66,\n          31,\n          -45,\n          -47,\n          66,\n          -101,\n          -127,\n          2,\n          65,\n          41,\n          102,\n          78,\n          65,\n          43,\n          19,\n          20,\n          65,\n          97,\n          -66,\n          4,\n          65,\n          -126,\n          -33,\n          -90,\n          65,\n          -115,\n          5,\n          -74,\n          65,\n          -39,\n          31,\n          50,\n          66,\n          73,\n          -97,\n          -89,\n          65,\n          33,\n          72,\n          -40,\n          65,\n          36,\n          120,\n          -101,\n          65,\n          95,\n          -91,\n          107,\n          65,\n          109,\n          -116,\n          83,\n          65,\n          -116,\n          55,\n          -68,\n          65,\n          -117,\n          -100,\n          82,\n          65,\n          -86,\n          51,\n          -108,\n          65,\n          -90,\n          118,\n          74,\n          66,\n          50,\n          26,\n          -65,\n          65,\n          36,\n          -101,\n          -35,\n          65,\n          54,\n          27,\n          12,\n          65,\n          93,\n          -45,\n          27,\n          65,\n          100,\n          86,\n          -109,\n          65,\n          -125,\n          115,\n          -31,\n          65,\n          -123,\n          -63,\n          -50,\n          65,\n          -119,\n          -21,\n          -84,\n          65,\n          -102,\n          102,\n          12,\n          65,\n          
-94,\n          -111,\n          91,\n          66,\n          64,\n          -95,\n          63,\n          65,\n          42,\n          70,\n          72,\n          65,\n          43,\n          105,\n          -123,\n          65,\n          45,\n          70,\n          90,\n          65,\n          58,\n          -71,\n          80,\n          65,\n          55,\n          -19,\n          -59,\n          65,\n          94,\n          -76,\n          66,\n          65,\n          106,\n          -30,\n          -87,\n          65,\n          98,\n          93,\n          -120,\n          65,\n          -119,\n          60,\n          117,\n          65,\n          -123,\n          107,\n          57,\n          65,\n          -121,\n          32,\n          -122,\n          65,\n          -106,\n          21,\n          -74,\n          65,\n          -91,\n          84,\n          27,\n          65,\n          -90,\n          -120,\n          47,\n          65,\n          -83,\n          -128,\n          -105,\n          65,\n          -63,\n          -83,\n          -15,\n          66,\n          58,\n          116,\n          55,\n          65,\n          46,\n          -18,\n          115,\n          65,\n          39,\n          64,\n          -9,\n          65,\n          62,\n          -42,\n          -77,\n          65,\n          50,\n          -59,\n          -56,\n          65,\n          84,\n          -53,\n          27,\n          65,\n          106,\n          -53,\n          -83,\n          65,\n          87,\n          123,\n          50,\n          65,\n          101,\n          91,\n          -100,\n          65,\n          116,\n          -10,\n          100,\n          65,\n          -122,\n          -32,\n          -23,\n          65,\n          -124,\n          55,\n          -34,\n          65,\n          -112,\n          -24,\n          49,\n          65,\n          -104,\n          -34,\n          -45,\n          65,\n          
-100,\n          -95,\n          -120,\n          65,\n          -96,\n          20,\n          -76,\n          65,\n          -90,\n          82,\n          31,\n          65,\n          -89,\n          -94,\n          15,\n          65,\n          -93,\n          -40,\n          -84,\n          65,\n          -56,\n          -80,\n          97,\n          65,\n          -50,\n          47,\n          -82,\n          65,\n          61,\n          76,\n          60,\n          65,\n          77,\n          25,\n          -19,\n          65,\n          70,\n          -4,\n          -55,\n          65,\n          99,\n          -79,\n          -83,\n          65,\n          114,\n          -2,\n          -82,\n          65,\n          121,\n          -127,\n          -41,\n          65,\n          -117,\n          73,\n          -57,\n          65,\n          -111,\n          89,\n          -69,\n          65,\n          -106,\n          74,\n          -48,\n          65,\n          -102,\n          -26,\n          -116,\n          65,\n          -104,\n          90,\n          -67,\n          65,\n          -90,\n          -78,\n          113,\n          65,\n          -99,\n          39,\n          -6,\n          65,\n          -70,\n          -119,\n          46,\n          65,\n          -58,\n          86,\n          -25,\n          65,\n          -43,\n          69,\n          -19,\n          65,\n          75,\n          -70,\n          -79,\n          65,\n          76,\n          90,\n          -1,\n          65,\n          65,\n          -106,\n          -65,\n          65,\n          101,\n          62,\n          -77,\n          65,\n          -126,\n          -116,\n          32,\n          65,\n          -114,\n          87,\n          -92,\n          65,\n          -97,\n          -58,\n          -39,\n          65,\n          -110,\n          108,\n          -51,\n          65,\n          -110,\n          81,\n          17,\n          65,\n          
-91,\n          87,\n          54,\n          65,\n          -84,\n          41,\n          -81,\n          65,\n          -58,\n          -73,\n          68,\n          65,\n          -55,\n          -107,\n          22,\n          65,\n          -61,\n          -18,\n          -89,\n          65,\n          -47,\n          120,\n          -80,\n          65,\n          -46,\n          20,\n          103,\n          65,\n          65,\n          70,\n          107,\n          65,\n          92,\n          58,\n          -48,\n          65,\n          66,\n          34,\n          -20,\n          65,\n          90,\n          20,\n          -119,\n          65,\n          90,\n          -34,\n          -127,\n          65,\n          -119,\n          -9,\n          -49,\n          65,\n          -110,\n          62,\n          -83,\n          65,\n          -83,\n          -104,\n          -119,\n          65,\n          -62,\n          -90,\n          5,\n          65,\n          -64,\n          -81,\n          -43,\n          65,\n          -58,\n          -92,\n          -67,\n          65,\n          -51,\n          -55,\n          120,\n          65,\n          -50,\n          -33,\n          -22,\n          65,\n          -54,\n          3,\n          37,\n          65,\n          -41,\n          7,\n          33,\n          65,\n          -47,\n          -54,\n          89,\n          65,\n          -37,\n          77,\n          -11,\n          65,\n          70,\n          -20,\n          -128,\n          65,\n          90,\n          -52,\n          -55,\n          65,\n          69,\n          -36,\n          -56,\n          65,\n          -109,\n          60,\n          -84,\n          65,\n          -93,\n          8,\n          -38,\n          65,\n          -79,\n          29,\n          -115,\n          65,\n          -75,\n          -110,\n          -81,\n          65,\n          -75,\n          -126,\n          41,\n          65,\n          -61,\n 
         3,\n          -81,\n          65,\n          -46,\n          94,\n          -127,\n          65,\n          -46,\n          -29,\n          23,\n          65,\n          -53,\n          -110,\n          -2,\n          65,\n          -40,\n          93,\n          -99,\n          65,\n          -37,\n          -49,\n          -3,\n          65,\n          71,\n          -42,\n          104,\n          65,\n          65,\n          89,\n          15,\n          65,\n          -116,\n          96,\n          -84,\n          65,\n          -96,\n          7,\n          -72,\n          65,\n          -84,\n          -111,\n          69,\n          65,\n          -83,\n          -28,\n          -99,\n          65,\n          -77,\n          15,\n          28,\n          65,\n          -74,\n          96,\n          -41,\n          65,\n          -69,\n          -90,\n          57,\n          65,\n          -55,\n          -85,\n          16,\n          65,\n          -47,\n          74,\n          2,\n          65,\n          -39,\n          -110,\n          21,\n          65,\n          -73,\n          -126,\n          -114,\n          65,\n          -66,\n          -85,\n          106,\n          65,\n          -75,\n          -77,\n          98,\n          65,\n          -71,\n          60,\n          6,\n          65,\n          -67,\n          -49,\n          -107,\n          65,\n          -84,\n          108,\n          60,\n          65,\n          -79,\n          30,\n          -12,\n          65,\n          -68,\n          -21,\n          59,\n          65,\n          -93,\n          -25,\n          73,\n          65,\n          -81,\n          -2,\n          53,\n          65,\n          -88,\n          24,\n          104,\n          65,\n          -74,\n          62,\n          -32,\n          65,\n          -72,\n          10,\n          -62,\n          65,\n          -86,\n          -28,\n          -42,\n          65,\n          -66,\n          
94,\n          -85,\n          65,\n          -81,\n          -74,\n          -48,\n          65,\n          -85,\n          -64,\n          16,\n          65,\n          -84,\n          -107,\n          -24\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 169,\n        \"leftIndex\": [\n          0,\n          1,\n          169,\n          761046959,\n          32696287,\n          556453652,\n          236433967,\n          167791928,\n          90566\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          169,\n          962849663,\n          836100335,\n          835092952,\n          185053339,\n          235216534,\n          132296\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -6083579190946894998,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          158,\n          747467411,\n          749462614,\n          939905597,\n          968657267,\n          182631943,\n          79699775,\n          54098181,\n          288327139,\n          311561638,\n          329420228,\n          75867718,\n          752704077,\n          111402505,\n          1006035975,\n          959461119,\n          1559561\n        ],\n        \"cutValueData\": [\n          66,\n          74,\n          -85,\n          -53,\n          66,\n 
         8,\n          40,\n          -39,\n          66,\n          -96,\n          114,\n          -25,\n          65,\n          -110,\n          87,\n          -82,\n          66,\n          -114,\n          -39,\n          -55,\n          65,\n          65,\n          -73,\n          42,\n          65,\n          -41,\n          32,\n          -2,\n          66,\n          -124,\n          -104,\n          97,\n          66,\n          -115,\n          -9,\n          81,\n          65,\n          63,\n          52,\n          12,\n          65,\n          79,\n          -81,\n          -81,\n          65,\n          -46,\n          -44,\n          95,\n          65,\n          -43,\n          81,\n          -42,\n          65,\n          47,\n          -111,\n          17,\n          65,\n          64,\n          126,\n          21,\n          65,\n          58,\n          67,\n          9,\n          65,\n          126,\n          -49,\n          126,\n          65,\n          -62,\n          9,\n          -21,\n          65,\n          -6,\n          -78,\n          80,\n          65,\n          34,\n          -3,\n          79,\n          65,\n          35,\n          69,\n          76,\n          65,\n          74,\n          41,\n          42,\n          65,\n          52,\n          121,\n          -90,\n          65,\n          87,\n          45,\n          -18,\n          65,\n          -118,\n          125,\n          -124,\n          65,\n          -126,\n          -117,\n          -104,\n          65,\n          -102,\n          -86,\n          -99,\n          65,\n          -61,\n          -38,\n          -115,\n          65,\n          -37,\n          126,\n          0,\n          65,\n          43,\n          -76,\n          -39,\n          65,\n          33,\n          6,\n          51,\n          65,\n          60,\n          -125,\n          -55,\n          65,\n          65,\n          -82,\n          -125,\n          65,\n          71,\n     
     -120,\n          -31,\n          65,\n          73,\n          -27,\n          -56,\n          65,\n          -126,\n          -7,\n          -102,\n          65,\n          -116,\n          -56,\n          61,\n          65,\n          -103,\n          72,\n          52,\n          65,\n          -91,\n          17,\n          71,\n          65,\n          -44,\n          70,\n          -82,\n          65,\n          -43,\n          -77,\n          103,\n          65,\n          33,\n          20,\n          -4,\n          65,\n          67,\n          53,\n          124,\n          65,\n          81,\n          -113,\n          99,\n          65,\n          70,\n          4,\n          33,\n          65,\n          78,\n          -10,\n          -44,\n          65,\n          98,\n          77,\n          1,\n          65,\n          126,\n          95,\n          0,\n          65,\n          -125,\n          -57,\n          -32,\n          65,\n          -115,\n          12,\n          35,\n          65,\n          -101,\n          24,\n          101,\n          65,\n          -106,\n          16,\n          118,\n          65,\n          -101,\n          68,\n          -112,\n          65,\n          -101,\n          51,\n          64,\n          65,\n          -77,\n          -91,\n          -107,\n          65,\n          -49,\n          122,\n          -1,\n          65,\n          -56,\n          -97,\n          81,\n          65,\n          -43,\n          -25,\n          -123,\n          65,\n          52,\n          65,\n          -121,\n          65,\n          95,\n          82,\n          102,\n          65,\n          102,\n          73,\n          117,\n          65,\n          112,\n          -2,\n          -78,\n          65,\n          124,\n          -70,\n          -56,\n          65,\n          112,\n          99,\n          -71,\n          65,\n          -122,\n          2,\n          -60,\n          65,\n          -113,\n          
-100,\n          -11,\n          65,\n          -107,\n          117,\n          58,\n          65,\n          113,\n          85,\n          76,\n          65,\n          -112,\n          0,\n          -120,\n          65,\n          -103,\n          -42,\n          98,\n          65,\n          -100,\n          -9,\n          -117,\n          65,\n          -98,\n          85,\n          110,\n          65,\n          -81,\n          -26,\n          11,\n          65,\n          -88,\n          31,\n          -36,\n          65,\n          -77,\n          -26,\n          62,\n          65,\n          -64,\n          8,\n          -72,\n          65,\n          -49,\n          -19,\n          -108,\n          65,\n          -53,\n          21,\n          106,\n          65,\n          57,\n          -54,\n          -1,\n          65,\n          92,\n          112,\n          -80,\n          65,\n          122,\n          -74,\n          80,\n          65,\n          121,\n          -72,\n          -121,\n          65,\n          -121,\n          -73,\n          -85,\n          65,\n          -121,\n          94,\n          -32,\n          65,\n          -116,\n          44,\n          33,\n          65,\n          -114,\n          46,\n          50,\n          65,\n          -113,\n          4,\n          99,\n          65,\n          -111,\n          -97,\n          -32,\n          65,\n          -106,\n          66,\n          -89,\n          65,\n          -110,\n          12,\n          -76,\n          65,\n          -109,\n          37,\n          114,\n          65,\n          -112,\n          -20,\n          -18,\n          65,\n          -94,\n          102,\n          -114,\n          65,\n          -102,\n          -91,\n          -62,\n          65,\n          -100,\n          -107,\n          -11,\n          65,\n          -88,\n          -14,\n          -11,\n          65,\n          -81,\n          -18,\n          -60,\n          65,\n          
-79,\n          -62,\n          -14,\n          65,\n          -67,\n          -79,\n          -27,\n          65,\n          -49,\n          86,\n          94,\n          65,\n          -52,\n          59,\n          49,\n          65,\n          56,\n          -113,\n          -53,\n          65,\n          81,\n          -118,\n          105,\n          65,\n          84,\n          -43,\n          -51,\n          65,\n          104,\n          -118,\n          114,\n          65,\n          118,\n          120,\n          99,\n          65,\n          120,\n          -111,\n          -115,\n          65,\n          -125,\n          76,\n          -24,\n          65,\n          -124,\n          49,\n          -23,\n          65,\n          -128,\n          41,\n          11,\n          65,\n          -116,\n          -1,\n          -54,\n          65,\n          -121,\n          46,\n          -111,\n          65,\n          -120,\n          83,\n          64,\n          65,\n          -100,\n          -69,\n          118,\n          65,\n          -89,\n          -21,\n          -32,\n          65,\n          -91,\n          114,\n          67,\n          65,\n          -98,\n          -110,\n          80,\n          65,\n          -85,\n          114,\n          26,\n          65,\n          -80,\n          124,\n          -92,\n          65,\n          -73,\n          13,\n          -61,\n          65,\n          -66,\n          -19,\n          45,\n          65,\n          -46,\n          44,\n          -71,\n          65,\n          93,\n          66,\n          -69,\n          65,\n          72,\n          1,\n          -35,\n          65,\n          89,\n          -70,\n          -89,\n          65,\n          103,\n          103,\n          -61,\n          65,\n          -124,\n          104,\n          119,\n          65,\n          -122,\n          126,\n          117,\n          65,\n          -119,\n          63,\n          -19,\n          65,\n      
    -126,\n          -37,\n          -48,\n          65,\n          -107,\n          -19,\n          54,\n          65,\n          -112,\n          79,\n          23,\n          65,\n          -95,\n          92,\n          -71,\n          65,\n          -96,\n          -11,\n          52,\n          65,\n          -83,\n          -52,\n          -30,\n          65,\n          -93,\n          -6,\n          -44,\n          65,\n          -86,\n          98,\n          42,\n          65,\n          -87,\n          88,\n          -69,\n          65,\n          -75,\n          85,\n          80,\n          65,\n          -73,\n          -117,\n          123,\n          65,\n          -62,\n          52,\n          -41,\n          65,\n          -106,\n          -50,\n          -60,\n          65,\n          -95,\n          35,\n          54,\n          65,\n          -94,\n          114,\n          -48,\n          65,\n          -88,\n          123,\n          -56,\n          65,\n          -76,\n          99,\n          78,\n          65,\n          -75,\n          2,\n          73,\n          65,\n          -68,\n          -16,\n          -81,\n          65,\n          -61,\n          -8,\n          85,\n          65,\n          -83,\n          -13,\n          -65,\n          65,\n          -88,\n          37,\n          125,\n          65,\n          -69,\n          102,\n          -76,\n          65,\n          -92,\n          85,\n          -122,\n          65,\n          -79,\n          -48,\n          27,\n          65,\n          -86,\n          87,\n          -29,\n          65,\n          -73,\n          -77,\n          81,\n          65,\n          -81,\n          -109,\n          -114,\n          65,\n          -84,\n          13,\n          14\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 158,\n        \"leftIndex\": [\n          0,\n          1,\n          158,\n        
  935292543,\n          129701864,\n          99169107,\n          694292968,\n          51947520,\n          24\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          158,\n          239335033,\n          335250430,\n          107838969,\n          794916240,\n          604405761,\n          37\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -3523200327259297085,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          160,\n          32253529,\n          532378052,\n          280559654,\n          218004083,\n          883455014,\n          520289098,\n          149958944,\n          757105688,\n          149698447,\n          748638122,\n          300409020,\n          383952219,\n          938128256,\n          813967500,\n          784019911,\n          221536189\n        ],\n        \"cutValueData\": [\n          65,\n          -49,\n          -81,\n          124,\n          65,\n          85,\n          0,\n          -22,\n          66,\n          -113,\n          -39,\n          -114,\n          65,\n          54,\n          -81,\n          -25,\n          65,\n          -103,\n          91,\n          -79,\n          66,\n          10,\n          -77,\n          -46,\n          66,\n          -97,\n          -19,\n          101,\n          65,\n          47,\n          79,\n          -62,\n        
  65,\n          56,\n          88,\n          63,\n          65,\n          102,\n          46,\n          65,\n          65,\n          -81,\n          -32,\n          -21,\n          65,\n          -40,\n          -114,\n          -115,\n          66,\n          101,\n          -70,\n          77,\n          65,\n          58,\n          6,\n          -79,\n          65,\n          52,\n          98,\n          -45,\n          65,\n          76,\n          89,\n          -26,\n          65,\n          107,\n          92,\n          -101,\n          65,\n          -116,\n          -115,\n          -12,\n          65,\n          -83,\n          -112,\n          -82,\n          65,\n          -85,\n          -99,\n          46,\n          66,\n          2,\n          90,\n          39,\n          65,\n          -45,\n          112,\n          113,\n          66,\n          125,\n          -15,\n          -123,\n          65,\n          37,\n          41,\n          10,\n          65,\n          62,\n          -10,\n          61,\n          65,\n          45,\n          68,\n          107,\n          65,\n          59,\n          -35,\n          -89,\n          65,\n          77,\n          33,\n          30,\n          65,\n          90,\n          -91,\n          54,\n          65,\n          108,\n          78,\n          -47,\n          65,\n          120,\n          59,\n          -118,\n          65,\n          -94,\n          22,\n          -41,\n          65,\n          -84,\n          3,\n          94,\n          65,\n          -83,\n          67,\n          -93,\n          65,\n          -16,\n          43,\n          108,\n          65,\n          -44,\n          -115,\n          21,\n          66,\n          14,\n          -8,\n          -31,\n          66,\n          -106,\n          85,\n          1,\n          65,\n          46,\n          77,\n          14,\n          65,\n          49,\n          77,\n          75,\n          65,\n          60,\n    
      63,\n          41,\n          65,\n          52,\n          104,\n          -54,\n          65,\n          69,\n          -62,\n          51,\n          65,\n          68,\n          -81,\n          -112,\n          65,\n          113,\n          -113,\n          44,\n          65,\n          117,\n          51,\n          -67,\n          65,\n          -114,\n          -55,\n          -27,\n          65,\n          -111,\n          46,\n          79,\n          65,\n          -104,\n          -19,\n          -20,\n          65,\n          -94,\n          -117,\n          77,\n          65,\n          -95,\n          15,\n          -12,\n          65,\n          -90,\n          -11,\n          61,\n          65,\n          -63,\n          -61,\n          -113,\n          65,\n          -48,\n          -98,\n          -107,\n          65,\n          -24,\n          -82,\n          -98,\n          65,\n          35,\n          11,\n          67,\n          65,\n          32,\n          104,\n          -75,\n          65,\n          44,\n          -43,\n          -107,\n          65,\n          77,\n          68,\n          -62,\n          65,\n          66,\n          95,\n          -65,\n          65,\n          65,\n          -101,\n          92,\n          65,\n          101,\n          -34,\n          53,\n          65,\n          116,\n          -74,\n          123,\n          65,\n          -118,\n          -67,\n          -98,\n          65,\n          -117,\n          33,\n          -99,\n          65,\n          -115,\n          -45,\n          -106,\n          65,\n          -107,\n          125,\n          7,\n          65,\n          -95,\n          -61,\n          -54,\n          65,\n          -92,\n          -121,\n          -19,\n          65,\n          -94,\n          -26,\n          84,\n          65,\n          -71,\n          -71,\n          -83,\n          65,\n          -57,\n          114,\n          -101,\n          65,\n          
-52,\n          -95,\n          37,\n          65,\n          -37,\n          -64,\n          -85,\n          65,\n          52,\n          -52,\n          -3,\n          65,\n          78,\n          -123,\n          -40,\n          65,\n          86,\n          100,\n          88,\n          65,\n          67,\n          -21,\n          127,\n          65,\n          123,\n          29,\n          -26,\n          65,\n          112,\n          -111,\n          87,\n          65,\n          -126,\n          96,\n          -29,\n          65,\n          -106,\n          33,\n          69,\n          65,\n          -106,\n          101,\n          -116,\n          65,\n          -89,\n          -104,\n          -25,\n          65,\n          -91,\n          127,\n          31,\n          65,\n          -86,\n          33,\n          -56,\n          65,\n          -67,\n          86,\n          -41,\n          65,\n          -50,\n          92,\n          85,\n          65,\n          -42,\n          -77,\n          -63,\n          65,\n          -47,\n          -102,\n          91,\n          65,\n          64,\n          20,\n          29,\n          65,\n          99,\n          -79,\n          114,\n          65,\n          -124,\n          -18,\n          88,\n          65,\n          -120,\n          107,\n          33,\n          65,\n          -105,\n          105,\n          45,\n          65,\n          -104,\n          61,\n          107,\n          65,\n          -101,\n          109,\n          -58,\n          65,\n          -89,\n          62,\n          76,\n          65,\n          -92,\n          -56,\n          -89,\n          65,\n          -77,\n          -64,\n          -110,\n          65,\n          -71,\n          50,\n          -26,\n          65,\n          -74,\n          -69,\n          -15,\n          65,\n          -57,\n          -17,\n          25,\n          65,\n          -51,\n          -122,\n          88,\n          65,\n          
-53,\n          57,\n          77,\n          65,\n          -41,\n          95,\n          -121,\n          65,\n          -43,\n          29,\n          -51,\n          65,\n          -40,\n          60,\n          75,\n          65,\n          -122,\n          42,\n          -10,\n          65,\n          -125,\n          -56,\n          -119,\n          65,\n          -107,\n          46,\n          -111,\n          65,\n          -110,\n          -109,\n          -105,\n          65,\n          -97,\n          -114,\n          -76,\n          65,\n          -93,\n          -37,\n          -112,\n          65,\n          -89,\n          -27,\n          18,\n          65,\n          -90,\n          70,\n          -39,\n          65,\n          -98,\n          24,\n          -104,\n          65,\n          -77,\n          89,\n          -38,\n          65,\n          -60,\n          -28,\n          103,\n          65,\n          -70,\n          -31,\n          -107,\n          65,\n          -58,\n          103,\n          -37,\n          65,\n          -50,\n          20,\n          -38,\n          65,\n          -62,\n          39,\n          -1,\n          65,\n          -61,\n          46,\n          -113,\n          65,\n          -53,\n          30,\n          109,\n          65,\n          -49,\n          108,\n          16,\n          65,\n          -48,\n          125,\n          35,\n          65,\n          -46,\n          32,\n          -114,\n          65,\n          -127,\n          -81,\n          40,\n          65,\n          122,\n          -59,\n          113,\n          65,\n          -121,\n          81,\n          -117,\n          65,\n          -127,\n          -59,\n          56,\n          65,\n          -110,\n          -18,\n          -28,\n          65,\n          -101,\n          -10,\n          -110,\n          65,\n          -79,\n          -50,\n          -103,\n          65,\n          -76,\n          56,\n          -108,\n         
 65,\n          -59,\n          123,\n          33,\n          65,\n          -59,\n          -52,\n          114,\n          65,\n          -62,\n          33,\n          -27,\n          65,\n          -50,\n          45,\n          -82,\n          65,\n          -56,\n          -23,\n          121,\n          65,\n          -124,\n          62,\n          63,\n          65,\n          -117,\n          -99,\n          -68,\n          65,\n          -127,\n          -102,\n          -71,\n          65,\n          -101,\n          -5,\n          96,\n          65,\n          -99,\n          85,\n          -11,\n          65,\n          -81,\n          -104,\n          6,\n          65,\n          -78,\n          -106,\n          28,\n          65,\n          -72,\n          10,\n          -91,\n          65,\n          -51,\n          -115,\n          -102,\n          65,\n          -54,\n          -45,\n          103,\n          65,\n          -50,\n          -68,\n          124,\n          65,\n          117,\n          -99,\n          90,\n          65,\n          -120,\n          78,\n          32,\n          65,\n          -102,\n          -48,\n          -75,\n          65,\n          -90,\n          57,\n          -17,\n          65,\n          -87,\n          -46,\n          -55,\n          65,\n          -73,\n          -70,\n          -49,\n          65,\n          -73,\n          12,\n          110,\n          65,\n          -74,\n          18,\n          -21\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 160,\n        \"leftIndex\": [\n          0,\n          1,\n          160,\n          622325439,\n          835441983,\n          530851213,\n          143588724,\n          377809234,\n          64\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          160,\n          257941439,\n          542847275,\n          1055431784,\n          137188580,\n  
        103573537,\n          0\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -303810271618516607,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          154,\n          797383228,\n          977551878,\n          879546020,\n          852272575,\n          829811334,\n          115498508,\n          169937353,\n          538066278,\n          822076399,\n          520380110,\n          270561155,\n          815792955,\n          893599489,\n          352063743,\n          706034139,\n          131\n        ],\n        \"cutValueData\": [\n          65,\n          125,\n          99,\n          120,\n          65,\n          111,\n          24,\n          94,\n          66,\n          59,\n          23,\n          64,\n          65,\n          77,\n          95,\n          95,\n          65,\n          107,\n          64,\n          -15,\n          65,\n          -39,\n          35,\n          -63,\n          66,\n          -122,\n          -76,\n          -95,\n          65,\n          64,\n          9,\n          -114,\n          65,\n          89,\n          -84,\n          26,\n          65,\n          104,\n          103,\n          -94,\n          65,\n          127,\n          -115,\n          25,\n          65,\n          -64,\n          68,\n          59,\n          65,\n          39,\n          64,\n          42,\n 
         65,\n          88,\n          96,\n          61,\n          65,\n          87,\n          -23,\n          27,\n          65,\n          99,\n          11,\n          5,\n          65,\n          93,\n          -110,\n          -120,\n          65,\n          111,\n          60,\n          -61,\n          65,\n          122,\n          46,\n          -31,\n          65,\n          -78,\n          20,\n          -37,\n          65,\n          -50,\n          -90,\n          -47,\n          65,\n          38,\n          93,\n          97,\n          65,\n          56,\n          -37,\n          -25,\n          65,\n          75,\n          -81,\n          -31,\n          65,\n          93,\n          54,\n          -69,\n          65,\n          107,\n          -17,\n          -57,\n          65,\n          -92,\n          91,\n          -85,\n          65,\n          -63,\n          5,\n          110,\n          65,\n          -60,\n          -89,\n          -72,\n          65,\n          -52,\n          71,\n          -30,\n          65,\n          38,\n          46,\n          -84,\n          65,\n          51,\n          113,\n          -119,\n          65,\n          60,\n          62,\n          -77,\n          65,\n          64,\n          25,\n          113,\n          65,\n          85,\n          59,\n          -110,\n          65,\n          76,\n          100,\n          -111,\n          65,\n          104,\n          117,\n          -117,\n          65,\n          -111,\n          77,\n          -81,\n          65,\n          -85,\n          126,\n          20,\n          65,\n          -68,\n          57,\n          116,\n          65,\n          -49,\n          93,\n          -123,\n          65,\n          -51,\n          -67,\n          112,\n          65,\n          -50,\n          -115,\n          4,\n          65,\n          -43,\n          97,\n          -64,\n          65,\n          34,\n          -124,\n          -73,\n          65,\n  
        63,\n          32,\n          0,\n          65,\n          60,\n          104,\n          104,\n          65,\n          70,\n          -86,\n          47,\n          65,\n          74,\n          89,\n          91,\n          65,\n          80,\n          100,\n          -80,\n          65,\n          97,\n          34,\n          50,\n          65,\n          -113,\n          75,\n          91,\n          65,\n          -93,\n          -122,\n          70,\n          65,\n          -96,\n          -90,\n          -64,\n          65,\n          -87,\n          67,\n          -45,\n          65,\n          -72,\n          56,\n          -58,\n          65,\n          -85,\n          57,\n          14,\n          65,\n          -54,\n          -126,\n          51,\n          65,\n          -60,\n          38,\n          90,\n          65,\n          -33,\n          33,\n          -50,\n          65,\n          -44,\n          -26,\n          -48,\n          65,\n          33,\n          -39,\n          118,\n          65,\n          33,\n          -56,\n          -39,\n          65,\n          63,\n          -75,\n          -35,\n          65,\n          75,\n          -35,\n          -26,\n          65,\n          75,\n          -128,\n          7,\n          65,\n          83,\n          -99,\n          51,\n          65,\n          -124,\n          -26,\n          18,\n          65,\n          -120,\n          109,\n          111,\n          65,\n          -98,\n          32,\n          -45,\n          65,\n          -100,\n          -125,\n          -1,\n          65,\n          -91,\n          -69,\n          92,\n          65,\n          -83,\n          -72,\n          82,\n          65,\n          -96,\n          88,\n          -61,\n          65,\n          -74,\n          -87,\n          98,\n          65,\n          -73,\n          78,\n          96,\n          65,\n          -67,\n          56,\n          -41,\n          65,\n          -53,\n      
    28,\n          -74,\n          65,\n          -47,\n          120,\n          24,\n          65,\n          -43,\n          -123,\n          -74,\n          65,\n          -34,\n          -66,\n          106,\n          65,\n          44,\n          -28,\n          13,\n          65,\n          52,\n          -111,\n          91,\n          65,\n          54,\n          -71,\n          63,\n          65,\n          63,\n          12,\n          4,\n          65,\n          -115,\n          24,\n          51,\n          65,\n          -115,\n          66,\n          -5,\n          65,\n          -120,\n          -67,\n          -18,\n          65,\n          -105,\n          -46,\n          39,\n          65,\n          -94,\n          -2,\n          -32,\n          65,\n          -86,\n          107,\n          -59,\n          65,\n          -92,\n          -47,\n          -68,\n          65,\n          -82,\n          37,\n          107,\n          65,\n          -81,\n          16,\n          109,\n          65,\n          -75,\n          4,\n          -100,\n          65,\n          -82,\n          -65,\n          -16,\n          65,\n          -79,\n          -63,\n          27,\n          65,\n          -76,\n          -46,\n          72,\n          65,\n          -47,\n          77,\n          -38,\n          65,\n          -44,\n          8,\n          86,\n          65,\n          -46,\n          49,\n          -63,\n          65,\n          -38,\n          -61,\n          -57,\n          65,\n          57,\n          -109,\n          122,\n          65,\n          -128,\n          96,\n          92,\n          65,\n          -114,\n          -55,\n          -115,\n          65,\n          -118,\n          69,\n          -9,\n          65,\n          -121,\n          -96,\n          -69,\n          65,\n          -107,\n          -118,\n          -76,\n          65,\n          -101,\n          74,\n          -79,\n          65,\n          -108,\n        
  56,\n          109,\n          65,\n          -100,\n          5,\n          84,\n          65,\n          -106,\n          98,\n          -48,\n          65,\n          -94,\n          -101,\n          -20,\n          65,\n          -86,\n          -118,\n          41,\n          65,\n          -89,\n          99,\n          -54,\n          65,\n          -73,\n          -84,\n          -70,\n          65,\n          -77,\n          -62,\n          71,\n          65,\n          -73,\n          -46,\n          -127,\n          65,\n          -78,\n          48,\n          -11,\n          65,\n          -59,\n          81,\n          -73,\n          65,\n          -52,\n          -127,\n          90,\n          65,\n          -52,\n          72,\n          -49,\n          65,\n          -41,\n          -87,\n          -48,\n          65,\n          -39,\n          -24,\n          109,\n          65,\n          -35,\n          38,\n          7,\n          65,\n          -47,\n          14,\n          -117,\n          65,\n          53,\n          -9,\n          -58,\n          65,\n          122,\n          -69,\n          29,\n          65,\n          -126,\n          92,\n          -101,\n          65,\n          -121,\n          12,\n          53,\n          65,\n          -126,\n          -80,\n          -82,\n          65,\n          -108,\n          15,\n          10,\n          65,\n          -116,\n          58,\n          -109,\n          65,\n          -110,\n          -54,\n          112,\n          65,\n          -104,\n          -97,\n          71,\n          65,\n          -100,\n          -39,\n          27,\n          65,\n          -95,\n          21,\n          -7,\n          65,\n          -103,\n          49,\n          -117,\n          65,\n          -82,\n          -23,\n          51,\n          65,\n          -75,\n          -47,\n          -86,\n          65,\n          -65,\n          62,\n          -115,\n          65,\n          118,\n    
      123,\n          7,\n          65,\n          119,\n          3,\n          -45,\n          65,\n          -124,\n          43,\n          125,\n          65,\n          -123,\n          41,\n          112,\n          65,\n          -120,\n          39,\n          -31,\n          65,\n          -119,\n          -95,\n          -21,\n          65,\n          -114,\n          40,\n          -19,\n          65,\n          -108,\n          -77,\n          53,\n          65,\n          -110,\n          73,\n          107,\n          65,\n          -99,\n          103,\n          -38,\n          65,\n          -65,\n          -9,\n          -57,\n          65,\n          -121,\n          51,\n          -11,\n          65,\n          -106,\n          27,\n          75\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 154,\n        \"leftIndex\": [\n          0,\n          1,\n          154,\n          494460863,\n          602208188,\n          1041617553,\n          571911425,\n          16919168,\n          0\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          154,\n          1056480159,\n          367354828,\n          1049457559,\n          70630381,\n          68186496,\n          0\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -4385096218651858391,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        
\"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          156,\n          582016427,\n          534283153,\n          939413062,\n          215229185,\n          925060108,\n          527564300,\n          1012455218,\n          152895228,\n          101168792,\n          593820895,\n          973107374,\n          839604246,\n          271380495,\n          600365836,\n          132329831,\n          241223\n        ],\n        \"cutValueData\": [\n          66,\n          32,\n          34,\n          -2,\n          65,\n          -106,\n          -100,\n          -91,\n          66,\n          53,\n          72,\n          -13,\n          65,\n          122,\n          70,\n          16,\n          65,\n          -98,\n          45,\n          58,\n          66,\n          51,\n          26,\n          47,\n          65,\n          81,\n          1,\n          -6,\n          65,\n          -113,\n          2,\n          9,\n          65,\n          -117,\n          -105,\n          -62,\n          65,\n          -63,\n          127,\n          -84,\n          66,\n          102,\n          -111,\n          -73,\n          65,\n          74,\n          -89,\n          -106,\n          65,\n          107,\n          72,\n          -118,\n          65,\n          -128,\n          83,\n          25,\n          65,\n          -114,\n          -70,\n          -9,\n          65,\n          -103,\n          61,\n          -26,\n          65,\n          -98,\n          -1,\n          121,\n          66,\n          5,\n          67,\n          121,\n          66,\n          -88,\n          -31,\n          -122,\n          65,\n          66,\n          75,\n          49,\n          65,\n          84,\n          21,\n          6,\n          65,\n          96,\n          -101,\n          110,\n          65,\n          119,\n          6,\n          -119,\n          65,\n          -105,\n          -58,\n          -75,\n          65,\n          
-119,\n          80,\n          -62,\n          65,\n          -113,\n          -38,\n          -10,\n          65,\n          -108,\n          55,\n          -57,\n          65,\n          -109,\n          -100,\n          53,\n          65,\n          -108,\n          13,\n          -50,\n          65,\n          -68,\n          -17,\n          10,\n          65,\n          -39,\n          50,\n          63,\n          65,\n          -16,\n          -75,\n          48,\n          66,\n          125,\n          65,\n          78,\n          65,\n          71,\n          -65,\n          -122,\n          65,\n          94,\n          -72,\n          -39,\n          65,\n          78,\n          -11,\n          -89,\n          65,\n          89,\n          -32,\n          -63,\n          65,\n          86,\n          64,\n          105,\n          65,\n          102,\n          -24,\n          125,\n          65,\n          -126,\n          52,\n          81,\n          65,\n          -125,\n          23,\n          -31,\n          65,\n          -115,\n          -65,\n          -99,\n          65,\n          -112,\n          -31,\n          12,\n          65,\n          -105,\n          -109,\n          -116,\n          65,\n          -109,\n          33,\n          98,\n          65,\n          -97,\n          29,\n          52,\n          65,\n          -72,\n          -93,\n          79,\n          65,\n          -65,\n          62,\n          -23,\n          65,\n          -12,\n          -122,\n          9,\n          66,\n          33,\n          -7,\n          100,\n          66,\n          -104,\n          93,\n          112,\n          65,\n          59,\n          -28,\n          -65,\n          65,\n          81,\n          98,\n          -110,\n          65,\n          68,\n          35,\n          -112,\n          65,\n          127,\n          -18,\n          -39,\n          65,\n          114,\n          -1,\n          45,\n          65,\n          
113,\n          -106,\n          -67,\n          65,\n          -125,\n          -108,\n          15,\n          65,\n          -128,\n          -102,\n          115,\n          65,\n          -117,\n          -101,\n          57,\n          65,\n          -120,\n          31,\n          -118,\n          65,\n          -119,\n          95,\n          -56,\n          65,\n          -112,\n          88,\n          -112,\n          65,\n          -97,\n          82,\n          -90,\n          65,\n          -80,\n          -92,\n          -24,\n          65,\n          -79,\n          -78,\n          20,\n          65,\n          -58,\n          12,\n          20,\n          65,\n          -64,\n          -60,\n          -99,\n          66,\n          -115,\n          -85,\n          112,\n          65,\n          32,\n          18,\n          -24,\n          65,\n          77,\n          69,\n          97,\n          65,\n          69,\n          -70,\n          -89,\n          65,\n          110,\n          36,\n          78,\n          65,\n          108,\n          5,\n          -8,\n          65,\n          118,\n          37,\n          122,\n          65,\n          126,\n          52,\n          23,\n          65,\n          -120,\n          122,\n          88,\n          65,\n          -116,\n          -10,\n          47,\n          65,\n          -110,\n          83,\n          -62,\n          65,\n          -112,\n          -92,\n          -55,\n          65,\n          -127,\n          -10,\n          127,\n          65,\n          -77,\n          -65,\n          93,\n          65,\n          -71,\n          122,\n          108,\n          65,\n          -58,\n          -12,\n          108,\n          65,\n          -59,\n          79,\n          -7,\n          65,\n          -54,\n          -89,\n          106,\n          65,\n          43,\n          -5,\n          17,\n          65,\n          32,\n          -87,\n          124,\n          65,\n         
 72,\n          -111,\n          123,\n          65,\n          -124,\n          -23,\n          38,\n          65,\n          -114,\n          16,\n          94,\n          65,\n          -108,\n          -122,\n          -2,\n          65,\n          -110,\n          -27,\n          -116,\n          65,\n          -87,\n          8,\n          55,\n          65,\n          -65,\n          -35,\n          13,\n          65,\n          -64,\n          23,\n          54,\n          65,\n          -49,\n          60,\n          -61,\n          65,\n          -42,\n          -111,\n          43,\n          65,\n          34,\n          16,\n          -43,\n          65,\n          59,\n          22,\n          -34,\n          65,\n          62,\n          30,\n          -70,\n          65,\n          67,\n          -70,\n          -34,\n          65,\n          -106,\n          9,\n          103,\n          65,\n          -88,\n          -65,\n          9,\n          65,\n          -78,\n          60,\n          51,\n          65,\n          -77,\n          21,\n          -119,\n          65,\n          -61,\n          85,\n          -59,\n          65,\n          -55,\n          100,\n          -101,\n          65,\n          -56,\n          -9,\n          -122,\n          65,\n          -47,\n          -22,\n          15,\n          65,\n          -38,\n          35,\n          20,\n          65,\n          43,\n          -116,\n          -2,\n          65,\n          62,\n          126,\n          -6,\n          65,\n          57,\n          111,\n          123,\n          65,\n          -91,\n          -5,\n          -6,\n          65,\n          -89,\n          -57,\n          -42,\n          65,\n          -88,\n          -76,\n          62,\n          65,\n          -84,\n          -107,\n          -83,\n          65,\n          -80,\n          -127,\n          -48,\n          65,\n          -77,\n          -59,\n          70,\n          65,\n          -61,\n   
       75,\n          12,\n          65,\n          -46,\n          24,\n          61,\n          65,\n          -55,\n          5,\n          123,\n          65,\n          -52,\n          76,\n          13,\n          65,\n          -47,\n          83,\n          119,\n          65,\n          -41,\n          16,\n          13,\n          65,\n          63,\n          108,\n          -116,\n          65,\n          -93,\n          3,\n          124,\n          65,\n          -95,\n          94,\n          -47,\n          65,\n          -75,\n          117,\n          83,\n          65,\n          -85,\n          22,\n          -89,\n          65,\n          -79,\n          -48,\n          -96,\n          65,\n          -77,\n          -74,\n          43,\n          65,\n          -53,\n          -37,\n          20,\n          65,\n          -46,\n          -20,\n          50,\n          65,\n          -46,\n          82,\n          66,\n          65,\n          57,\n          120,\n          -76,\n          65,\n          -91,\n          88,\n          -123,\n          65,\n          -93,\n          85,\n          -81,\n          65,\n          -91,\n          83,\n          35,\n          65,\n          -94,\n          63,\n          -28,\n          65,\n          -87,\n          -113,\n          -68,\n          65,\n          -87,\n          56,\n          38,\n          65,\n          -79,\n          42,\n          -91,\n          65,\n          -50,\n          12,\n          -1,\n          65,\n          -47,\n          124,\n          113,\n          65,\n          -96,\n          -118,\n          -26,\n          65,\n          -99,\n          80,\n          -124,\n          65,\n          -86,\n          -74,\n          -124,\n          65,\n          -87,\n          19,\n          110,\n          65,\n          -59,\n          55,\n          95,\n          65,\n          -99,\n          105,\n          6,\n          65,\n          -85,\n          10,\n     
     -76,\n          65,\n          -87,\n          23,\n          70,\n          65,\n          -104,\n          6,\n          -80,\n          65,\n          -96,\n          -14,\n          6\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 156,\n        \"leftIndex\": [\n          0,\n          1,\n          156,\n          637467355,\n          41414169,\n          313132744,\n          94038010,\n          612773842,\n          18\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          156,\n          729022463,\n          522390806,\n          505709214,\n          253550792,\n          554189200,\n          0\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -2218594253454401235,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          152,\n          828103086,\n          892908737,\n          587253797,\n          120578392,\n          1053440845,\n          989099550,\n          950733798,\n          990502692,\n          834503110,\n          653899539,\n          654314506,\n          762310831,\n          681661110,\n          117946417,\n          473889786,\n          6\n        ],\n        \"cutValueData\": [\n          66,\n          -121,\n          47,\n          -89,\n          66,\n          36,\n          
-7,\n          113,\n          66,\n          -88,\n          20,\n          -109,\n          65,\n          -71,\n          17,\n          -122,\n          66,\n          106,\n          -106,\n          70,\n          66,\n          -112,\n          1,\n          118,\n          65,\n          -89,\n          -42,\n          -5,\n          65,\n          -49,\n          16,\n          33,\n          66,\n          -123,\n          -36,\n          -29,\n          65,\n          117,\n          88,\n          18,\n          65,\n          -108,\n          105,\n          85,\n          65,\n          -60,\n          -80,\n          75,\n          65,\n          -4,\n          -27,\n          -90,\n          66,\n          -110,\n          -111,\n          92,\n          65,\n          52,\n          88,\n          -53,\n          65,\n          -123,\n          -71,\n          52,\n          65,\n          -86,\n          -38,\n          -9,\n          65,\n          -70,\n          -66,\n          -92,\n          65,\n          -64,\n          -38,\n          44,\n          65,\n          -63,\n          -93,\n          109,\n          65,\n          -43,\n          102,\n          3,\n          65,\n          50,\n          -33,\n          -77,\n          65,\n          111,\n          81,\n          -72,\n          65,\n          118,\n          -128,\n          120,\n          65,\n          -94,\n          -80,\n          -9,\n          65,\n          -76,\n          0,\n          -45,\n          65,\n          -68,\n          -27,\n          66,\n          65,\n          -85,\n          26,\n          -113,\n          65,\n          -64,\n          49,\n          127,\n          65,\n          -53,\n          -95,\n          -28,\n          65,\n          -41,\n          15,\n          -9,\n          65,\n          -34,\n          50,\n          -23,\n          65,\n          41,\n          15,\n          67,\n          65,\n          44,\n          -8,\n     
     -58,\n          65,\n          104,\n          59,\n          -45,\n          65,\n          96,\n          56,\n          -36,\n          65,\n          127,\n          55,\n          -28,\n          65,\n          -105,\n          -84,\n          -39,\n          65,\n          -78,\n          55,\n          -58,\n          65,\n          -67,\n          127,\n          1,\n          65,\n          -72,\n          -88,\n          59,\n          65,\n          -61,\n          -105,\n          101,\n          65,\n          -57,\n          125,\n          -71,\n          65,\n          -52,\n          -20,\n          101,\n          65,\n          -55,\n          -44,\n          43,\n          65,\n          -49,\n          -77,\n          -68,\n          65,\n          -48,\n          14,\n          -7,\n          65,\n          -36,\n          -90,\n          110,\n          65,\n          -33,\n          38,\n          24,\n          65,\n          43,\n          68,\n          -48,\n          65,\n          56,\n          -107,\n          35,\n          65,\n          91,\n          -27,\n          -17,\n          65,\n          126,\n          -33,\n          -27,\n          65,\n          113,\n          -21,\n          -35,\n          65,\n          -128,\n          58,\n          46,\n          65,\n          -108,\n          49,\n          -128,\n          65,\n          -105,\n          31,\n          -115,\n          65,\n          -94,\n          82,\n          14,\n          65,\n          -74,\n          -17,\n          125,\n          65,\n          -85,\n          39,\n          -59,\n          65,\n          -78,\n          -91,\n          45,\n          65,\n          -63,\n          -19,\n          -77,\n          65,\n          -55,\n          -98,\n          79,\n          65,\n          -55,\n          -36,\n          69,\n          65,\n          -40,\n          61,\n          78,\n          65,\n          -41,\n          118,\n          
13,\n          65,\n          54,\n          82,\n          0,\n          65,\n          48,\n          57,\n          -28,\n          65,\n          84,\n          -115,\n          -1,\n          65,\n          92,\n          124,\n          -41,\n          65,\n          98,\n          -66,\n          -100,\n          65,\n          127,\n          -28,\n          -16,\n          65,\n          -126,\n          -30,\n          61,\n          65,\n          -102,\n          116,\n          20,\n          65,\n          -120,\n          67,\n          -49,\n          65,\n          -110,\n          49,\n          80,\n          65,\n          -97,\n          -9,\n          -52,\n          65,\n          -92,\n          -29,\n          50,\n          65,\n          -73,\n          53,\n          72,\n          65,\n          -68,\n          8,\n          27,\n          65,\n          -76,\n          -27,\n          108,\n          65,\n          -55,\n          -118,\n          49,\n          65,\n          -52,\n          -102,\n          3,\n          65,\n          -43,\n          -12,\n          36,\n          65,\n          -46,\n          -72,\n          120,\n          65,\n          40,\n          -12,\n          0,\n          65,\n          51,\n          110,\n          -41,\n          65,\n          82,\n          -13,\n          71,\n          65,\n          86,\n          -116,\n          -56,\n          65,\n          -125,\n          -67,\n          -122,\n          65,\n          -115,\n          -39,\n          31,\n          65,\n          -106,\n          -33,\n          121,\n          65,\n          -109,\n          17,\n          89,\n          65,\n          -104,\n          -30,\n          -51,\n          65,\n          -89,\n          -74,\n          -71,\n          65,\n          -90,\n          -68,\n          109,\n          65,\n          -81,\n          -56,\n          -61,\n          65,\n          -68,\n          -71,\n          
-83,\n          65,\n          -65,\n          112,\n          70,\n          65,\n          -76,\n          31,\n          2,\n          65,\n          -48,\n          -75,\n          111,\n          65,\n          -38,\n          -9,\n          95,\n          65,\n          33,\n          79,\n          114,\n          65,\n          64,\n          113,\n          68,\n          65,\n          126,\n          -68,\n          40,\n          65,\n          -117,\n          6,\n          103,\n          65,\n          -115,\n          -95,\n          0,\n          65,\n          -106,\n          -83,\n          108,\n          65,\n          -93,\n          -115,\n          -29,\n          65,\n          -93,\n          -19,\n          70,\n          65,\n          -81,\n          -32,\n          -7,\n          65,\n          -85,\n          126,\n          112,\n          65,\n          -87,\n          3,\n          102,\n          65,\n          -88,\n          127,\n          126,\n          65,\n          -44,\n          44,\n          118,\n          65,\n          50,\n          56,\n          -79,\n          65,\n          73,\n          -87,\n          6,\n          65,\n          79,\n          -17,\n          -80,\n          65,\n          -120,\n          -43,\n          -13,\n          65,\n          -121,\n          -108,\n          -117,\n          65,\n          -120,\n          29,\n          3,\n          65,\n          -110,\n          -120,\n          41,\n          65,\n          -90,\n          21,\n          125,\n          65,\n          -98,\n          42,\n          -55,\n          65,\n          -96,\n          -39,\n          40,\n          65,\n          -84,\n          -126,\n          72,\n          65,\n          -73,\n          65,\n          -28,\n          65,\n          -78,\n          70,\n          -10,\n          65,\n          62,\n          114,\n          -69,\n          65,\n          77,\n          -99,\n          17,\n     
     65,\n          67,\n          93,\n          -94,\n          65,\n          -125,\n          29,\n          -13,\n          65,\n          -122,\n          -51,\n          -13,\n          65,\n          -119,\n          116,\n          57,\n          65,\n          -111,\n          -36,\n          -41,\n          65,\n          -105,\n          -126,\n          -87,\n          65,\n          -93,\n          61,\n          -58,\n          65,\n          -85,\n          -102,\n          -16,\n          65,\n          79,\n          55,\n          -125,\n          65,\n          50,\n          -58,\n          -84,\n          65,\n          66,\n          109,\n          -39,\n          65,\n          72,\n          96,\n          3,\n          65,\n          -126,\n          -72,\n          -112,\n          65,\n          -113,\n          -90,\n          61,\n          65,\n          -114,\n          -8,\n          105,\n          65,\n          -97,\n          -119,\n          119,\n          65,\n          -97,\n          -124,\n          63,\n          65,\n          79,\n          -6,\n          -2,\n          65,\n          -109,\n          -95,\n          -55,\n          65,\n          -98,\n          -111,\n          112,\n          65,\n          -102,\n          -52,\n          -26,\n          65,\n          -96,\n          8,\n          19\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 152,\n        \"leftIndex\": [\n          0,\n          1,\n          152,\n          595517135,\n          376581047,\n          18330468,\n          472688889,\n          33899589,\n          1\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          152,\n          1056755691,\n          1068598247,\n          655065640,\n          415740001,\n          50409219,\n          0\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n  
      \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 7575133640203251999,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          157,\n          866410190,\n          601875750,\n          36443360,\n          255686071,\n          469648838,\n          403040790,\n          790453781,\n          885203761,\n          484456532,\n          301957886,\n          52112197,\n          285212204,\n          401115601,\n          61099873,\n          1009364995,\n          719756\n        ],\n        \"cutValueData\": [\n          66,\n          -126,\n          -111,\n          29,\n          65,\n          -98,\n          -29,\n          -106,\n          66,\n          125,\n          -79,\n          85,\n          65,\n          53,\n          -71,\n          96,\n          66,\n          1,\n          -67,\n          27,\n          66,\n          -121,\n          -98,\n          -124,\n          65,\n          57,\n          11,\n          -52,\n          65,\n          -103,\n          102,\n          64,\n          65,\n          -94,\n          -120,\n          -128,\n          66,\n          70,\n          34,\n          -57,\n          66,\n          -114,\n          43,\n          121,\n          66,\n          -105,\n          53,\n          123,\n          65,\n          33,\n          84,\n          109,\n          65,\n          59,\n          35,\n          37,\n          65,\n          -101,\n          12,\n   
       70,\n          65,\n          -95,\n          34,\n          -70,\n          65,\n          -109,\n          97,\n          -10,\n          65,\n          -45,\n          37,\n          -92,\n          66,\n          14,\n          100,\n          3,\n          66,\n          99,\n          0,\n          -73,\n          65,\n          33,\n          29,\n          -21,\n          65,\n          53,\n          75,\n          30,\n          65,\n          33,\n          -12,\n          93,\n          65,\n          -99,\n          -21,\n          48,\n          65,\n          -93,\n          -73,\n          27,\n          65,\n          -55,\n          65,\n          -68,\n          65,\n          -48,\n          111,\n          105,\n          65,\n          -1,\n          -110,\n          50,\n          66,\n          25,\n          112,\n          47,\n          66,\n          85,\n          -80,\n          -16,\n          65,\n          46,\n          33,\n          -18,\n          65,\n          -117,\n          125,\n          15,\n          65,\n          -93,\n          -6,\n          67,\n          65,\n          -90,\n          13,\n          115,\n          65,\n          -54,\n          22,\n          55,\n          65,\n          -54,\n          -13,\n          -55,\n          65,\n          -37,\n          -105,\n          36,\n          65,\n          -35,\n          64,\n          -19,\n          66,\n          41,\n          21,\n          -63,\n          65,\n          100,\n          92,\n          76,\n          65,\n          -111,\n          12,\n          -45,\n          65,\n          -95,\n          -4,\n          -77,\n          65,\n          -102,\n          -51,\n          29,\n          65,\n          -71,\n          126,\n          -108,\n          65,\n          -50,\n          -86,\n          122,\n          65,\n          -57,\n          3,\n          -22,\n          65,\n          -60,\n          6,\n          111,\n          
65,\n          -39,\n          -1,\n          -55,\n          65,\n          -35,\n          32,\n          -112,\n          65,\n          77,\n          3,\n          74,\n          65,\n          116,\n          103,\n          -86,\n          65,\n          -113,\n          119,\n          32,\n          65,\n          -108,\n          -125,\n          -68,\n          65,\n          -100,\n          17,\n          -119,\n          65,\n          -97,\n          81,\n          53,\n          65,\n          -87,\n          99,\n          -30,\n          65,\n          -61,\n          -71,\n          -9,\n          65,\n          -43,\n          -119,\n          -54,\n          65,\n          -48,\n          10,\n          14,\n          65,\n          -37,\n          -62,\n          -84,\n          65,\n          64,\n          -96,\n          -62,\n          65,\n          89,\n          -94,\n          -30,\n          65,\n          112,\n          27,\n          -6,\n          65,\n          -115,\n          -69,\n          -16,\n          65,\n          -116,\n          -98,\n          77,\n          65,\n          -111,\n          81,\n          -45,\n          65,\n          -104,\n          125,\n          -49,\n          65,\n          -102,\n          91,\n          12,\n          65,\n          -87,\n          64,\n          -73,\n          65,\n          -67,\n          -119,\n          -93,\n          65,\n          -70,\n          -81,\n          -98,\n          65,\n          -62,\n          49,\n          -80,\n          65,\n          -48,\n          26,\n          66,\n          65,\n          -41,\n          -49,\n          75,\n          65,\n          -44,\n          86,\n          73,\n          65,\n          -36,\n          -90,\n          -100,\n          65,\n          61,\n          70,\n          -99,\n          65,\n          71,\n          -104,\n          55,\n          65,\n          98,\n          51,\n          -89,\n          
65,\n          107,\n          62,\n          -114,\n          65,\n          121,\n          57,\n          42,\n          65,\n          123,\n          -109,\n          -84,\n          65,\n          -106,\n          66,\n          90,\n          65,\n          -105,\n          -29,\n          13,\n          65,\n          -109,\n          99,\n          -2,\n          65,\n          -83,\n          38,\n          45,\n          65,\n          -94,\n          -6,\n          61,\n          65,\n          -80,\n          -84,\n          19,\n          65,\n          -72,\n          -40,\n          -91,\n          65,\n          -63,\n          0,\n          105,\n          65,\n          -51,\n          8,\n          -125,\n          65,\n          -54,\n          -94,\n          -59,\n          65,\n          -41,\n          -114,\n          -56,\n          65,\n          55,\n          114,\n          -104,\n          65,\n          74,\n          -5,\n          -92,\n          65,\n          67,\n          92,\n          124,\n          65,\n          104,\n          -92,\n          46,\n          65,\n          83,\n          106,\n          78,\n          65,\n          114,\n          -40,\n          -97,\n          65,\n          -124,\n          -44,\n          -8,\n          65,\n          -114,\n          9,\n          -125,\n          65,\n          -112,\n          -41,\n          55,\n          65,\n          -110,\n          117,\n          -65,\n          65,\n          -89,\n          32,\n          -32,\n          65,\n          -83,\n          104,\n          -103,\n          65,\n          -88,\n          -97,\n          -99,\n          65,\n          -78,\n          -64,\n          119,\n          65,\n          -88,\n          27,\n          -44,\n          65,\n          -67,\n          43,\n          -41,\n          65,\n          -57,\n          -24,\n          57,\n          65,\n          -59,\n          -125,\n          5,\n          
65,\n          -54,\n          -46,\n          107,\n          65,\n          -36,\n          -79,\n          -114,\n          65,\n          73,\n          -64,\n          -42,\n          65,\n          67,\n          85,\n          124,\n          65,\n          69,\n          89,\n          122,\n          65,\n          89,\n          -125,\n          58,\n          65,\n          72,\n          99,\n          -17,\n          65,\n          83,\n          -71,\n          -36,\n          65,\n          117,\n          125,\n          53,\n          65,\n          -126,\n          72,\n          -54,\n          65,\n          -126,\n          -57,\n          22,\n          65,\n          -117,\n          111,\n          25,\n          65,\n          -95,\n          62,\n          99,\n          65,\n          -89,\n          -124,\n          117,\n          65,\n          -88,\n          -14,\n          -49,\n          65,\n          -75,\n          -120,\n          -115,\n          65,\n          -67,\n          3,\n          24,\n          65,\n          -72,\n          -126,\n          -121,\n          65,\n          59,\n          -43,\n          23,\n          65,\n          78,\n          125,\n          83,\n          65,\n          90,\n          -24,\n          61,\n          65,\n          99,\n          104,\n          -73,\n          65,\n          -125,\n          4,\n          -51,\n          65,\n          -121,\n          -104,\n          88,\n          65,\n          -84,\n          52,\n          15,\n          65,\n          -75,\n          15,\n          -118,\n          65,\n          -63,\n          -30,\n          56,\n          65,\n          48,\n          -26,\n          111,\n          65,\n          77,\n          112,\n          9,\n          65,\n          93,\n          107,\n          68,\n          65,\n          103,\n          103,\n          -50,\n          65,\n          -127,\n          -25,\n          -8,\n          65,\n    
      120,\n          98,\n          123,\n          65,\n          -125,\n          88,\n          16,\n          65,\n          -120,\n          -99,\n          70,\n          65,\n          -88,\n          18,\n          -125,\n          65,\n          -73,\n          81,\n          -14,\n          65,\n          -76,\n          -64,\n          35,\n          65,\n          121,\n          -56,\n          -31,\n          65,\n          -127,\n          -66,\n          -50,\n          65,\n          -114,\n          91,\n          49,\n          65,\n          -81,\n          -40,\n          104,\n          65,\n          -75,\n          -1,\n          89,\n          65,\n          -127,\n          -56,\n          0,\n          65,\n          -81,\n          -21,\n          93,\n          65,\n          -119,\n          -74,\n          18\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 157,\n        \"leftIndex\": [\n          0,\n          1,\n          157,\n          387372027,\n          1048194750,\n          442317679,\n          344551001,\n          436333250,\n          4\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          157,\n          122627071,\n          242953906,\n          1051927846,\n          134357356,\n          12643586,\n          18\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -2303288239725547002,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      
\"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          161,\n          226961351,\n          929827151,\n          44570955,\n          640526152,\n          880438976,\n          935694327,\n          100924351,\n          988904923,\n          14434166,\n          620923090,\n          192472920,\n          27135989,\n          77176154,\n          722999568,\n          873898353,\n          960685176,\n          6\n        ],\n        \"cutValueData\": [\n          66,\n          -119,\n          61,\n          33,\n          66,\n          31,\n          36,\n          -95,\n          66,\n          -113,\n          52,\n          40,\n          66,\n          25,\n          61,\n          62,\n          66,\n          103,\n          -42,\n          86,\n          66,\n          -87,\n          -25,\n          31,\n          66,\n          2,\n          -22,\n          52,\n          66,\n          40,\n          -37,\n          -57,\n          66,\n          80,\n          -6,\n          -25,\n          65,\n          -58,\n          125,\n          -34,\n          65,\n          -91,\n          -94,\n          -17,\n          65,\n          -36,\n          91,\n          -12,\n          65,\n          66,\n          -76,\n          90,\n          65,\n          -84,\n          -76,\n          39,\n          65,\n          -56,\n          -34,\n          -42,\n          65,\n          -38,\n          70,\n          39,\n          65,\n          75,\n          -3,\n          106,\n          65,\n          87,\n          89,\n          -60,\n          65,\n          -87,\n          -126,\n          -30,\n          65,\n          -70,\n          -1,\n          -74,\n          65,\n          -55,\n          -101,\n          -84,\n          65,\n          -43,\n          -39,\n          127,\n          65,\n          52,\n          87,\n     
     89,\n          65,\n          76,\n          39,\n          111,\n          65,\n          88,\n          106,\n          -88,\n          65,\n          95,\n          -13,\n          -88,\n          65,\n          -92,\n          67,\n          2,\n          65,\n          -85,\n          42,\n          -74,\n          65,\n          -72,\n          -54,\n          -66,\n          65,\n          -76,\n          34,\n          14,\n          65,\n          -62,\n          -108,\n          -5,\n          65,\n          -50,\n          -72,\n          -41,\n          65,\n          -54,\n          -76,\n          -40,\n          65,\n          -40,\n          -69,\n          119,\n          65,\n          42,\n          79,\n          72,\n          65,\n          48,\n          -15,\n          -27,\n          65,\n          78,\n          48,\n          60,\n          65,\n          85,\n          -71,\n          -105,\n          65,\n          -109,\n          46,\n          72,\n          65,\n          -91,\n          -107,\n          8,\n          65,\n          -88,\n          46,\n          15,\n          65,\n          -78,\n          86,\n          -124,\n          65,\n          -69,\n          -48,\n          111,\n          65,\n          -49,\n          38,\n          84,\n          65,\n          -49,\n          -38,\n          -13,\n          65,\n          -59,\n          -57,\n          -38,\n          65,\n          -35,\n          16,\n          -80,\n          65,\n          -41,\n          108,\n          -49,\n          65,\n          35,\n          -47,\n          79,\n          65,\n          62,\n          -63,\n          -81,\n          65,\n          70,\n          -70,\n          -88,\n          65,\n          73,\n          -59,\n          -93,\n          65,\n          -106,\n          100,\n          -113,\n          65,\n          -110,\n          -122,\n          -126,\n          65,\n          -102,\n          -123,\n          
-47,\n          65,\n          -95,\n          -49,\n          -7,\n          65,\n          -85,\n          1,\n          -120,\n          65,\n          -76,\n          -27,\n          117,\n          65,\n          -72,\n          36,\n          -96,\n          65,\n          -51,\n          100,\n          57,\n          65,\n          -66,\n          -73,\n          -74,\n          65,\n          -54,\n          -95,\n          -60,\n          65,\n          -47,\n          -46,\n          18,\n          65,\n          -47,\n          -39,\n          -5,\n          65,\n          38,\n          56,\n          -109,\n          65,\n          49,\n          -28,\n          -60,\n          65,\n          48,\n          -90,\n          54,\n          65,\n          55,\n          22,\n          -124,\n          65,\n          68,\n          95,\n          -41,\n          65,\n          71,\n          83,\n          50,\n          65,\n          -116,\n          106,\n          -72,\n          65,\n          -99,\n          -40,\n          18,\n          65,\n          -97,\n          94,\n          -83,\n          65,\n          -94,\n          -31,\n          -15,\n          65,\n          -73,\n          73,\n          73,\n          65,\n          -74,\n          55,\n          23,\n          65,\n          -77,\n          124,\n          124,\n          65,\n          -80,\n          76,\n          -28,\n          65,\n          -67,\n          -31,\n          -56,\n          65,\n          -61,\n          -86,\n          -5,\n          65,\n          -50,\n          -92,\n          13,\n          65,\n          -36,\n          -59,\n          -31,\n          65,\n          39,\n          54,\n          119,\n          65,\n          32,\n          -33,\n          92,\n          65,\n          55,\n          78,\n          27,\n          65,\n          79,\n          116,\n          -4,\n          65,\n          125,\n          -61,\n          16,\n          
65,\n          -113,\n          43,\n          30,\n          65,\n          -102,\n          15,\n          6,\n          65,\n          -113,\n          -112,\n          109,\n          65,\n          -107,\n          64,\n          106,\n          65,\n          -86,\n          -5,\n          -111,\n          65,\n          -83,\n          -97,\n          -76,\n          65,\n          -80,\n          60,\n          126,\n          65,\n          -67,\n          -65,\n          64,\n          65,\n          -79,\n          -25,\n          -120,\n          65,\n          -64,\n          46,\n          -17,\n          65,\n          -47,\n          -86,\n          55,\n          65,\n          111,\n          83,\n          87,\n          65,\n          -118,\n          55,\n          115,\n          65,\n          -111,\n          -104,\n          65,\n          65,\n          -106,\n          113,\n          62,\n          65,\n          -105,\n          10,\n          60,\n          65,\n          -100,\n          -34,\n          -95,\n          65,\n          -103,\n          -86,\n          95,\n          65,\n          -93,\n          -90,\n          -41,\n          65,\n          -75,\n          60,\n          -60,\n          65,\n          -79,\n          110,\n          -58,\n          65,\n          -74,\n          -123,\n          -3,\n          65,\n          -68,\n          -32,\n          -83,\n          65,\n          -64,\n          -24,\n          53,\n          65,\n          84,\n          55,\n          46,\n          65,\n          103,\n          -6,\n          -47,\n          65,\n          -117,\n          -46,\n          2,\n          65,\n          -124,\n          -95,\n          88,\n          65,\n          -114,\n          -114,\n          57,\n          65,\n          -115,\n          41,\n          -6,\n          65,\n          -106,\n          -50,\n          105,\n          65,\n          -100,\n          -82,\n          -35,\n    
      65,\n          -91,\n          11,\n          40,\n          65,\n          -100,\n          -11,\n          68,\n          65,\n          -95,\n          -100,\n          -66,\n          65,\n          -85,\n          -123,\n          9,\n          65,\n          -57,\n          25,\n          -35,\n          65,\n          102,\n          73,\n          15,\n          65,\n          108,\n          108,\n          60,\n          65,\n          104,\n          59,\n          -92,\n          65,\n          122,\n          113,\n          -111,\n          65,\n          -124,\n          65,\n          84,\n          65,\n          -121,\n          -33,\n          -40,\n          65,\n          -111,\n          125,\n          -81,\n          65,\n          -100,\n          -54,\n          -45,\n          65,\n          -97,\n          80,\n          -96,\n          65,\n          -102,\n          19,\n          60,\n          65,\n          -101,\n          2,\n          -46,\n          65,\n          -88,\n          45,\n          109,\n          65,\n          -83,\n          20,\n          48,\n          65,\n          97,\n          -47,\n          -29,\n          65,\n          103,\n          -104,\n          -68,\n          65,\n          124,\n          123,\n          -65,\n          65,\n          123,\n          37,\n          -12,\n          65,\n          -127,\n          55,\n          54,\n          65,\n          -118,\n          3,\n          -5,\n          65,\n          -116,\n          -28,\n          91,\n          65,\n          -109,\n          -97,\n          96,\n          65,\n          -98,\n          -20,\n          -89,\n          65,\n          -98,\n          -62,\n          96,\n          65,\n          -101,\n          -70,\n          -50,\n          65,\n          -90,\n          -106,\n          -68,\n          65,\n          -94,\n          -14,\n          2,\n          65,\n          -86,\n          107,\n          -57,\n   
       65,\n          -125,\n          114,\n          -23,\n          65,\n          -125,\n          -86,\n          -86,\n          65,\n          -120,\n          26,\n          -51,\n          65,\n          -104,\n          98,\n          -111,\n          65,\n          -90,\n          -29,\n          -70,\n          65,\n          -121,\n          54,\n          57,\n          65,\n          -124,\n          -85,\n          71,\n          65,\n          -121,\n          -7,\n          -85,\n          65,\n          -124,\n          2,\n          81,\n          65,\n          -125,\n          37,\n          -49\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 161,\n        \"leftIndex\": [\n          0,\n          1,\n          161,\n          394231387,\n          215853849,\n          738319430,\n          552710009,\n          68199825,\n          76\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          161,\n          914325007,\n          498510709,\n          739595480,\n          350268169,\n          271701895,\n          516\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7052110192596428648,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          158,\n          678555297,\n          808367964,\n          
149986988,\n          605099107,\n          871364235,\n          351107547,\n          135858030,\n          1061060801,\n          26888826,\n          283728926,\n          265670822,\n          403489305,\n          807399008,\n          484439018,\n          50363814,\n          8680769\n        ],\n        \"cutValueData\": [\n          66,\n          90,\n          -8,\n          -20,\n          65,\n          -11,\n          -46,\n          -6,\n          66,\n          -124,\n          81,\n          25,\n          65,\n          -53,\n          88,\n          -88,\n          66,\n          81,\n          -111,\n          91,\n          66,\n          114,\n          11,\n          -7,\n          66,\n          -98,\n          84,\n          -99,\n          65,\n          -112,\n          88,\n          91,\n          65,\n          -62,\n          35,\n          111,\n          66,\n          61,\n          -76,\n          94,\n          66,\n          -103,\n          76,\n          -115,\n          65,\n          42,\n          -2,\n          26,\n          65,\n          -56,\n          -76,\n          58,\n          65,\n          -53,\n          68,\n          3,\n          65,\n          -41,\n          -92,\n          -100,\n          66,\n          45,\n          59,\n          -5,\n          66,\n          -111,\n          42,\n          -84,\n          65,\n          46,\n          -66,\n          -79,\n          65,\n          86,\n          -16,\n          25,\n          65,\n          -87,\n          -47,\n          -23,\n          65,\n          -51,\n          -36,\n          22,\n          65,\n          -41,\n          -111,\n          125,\n          65,\n          42,\n          -47,\n          116,\n          65,\n          65,\n          -63,\n          -123,\n          65,\n          106,\n          -46,\n          3,\n          65,\n          -101,\n          -91,\n          53,\n          65,\n          -73,\n          108,\n       
   -52,\n          65,\n          -41,\n          -99,\n          122,\n          65,\n          -38,\n          -11,\n          -34,\n          65,\n          56,\n          -85,\n          -117,\n          65,\n          94,\n          -16,\n          -19,\n          65,\n          81,\n          -42,\n          -113,\n          65,\n          -114,\n          -34,\n          -6,\n          65,\n          -104,\n          -109,\n          25,\n          65,\n          -83,\n          4,\n          -100,\n          65,\n          -78,\n          8,\n          -76,\n          65,\n          -67,\n          53,\n          -3,\n          65,\n          -53,\n          86,\n          98,\n          65,\n          51,\n          120,\n          76,\n          65,\n          63,\n          -114,\n          94,\n          65,\n          78,\n          -99,\n          3,\n          65,\n          90,\n          94,\n          0,\n          65,\n          92,\n          -117,\n          34,\n          65,\n          106,\n          -41,\n          39,\n          65,\n          125,\n          16,\n          70,\n          65,\n          -98,\n          89,\n          122,\n          65,\n          -101,\n          -120,\n          -112,\n          65,\n          -91,\n          -72,\n          52,\n          65,\n          -100,\n          69,\n          -9,\n          65,\n          -89,\n          -73,\n          -68,\n          65,\n          -81,\n          47,\n          125,\n          65,\n          -76,\n          -43,\n          105,\n          65,\n          -66,\n          -85,\n          -75,\n          65,\n          -50,\n          47,\n          -24,\n          65,\n          32,\n          1,\n          30,\n          65,\n          55,\n          -47,\n          44,\n          65,\n          58,\n          -32,\n          -37,\n          65,\n          57,\n          82,\n          120,\n          65,\n          65,\n          116,\n          -99,\n        
  65,\n          78,\n          -101,\n          114,\n          65,\n          73,\n          -85,\n          58,\n          65,\n          89,\n          -94,\n          -104,\n          65,\n          124,\n          5,\n          -78,\n          65,\n          104,\n          30,\n          117,\n          65,\n          -116,\n          -71,\n          -80,\n          65,\n          -114,\n          -116,\n          84,\n          65,\n          -104,\n          -18,\n          -29,\n          65,\n          -101,\n          -6,\n          54,\n          65,\n          -74,\n          -55,\n          13,\n          65,\n          -99,\n          14,\n          -20,\n          65,\n          -79,\n          35,\n          122,\n          65,\n          -66,\n          40,\n          -25,\n          65,\n          -72,\n          -15,\n          -45,\n          65,\n          54,\n          -100,\n          5,\n          65,\n          46,\n          35,\n          43,\n          65,\n          42,\n          -32,\n          60,\n          65,\n          32,\n          0,\n          78,\n          65,\n          70,\n          -128,\n          -71,\n          65,\n          95,\n          -104,\n          78,\n          65,\n          108,\n          -119,\n          -28,\n          65,\n          111,\n          -124,\n          118,\n          65,\n          96,\n          54,\n          -108,\n          65,\n          112,\n          97,\n          27,\n          65,\n          -115,\n          24,\n          -10,\n          65,\n          -109,\n          -6,\n          -101,\n          65,\n          -102,\n          -84,\n          2,\n          65,\n          -104,\n          -107,\n          75,\n          65,\n          -81,\n          -10,\n          47,\n          65,\n          -91,\n          -66,\n          126,\n          65,\n          -88,\n          13,\n          23,\n          65,\n          -77,\n          23,\n          -71,\n          
65,\n          -68,\n          119,\n          107,\n          65,\n          -78,\n          -34,\n          -125,\n          65,\n          -66,\n          -28,\n          100,\n          65,\n          35,\n          -84,\n          113,\n          65,\n          34,\n          -42,\n          103,\n          65,\n          68,\n          -63,\n          25,\n          65,\n          82,\n          54,\n          -23,\n          65,\n          -124,\n          -14,\n          16,\n          65,\n          112,\n          61,\n          -114,\n          65,\n          -122,\n          50,\n          -55,\n          65,\n          -118,\n          -70,\n          108,\n          65,\n          -111,\n          29,\n          -127,\n          65,\n          -103,\n          -4,\n          -8,\n          65,\n          -95,\n          -37,\n          -47,\n          65,\n          -85,\n          -36,\n          92,\n          65,\n          -84,\n          69,\n          104,\n          65,\n          -80,\n          124,\n          -123,\n          65,\n          -76,\n          -48,\n          -94,\n          65,\n          -71,\n          126,\n          -11,\n          65,\n          -69,\n          86,\n          -93,\n          65,\n          73,\n          7,\n          12,\n          65,\n          107,\n          8,\n          115,\n          65,\n          114,\n          -48,\n          -17,\n          65,\n          119,\n          -122,\n          -63,\n          65,\n          -123,\n          32,\n          -17,\n          65,\n          -113,\n          43,\n          -95,\n          65,\n          -110,\n          98,\n          -122,\n          65,\n          -96,\n          127,\n          -99,\n          65,\n          -90,\n          -18,\n          81,\n          65,\n          -89,\n          115,\n          64,\n          65,\n          -85,\n          41,\n          92,\n          65,\n          -82,\n          -22,\n          -121,\n       
   65,\n          -85,\n          -110,\n          -106,\n          65,\n          -80,\n          -71,\n          -30,\n          65,\n          -78,\n          -102,\n          -69,\n          65,\n          -80,\n          -68,\n          42,\n          65,\n          -57,\n          -50,\n          76,\n          65,\n          126,\n          -55,\n          -72,\n          65,\n          120,\n          98,\n          -96,\n          65,\n          -125,\n          -93,\n          -24,\n          65,\n          -120,\n          70,\n          119,\n          65,\n          -116,\n          99,\n          94,\n          65,\n          -108,\n          -24,\n          46,\n          65,\n          -90,\n          -47,\n          -59,\n          65,\n          -104,\n          -95,\n          -22,\n          65,\n          -84,\n          36,\n          -92,\n          65,\n          -60,\n          -120,\n          -37,\n          65,\n          -61,\n          -33,\n          -59,\n          65,\n          -127,\n          -68,\n          -95,\n          65,\n          -119,\n          -94,\n          -68,\n          65,\n          -119,\n          -37,\n          -39,\n          65,\n          -119,\n          77,\n          0,\n          65,\n          -105,\n          -124,\n          24,\n          65,\n          -117,\n          -45,\n          -21,\n          65,\n          -82,\n          -7,\n          -34,\n          65,\n          -85,\n          105,\n          -106,\n          65,\n          -72,\n          -88,\n          -3,\n          65,\n          -58,\n          -41,\n          62,\n          65,\n          -56,\n          -52,\n          95,\n          65,\n          -125,\n          -4,\n          101,\n          65,\n          -113,\n          97,\n          55,\n          65,\n          -106,\n          -40,\n          -14,\n          65,\n          -91,\n          -82,\n          53,\n          65,\n          -70,\n          -58,\n       
   -11,\n          65,\n          -69,\n          97,\n          -42,\n          65,\n          -93,\n          21,\n          86,\n          65,\n          -64,\n          -50,\n          121\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 158,\n        \"leftIndex\": [\n          0,\n          1,\n          158,\n          797728735,\n          97646463,\n          682130776,\n          512709890,\n          311629952,\n          8\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          158,\n          666782095,\n          756836223,\n          904010474,\n          974997192,\n          143047808,\n          32\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -7613200544347028649,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          159,\n          493603855,\n          999138685,\n          908948587,\n          195652832,\n          572209594,\n          88854975,\n          123080265,\n          670799892,\n          653949563,\n          502984317,\n          119402491,\n          704925700,\n          966292116,\n          568532303,\n          939131048,\n          85199327\n        ],\n        \"cutValueData\": [\n          66,\n          55,\n          -23,\n          -17,\n          65,\n          -62,\n      
    -124,\n          12,\n          66,\n          -126,\n          19,\n          -61,\n          65,\n          -92,\n          -117,\n          -88,\n          66,\n          5,\n          -117,\n          113,\n          66,\n          118,\n          -88,\n          69,\n          66,\n          -103,\n          67,\n          -38,\n          65,\n          95,\n          -25,\n          -31,\n          65,\n          -56,\n          120,\n          -95,\n          65,\n          -27,\n          -27,\n          99,\n          66,\n          38,\n          -59,\n          2,\n          66,\n          -120,\n          -47,\n          84,\n          65,\n          64,\n          -2,\n          91,\n          65,\n          -113,\n          -85,\n          35,\n          65,\n          -83,\n          -32,\n          77,\n          65,\n          -49,\n          116,\n          82,\n          65,\n          41,\n          54,\n          -25,\n          65,\n          94,\n          123,\n          -15,\n          65,\n          103,\n          78,\n          91,\n          65,\n          -125,\n          -101,\n          -92,\n          65,\n          -83,\n          120,\n          77,\n          65,\n          -58,\n          99,\n          -41,\n          65,\n          -52,\n          41,\n          68,\n          65,\n          -38,\n          -50,\n          -64,\n          65,\n          40,\n          -82,\n          -126,\n          65,\n          60,\n          87,\n          -105,\n          65,\n          64,\n          -75,\n          27,\n          65,\n          91,\n          58,\n          -86,\n          65,\n          -104,\n          -121,\n          -17,\n          65,\n          -118,\n          -58,\n          98,\n          65,\n          -92,\n          -80,\n          -33,\n          65,\n          -73,\n          1,\n          125,\n          65,\n          -58,\n          -63,\n          -79,\n          65,\n          -64,\n          
-78,\n          -71,\n          65,\n          -60,\n          26,\n          -72,\n          65,\n          -53,\n          -121,\n          -75,\n          65,\n          35,\n          -3,\n          -99,\n          65,\n          34,\n          18,\n          -126,\n          65,\n          54,\n          -28,\n          -121,\n          65,\n          62,\n          -6,\n          101,\n          65,\n          71,\n          9,\n          -39,\n          65,\n          100,\n          -76,\n          104,\n          65,\n          90,\n          -29,\n          -116,\n          65,\n          112,\n          48,\n          -21,\n          65,\n          -98,\n          -67,\n          76,\n          65,\n          -87,\n          -60,\n          13,\n          65,\n          -83,\n          110,\n          21,\n          65,\n          -73,\n          -93,\n          -127,\n          65,\n          -75,\n          -126,\n          -108,\n          65,\n          -63,\n          -33,\n          -116,\n          65,\n          -57,\n          62,\n          -49,\n          65,\n          -46,\n          40,\n          114,\n          65,\n          -54,\n          21,\n          41,\n          65,\n          35,\n          97,\n          -18,\n          65,\n          61,\n          100,\n          54,\n          65,\n          64,\n          78,\n          21,\n          65,\n          67,\n          -24,\n          -15,\n          65,\n          86,\n          17,\n          -14,\n          65,\n          96,\n          -64,\n          60,\n          65,\n          -125,\n          -90,\n          -107,\n          65,\n          -127,\n          38,\n          107,\n          65,\n          -99,\n          29,\n          11,\n          65,\n          -104,\n          16,\n          -35,\n          65,\n          -86,\n          83,\n          65,\n          65,\n          -83,\n          -128,\n          51,\n          65,\n          -86,\n          -29,\n    
      -80,\n          65,\n          -81,\n          4,\n          82,\n          65,\n          -76,\n          -127,\n          -80,\n          65,\n          -69,\n          5,\n          -54,\n          65,\n          -63,\n          102,\n          102,\n          65,\n          -49,\n          13,\n          62,\n          65,\n          -44,\n          -77,\n          -66,\n          65,\n          55,\n          72,\n          54,\n          65,\n          60,\n          -124,\n          76,\n          65,\n          79,\n          -40,\n          -101,\n          65,\n          88,\n          -111,\n          15,\n          65,\n          109,\n          78,\n          13,\n          65,\n          111,\n          -94,\n          -2,\n          65,\n          -118,\n          113,\n          -100,\n          65,\n          -122,\n          -123,\n          9,\n          65,\n          -109,\n          65,\n          103,\n          65,\n          -106,\n          21,\n          -29,\n          65,\n          -90,\n          -16,\n          -41,\n          65,\n          -94,\n          -7,\n          -127,\n          65,\n          -93,\n          -127,\n          -26,\n          65,\n          -88,\n          -70,\n          -87,\n          65,\n          -74,\n          123,\n          72,\n          65,\n          -86,\n          -123,\n          -18,\n          65,\n          -67,\n          113,\n          24,\n          65,\n          -41,\n          29,\n          -92,\n          65,\n          -43,\n          11,\n          10,\n          65,\n          52,\n          -2,\n          -20,\n          65,\n          34,\n          28,\n          74,\n          65,\n          54,\n          -11,\n          -27,\n          65,\n          70,\n          86,\n          -36,\n          65,\n          81,\n          -79,\n          -80,\n          65,\n          102,\n          99,\n          -104,\n          65,\n          119,\n          66,\n          
-58,\n          65,\n          121,\n          94,\n          -17,\n          65,\n          -127,\n          -101,\n          -93,\n          65,\n          -126,\n          93,\n          49,\n          65,\n          -123,\n          27,\n          103,\n          65,\n          -114,\n          -116,\n          96,\n          65,\n          -102,\n          104,\n          124,\n          65,\n          -104,\n          -123,\n          -33,\n          65,\n          -95,\n          19,\n          97,\n          65,\n          -80,\n          11,\n          113,\n          65,\n          -68,\n          -89,\n          -5,\n          65,\n          -47,\n          -99,\n          89,\n          65,\n          -38,\n          36,\n          -25,\n          65,\n          34,\n          -84,\n          34,\n          65,\n          79,\n          -74,\n          -25,\n          65,\n          67,\n          -20,\n          -21,\n          65,\n          73,\n          118,\n          -42,\n          65,\n          103,\n          -93,\n          -86,\n          65,\n          107,\n          -63,\n          80,\n          65,\n          98,\n          117,\n          74,\n          65,\n          122,\n          98,\n          42,\n          65,\n          -125,\n          -111,\n          55,\n          65,\n          -122,\n          -51,\n          -22,\n          65,\n          -114,\n          78,\n          -111,\n          65,\n          -105,\n          47,\n          116,\n          65,\n          -108,\n          -7,\n          -80,\n          65,\n          -111,\n          -116,\n          62,\n          65,\n          -107,\n          -114,\n          8,\n          65,\n          -94,\n          -13,\n          42,\n          65,\n          -97,\n          119,\n          33,\n          65,\n          -91,\n          91,\n          25,\n          65,\n          -66,\n          -26,\n          66,\n          65,\n          -57,\n          63,\n        
  40,\n          65,\n          -67,\n          -31,\n          -124,\n          65,\n          -43,\n          -64,\n          -17,\n          65,\n          35,\n          107,\n          -108,\n          65,\n          60,\n          -75,\n          78,\n          65,\n          115,\n          33,\n          78,\n          65,\n          117,\n          78,\n          119,\n          65,\n          -121,\n          118,\n          48,\n          65,\n          -110,\n          70,\n          -80,\n          65,\n          -108,\n          18,\n          52,\n          65,\n          -105,\n          18,\n          -101,\n          65,\n          -106,\n          11,\n          40,\n          65,\n          -97,\n          -101,\n          -80,\n          65,\n          -92,\n          -56,\n          -91,\n          65,\n          -86,\n          -42,\n          -19,\n          65,\n          -83,\n          -32,\n          -90,\n          65,\n          -67,\n          -2,\n          46,\n          65,\n          -63,\n          -106,\n          37,\n          65,\n          -52,\n          101,\n          -121,\n          65,\n          47,\n          -14,\n          -35,\n          65,\n          -125,\n          20,\n          -19,\n          65,\n          -115,\n          78,\n          -79,\n          65,\n          -113,\n          59,\n          118,\n          65,\n          -112,\n          117,\n          -77,\n          65,\n          -103,\n          3,\n          -18,\n          65,\n          -53,\n          -101,\n          118,\n          65,\n          33,\n          -52,\n          8,\n          65,\n          -126,\n          16,\n          -66,\n          65,\n          -115,\n          120,\n          -73,\n          65,\n          -105,\n          86,\n          77\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 159,\n        \"leftIndex\": [\n          
0,\n          1,\n          159,\n          469234591,\n          1028121447,\n          811530955,\n          2616522,\n          395907,\n          1\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          159,\n          795734207,\n          21460523,\n          274630743,\n          169015721,\n          940676317,\n          4\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 3272495365777409116,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          170,\n          237220975,\n          619446502,\n          1060895789,\n          576375097,\n          906341353,\n          603266710,\n          977175568,\n          764181511,\n          200040320,\n          1070647016,\n          624794860,\n          679096514,\n          64451588,\n          119598281,\n          508949507,\n          1070296763,\n          159902282\n        ],\n        \"cutValueData\": [\n          66,\n          -113,\n          61,\n          113,\n          65,\n          -77,\n          51,\n          90,\n          66,\n          -107,\n          13,\n          -39,\n          65,\n          113,\n          -38,\n          32,\n          66,\n          72,\n          111,\n          70,\n          66,\n          -102,\n          7,\n          -83,\n          65,\n          115,\n          101,\n          7,\n          
65,\n          -99,\n          -79,\n          -108,\n          65,\n          -54,\n          82,\n          -72,\n          66,\n          93,\n          -102,\n          113,\n          65,\n          91,\n          -59,\n          -21,\n          65,\n          116,\n          83,\n          -128,\n          65,\n          -121,\n          -124,\n          -118,\n          65,\n          -84,\n          -89,\n          -72,\n          65,\n          -117,\n          33,\n          -13,\n          66,\n          17,\n          -51,\n          -103,\n          66,\n          102,\n          -48,\n          -25,\n          66,\n          -116,\n          107,\n          -12,\n          65,\n          69,\n          -75,\n          24,\n          65,\n          93,\n          -63,\n          9,\n          65,\n          118,\n          10,\n          -49,\n          65,\n          -124,\n          -107,\n          61,\n          65,\n          -106,\n          38,\n          -106,\n          65,\n          -101,\n          -69,\n          -16,\n          65,\n          -93,\n          -45,\n          125,\n          65,\n          -55,\n          -9,\n          -79,\n          66,\n          8,\n          6,\n          -71,\n          65,\n          58,\n          4,\n          -35,\n          65,\n          69,\n          -10,\n          -3,\n          65,\n          114,\n          -120,\n          -68,\n          65,\n          122,\n          6,\n          122,\n          65,\n          -121,\n          58,\n          17,\n          65,\n          -124,\n          90,\n          122,\n          65,\n          -111,\n          -102,\n          -119,\n          65,\n          -112,\n          7,\n          -99,\n          65,\n          -97,\n          8,\n          45,\n          65,\n          -92,\n          52,\n          3,\n          65,\n          -82,\n          51,\n          56,\n          65,\n          -79,\n          -127,\n          108,\n          
65,\n          -73,\n          -78,\n          -128,\n          65,\n          -39,\n          115,\n          -50,\n          66,\n          9,\n          -6,\n          75,\n          65,\n          54,\n          -87,\n          -72,\n          65,\n          53,\n          -40,\n          -47,\n          65,\n          87,\n          102,\n          -45,\n          65,\n          86,\n          -24,\n          71,\n          65,\n          90,\n          -116,\n          -128,\n          65,\n          105,\n          -13,\n          60,\n          65,\n          123,\n          35,\n          -30,\n          65,\n          -121,\n          -59,\n          -90,\n          65,\n          -123,\n          -10,\n          97,\n          65,\n          -105,\n          -88,\n          -55,\n          65,\n          -110,\n          -78,\n          -6,\n          65,\n          -94,\n          -124,\n          -120,\n          65,\n          -93,\n          -88,\n          -12,\n          65,\n          -91,\n          24,\n          -91,\n          65,\n          -89,\n          103,\n          -6,\n          65,\n          -86,\n          93,\n          -13,\n          65,\n          -62,\n          -124,\n          80,\n          65,\n          -37,\n          -32,\n          104,\n          65,\n          48,\n          -9,\n          -71,\n          65,\n          54,\n          93,\n          -122,\n          65,\n          78,\n          -60,\n          -107,\n          65,\n          69,\n          1,\n          83,\n          65,\n          81,\n          -121,\n          74,\n          65,\n          87,\n          80,\n          -68,\n          65,\n          97,\n          -103,\n          53,\n          65,\n          98,\n          -75,\n          32,\n          65,\n          98,\n          -96,\n          68,\n          65,\n          100,\n          110,\n          83,\n          65,\n          115,\n          -107,\n          -110,\n          65,\n 
         -114,\n          -80,\n          -56,\n          65,\n          -115,\n          -23,\n          27,\n          65,\n          -98,\n          54,\n          90,\n          65,\n          -105,\n          -54,\n          42,\n          65,\n          -100,\n          5,\n          70,\n          65,\n          -90,\n          124,\n          -38,\n          65,\n          -91,\n          -99,\n          34,\n          65,\n          -84,\n          107,\n          70,\n          65,\n          -88,\n          -72,\n          -90,\n          65,\n          -76,\n          21,\n          -122,\n          65,\n          -57,\n          -17,\n          69,\n          65,\n          -63,\n          19,\n          47,\n          65,\n          -40,\n          95,\n          77,\n          65,\n          38,\n          66,\n          -83,\n          65,\n          46,\n          -110,\n          -109,\n          65,\n          54,\n          108,\n          -119,\n          65,\n          65,\n          -60,\n          -72,\n          65,\n          68,\n          51,\n          72,\n          65,\n          72,\n          -59,\n          -121,\n          65,\n          81,\n          -36,\n          102,\n          65,\n          110,\n          120,\n          -109,\n          65,\n          110,\n          -21,\n          16,\n          65,\n          125,\n          -120,\n          67,\n          65,\n          112,\n          86,\n          -98,\n          65,\n          -128,\n          107,\n          88,\n          65,\n          -114,\n          60,\n          -61,\n          65,\n          -119,\n          -62,\n          -5,\n          65,\n          -99,\n          108,\n          -26,\n          65,\n          -94,\n          -85,\n          -87,\n          65,\n          -91,\n          77,\n          32,\n          65,\n          -89,\n          -77,\n          -125,\n          65,\n          -93,\n          47,\n          -102,\n          65,\n   
       -88,\n          -82,\n          -82,\n          65,\n          -96,\n          -125,\n          90,\n          65,\n          -85,\n          -27,\n          -104,\n          65,\n          -74,\n          -83,\n          -6,\n          65,\n          -76,\n          -117,\n          66,\n          65,\n          -69,\n          -11,\n          100,\n          65,\n          -59,\n          75,\n          78,\n          65,\n          -54,\n          -110,\n          103,\n          65,\n          -49,\n          -15,\n          59,\n          65,\n          39,\n          -28,\n          62,\n          65,\n          42,\n          109,\n          83,\n          65,\n          55,\n          54,\n          102,\n          65,\n          59,\n          -9,\n          -106,\n          65,\n          77,\n          -106,\n          67,\n          65,\n          65,\n          -99,\n          -53,\n          65,\n          67,\n          7,\n          -105,\n          65,\n          108,\n          -114,\n          -50,\n          65,\n          104,\n          34,\n          -51,\n          65,\n          -121,\n          109,\n          -128,\n          65,\n          -121,\n          102,\n          -104,\n          65,\n          -121,\n          107,\n          26,\n          65,\n          -117,\n          106,\n          54,\n          65,\n          -107,\n          -11,\n          -9,\n          65,\n          -119,\n          30,\n          41,\n          65,\n          -105,\n          44,\n          -54,\n          65,\n          -97,\n          102,\n          -83,\n          65,\n          -91,\n          37,\n          87,\n          65,\n          -95,\n          100,\n          -103,\n          65,\n          -94,\n          112,\n          105,\n          65,\n          -87,\n          44,\n          -86,\n          65,\n          -79,\n          -58,\n          91,\n          65,\n          -80,\n          38,\n          -24,\n          65,\n 
         -68,\n          74,\n          -74,\n          65,\n          -54,\n          59,\n          -42,\n          65,\n          -56,\n          42,\n          31,\n          65,\n          -58,\n          -110,\n          -115,\n          65,\n          -63,\n          69,\n          -7,\n          65,\n          -42,\n          -91,\n          44,\n          65,\n          44,\n          -78,\n          47,\n          65,\n          48,\n          -4,\n          55,\n          65,\n          73,\n          14,\n          2,\n          65,\n          83,\n          -109,\n          76,\n          65,\n          101,\n          -37,\n          -82,\n          65,\n          126,\n          31,\n          -30,\n          65,\n          -125,\n          -75,\n          -128,\n          65,\n          -113,\n          -27,\n          -94,\n          65,\n          -118,\n          -28,\n          -116,\n          65,\n          -115,\n          106,\n          -87,\n          65,\n          -100,\n          123,\n          -15,\n          65,\n          -88,\n          27,\n          -112,\n          65,\n          -73,\n          -121,\n          57,\n          65,\n          -60,\n          22,\n          -24,\n          65,\n          -71,\n          76,\n          46,\n          65,\n          -50,\n          81,\n          -101,\n          65,\n          -43,\n          95,\n          100,\n          65,\n          -41,\n          -109,\n          -83,\n          65,\n          34,\n          74,\n          49,\n          65,\n          42,\n          -28,\n          -34,\n          65,\n          -124,\n          -101,\n          82,\n          65,\n          -118,\n          -44,\n          -47,\n          65,\n          -50,\n          -17,\n          -114,\n          65,\n          -43,\n          -36,\n          55,\n          65,\n          -55,\n          -45,\n          -58,\n          65,\n          41,\n          5,\n          -72,\n          65,\n  
        -51,\n          58,\n          81,\n          65,\n          -44,\n          4,\n          62,\n          65,\n          -41,\n          113,\n          49\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 170,\n        \"leftIndex\": [\n          0,\n          1,\n          170,\n          1073004511,\n          999798234,\n          570299295,\n          276570850,\n          3179122,\n          33984\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          170,\n          1038907355,\n          456503898,\n          794285704,\n          741695985,\n          809029648,\n          41024\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -2980812803043547139,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          169,\n          268713,\n          61687872,\n          803564534,\n          1046203934,\n          389950222,\n          431832853,\n          942191220,\n          693869819,\n          815365331,\n          668449621,\n          687014684,\n          953391403,\n          395923185,\n          708214781,\n          887950063,\n          130390598,\n          101249503\n        ],\n        \"cutValueData\": [\n          66,\n          -104,\n          -85,\n          -28,\n          66,\n          73,\n     
     -29,\n          115,\n          66,\n          -89,\n          -125,\n          -20,\n          65,\n          -122,\n          10,\n          36,\n          66,\n          93,\n          113,\n          -45,\n          65,\n          71,\n          -6,\n          -7,\n          66,\n          16,\n          -51,\n          74,\n          66,\n          101,\n          20,\n          -25,\n          65,\n          60,\n          53,\n          -6,\n          65,\n          122,\n          -79,\n          -72,\n          65,\n          -103,\n          2,\n          -78,\n          66,\n          23,\n          6,\n          122,\n          66,\n          96,\n          93,\n          -4,\n          65,\n          46,\n          98,\n          6,\n          65,\n          43,\n          96,\n          -77,\n          65,\n          68,\n          -60,\n          91,\n          65,\n          127,\n          -88,\n          -111,\n          65,\n          -97,\n          43,\n          -114,\n          65,\n          -63,\n          -3,\n          7,\n          65,\n          33,\n          -111,\n          -38,\n          65,\n          47,\n          -105,\n          89,\n          65,\n          61,\n          82,\n          -38,\n          65,\n          54,\n          81,\n          124,\n          65,\n          112,\n          72,\n          122,\n          65,\n          -127,\n          -1,\n          18,\n          65,\n          -125,\n          56,\n          -34,\n          65,\n          -114,\n          118,\n          48,\n          65,\n          -62,\n          -97,\n          -8,\n          65,\n          -87,\n          43,\n          121,\n          65,\n          -30,\n          -82,\n          49,\n          65,\n          36,\n          -112,\n          -8,\n          65,\n          36,\n          -37,\n          -79,\n          65,\n          33,\n          -111,\n          -124,\n          65,\n          45,\n          80,\n          
-7,\n          65,\n          69,\n          -24,\n          -34,\n          65,\n          78,\n          -85,\n          18,\n          65,\n          -128,\n          10,\n          -114,\n          65,\n          112,\n          -18,\n          -24,\n          65,\n          126,\n          1,\n          59,\n          65,\n          -91,\n          -82,\n          26,\n          65,\n          -110,\n          -45,\n          45,\n          65,\n          -106,\n          -113,\n          127,\n          65,\n          -66,\n          -2,\n          82,\n          65,\n          -89,\n          -82,\n          -127,\n          65,\n          -87,\n          52,\n          -107,\n          65,\n          -44,\n          -29,\n          22,\n          65,\n          33,\n          59,\n          -79,\n          65,\n          32,\n          30,\n          54,\n          65,\n          63,\n          -21,\n          -83,\n          65,\n          52,\n          111,\n          -75,\n          65,\n          79,\n          53,\n          -60,\n          65,\n          75,\n          55,\n          111,\n          65,\n          75,\n          -85,\n          118,\n          65,\n          114,\n          -47,\n          115,\n          65,\n          112,\n          -42,\n          -96,\n          65,\n          114,\n          -71,\n          -120,\n          65,\n          100,\n          -29,\n          -29,\n          65,\n          118,\n          -11,\n          8,\n          65,\n          -121,\n          -59,\n          -37,\n          65,\n          -113,\n          47,\n          -108,\n          65,\n          -107,\n          8,\n          -74,\n          65,\n          -102,\n          -58,\n          69,\n          65,\n          -99,\n          -9,\n          23,\n          65,\n          -90,\n          -82,\n          -60,\n          65,\n          -82,\n          -77,\n          -40,\n          65,\n          -90,\n          -59,\n          
79,\n          65,\n          -70,\n          125,\n          36,\n          65,\n          -57,\n          59,\n          93,\n          65,\n          -59,\n          43,\n          84,\n          65,\n          43,\n          9,\n          -111,\n          65,\n          56,\n          -19,\n          64,\n          65,\n          79,\n          68,\n          -63,\n          65,\n          66,\n          -42,\n          -2,\n          65,\n          78,\n          -78,\n          43,\n          65,\n          90,\n          -30,\n          119,\n          65,\n          -126,\n          -7,\n          -47,\n          65,\n          -116,\n          -6,\n          86,\n          65,\n          -120,\n          49,\n          6,\n          65,\n          -106,\n          -88,\n          -42,\n          65,\n          -108,\n          69,\n          69,\n          65,\n          -111,\n          43,\n          -60,\n          65,\n          -108,\n          63,\n          -76,\n          65,\n          -97,\n          32,\n          -51,\n          65,\n          -106,\n          -22,\n          -73,\n          65,\n          -103,\n          -66,\n          74,\n          65,\n          -94,\n          8,\n          71,\n          65,\n          -85,\n          -112,\n          -121,\n          65,\n          -84,\n          -113,\n          -65,\n          65,\n          -86,\n          -43,\n          -113,\n          65,\n          -67,\n          57,\n          -10,\n          65,\n          -47,\n          -114,\n          15,\n          65,\n          -42,\n          -87,\n          -105,\n          65,\n          48,\n          -63,\n          23,\n          65,\n          81,\n          13,\n          7,\n          65,\n          97,\n          -9,\n          6,\n          65,\n          114,\n          -79,\n          0,\n          65,\n          -118,\n          36,\n          -5,\n          65,\n          -108,\n          62,\n          -34,\n          
65,\n          -120,\n          22,\n          95,\n          65,\n          -108,\n          -11,\n          121,\n          65,\n          -102,\n          -118,\n          -16,\n          65,\n          -101,\n          -95,\n          112,\n          65,\n          -89,\n          10,\n          49,\n          65,\n          -93,\n          -9,\n          32,\n          65,\n          -75,\n          32,\n          -53,\n          65,\n          -74,\n          -60,\n          -124,\n          65,\n          -75,\n          17,\n          126,\n          65,\n          -76,\n          81,\n          -68,\n          65,\n          -59,\n          -76,\n          74,\n          65,\n          -50,\n          -128,\n          -58,\n          65,\n          -42,\n          -75,\n          -43,\n          65,\n          -40,\n          79,\n          -91,\n          65,\n          59,\n          -102,\n          -53,\n          65,\n          80,\n          -124,\n          19,\n          65,\n          108,\n          -66,\n          58,\n          65,\n          99,\n          64,\n          107,\n          65,\n          -116,\n          -54,\n          -65,\n          65,\n          -114,\n          94,\n          6,\n          65,\n          -110,\n          47,\n          -66,\n          65,\n          -102,\n          20,\n          27,\n          65,\n          -100,\n          -96,\n          111,\n          65,\n          -89,\n          33,\n          97,\n          65,\n          -92,\n          -76,\n          -31,\n          65,\n          -90,\n          97,\n          49,\n          65,\n          -85,\n          -99,\n          -85,\n          65,\n          -77,\n          60,\n          -107,\n          65,\n          -74,\n          -73,\n          -123,\n          65,\n          -72,\n          123,\n          115,\n          65,\n          -71,\n          -109,\n          -12,\n          65,\n          -58,\n          67,\n          -76,\n      
    65,\n          -63,\n          -114,\n          -59,\n          65,\n          -53,\n          -54,\n          -63,\n          65,\n          -39,\n          96,\n          92,\n          65,\n          -35,\n          -46,\n          122,\n          65,\n          80,\n          -109,\n          -29,\n          65,\n          -118,\n          3,\n          -29,\n          65,\n          -103,\n          -8,\n          -61,\n          65,\n          -83,\n          78,\n          33,\n          65,\n          -83,\n          29,\n          -24,\n          65,\n          -81,\n          -60,\n          33,\n          65,\n          -75,\n          -2,\n          -106,\n          65,\n          -80,\n          -39,\n          3,\n          65,\n          -73,\n          -64,\n          -13,\n          65,\n          -58,\n          -30,\n          -85,\n          65,\n          -49,\n          -114,\n          18,\n          65,\n          -49,\n          56,\n          -109,\n          65,\n          82,\n          -80,\n          125,\n          65,\n          99,\n          -19,\n          -127,\n          65,\n          -104,\n          -94,\n          39,\n          65,\n          -98,\n          38,\n          28,\n          65,\n          -92,\n          -61,\n          104,\n          65,\n          -87,\n          74,\n          23,\n          65,\n          -80,\n          80,\n          49,\n          65,\n          -72,\n          109,\n          -17,\n          65,\n          -51,\n          100,\n          13,\n          65,\n          -55,\n          39,\n          -106,\n          65,\n          -55,\n          101,\n          75,\n          65,\n          97,\n          -3,\n          106,\n          65,\n          -94,\n          -90,\n          33,\n          65,\n          -80,\n          19,\n          -38,\n          65,\n          -65,\n          94,\n          -118,\n          65,\n          -66,\n          -53,\n          102,\n          
65,\n          -63,\n          -86,\n          -98,\n          65,\n          -45,\n          -49,\n          63,\n          65,\n          -97,\n          54,\n          -61,\n          65,\n          -78,\n          -112,\n          29,\n          65,\n          -79,\n          -18,\n          32,\n          65,\n          -75,\n          5,\n          -54,\n          65,\n          -69,\n          41,\n          92\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 169,\n        \"leftIndex\": [\n          0,\n          1,\n          169,\n          1068476395,\n          817162107,\n          911524959,\n          153962771,\n          35078250,\n          68876\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          169,\n          482338683,\n          541388276,\n          847462907,\n          3095390,\n          725699928,\n          1112\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 4827200799539851007,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          168,\n          602289332,\n          304359344,\n          1052939443,\n          135491070,\n          151455303,\n          670046361,\n          328615073,\n          1031202669,\n          377003933,\n          2334910,\n          643748342,\n          
283638278,\n          591300592,\n          991935926,\n          662865773,\n          412944657,\n          8432878\n        ],\n        \"cutValueData\": [\n          66,\n          33,\n          122,\n          -57,\n          65,\n          -21,\n          15,\n          -91,\n          66,\n          79,\n          32,\n          -100,\n          65,\n          45,\n          -37,\n          -113,\n          66,\n          1,\n          0,\n          118,\n          66,\n          67,\n          15,\n          86,\n          66,\n          115,\n          -56,\n          -70,\n          65,\n          60,\n          -30,\n          0,\n          65,\n          -85,\n          42,\n          103,\n          65,\n          -27,\n          -95,\n          -3,\n          66,\n          6,\n          86,\n          95,\n          66,\n          -108,\n          57,\n          -119,\n          65,\n          33,\n          -58,\n          100,\n          65,\n          78,\n          86,\n          -79,\n          65,\n          -44,\n          102,\n          13,\n          66,\n          -110,\n          -90,\n          2,\n          65,\n          47,\n          -26,\n          -59,\n          65,\n          78,\n          117,\n          -68,\n          65,\n          125,\n          -24,\n          -33,\n          65,\n          -87,\n          -22,\n          -119,\n          65,\n          -38,\n          7,\n          85,\n          65,\n          73,\n          -125,\n          -84,\n          65,\n          77,\n          105,\n          -2,\n          65,\n          100,\n          -54,\n          127,\n          65,\n          -117,\n          34,\n          27,\n          65,\n          -58,\n          70,\n          58,\n          65,\n          -76,\n          51,\n          -33,\n          65,\n          -33,\n          -16,\n          -87,\n          65,\n          43,\n          -61,\n          27,\n          65,\n          61,\n          66,\n   
       106,\n          65,\n          104,\n          106,\n          -105,\n          65,\n          106,\n          23,\n          37,\n          65,\n          -125,\n          -64,\n          -17,\n          65,\n          -83,\n          85,\n          26,\n          65,\n          -74,\n          26,\n          70,\n          65,\n          -74,\n          -1,\n          -36,\n          65,\n          -66,\n          -17,\n          93,\n          65,\n          -45,\n          -98,\n          -30,\n          65,\n          -46,\n          -23,\n          103,\n          65,\n          58,\n          -71,\n          -28,\n          65,\n          76,\n          104,\n          -128,\n          65,\n          92,\n          -22,\n          -125,\n          65,\n          121,\n          -63,\n          -7,\n          65,\n          126,\n          33,\n          75,\n          65,\n          -127,\n          81,\n          -81,\n          65,\n          -121,\n          -27,\n          -20,\n          65,\n          -83,\n          41,\n          0,\n          65,\n          -87,\n          -99,\n          -13,\n          65,\n          -73,\n          -4,\n          116,\n          65,\n          -70,\n          102,\n          121,\n          65,\n          -57,\n          -93,\n          -63,\n          65,\n          -42,\n          77,\n          -24,\n          65,\n          58,\n          -55,\n          -93,\n          65,\n          59,\n          87,\n          -55,\n          65,\n          95,\n          54,\n          -36,\n          65,\n          68,\n          -35,\n          2,\n          65,\n          104,\n          -99,\n          52,\n          65,\n          -124,\n          69,\n          -3,\n          65,\n          120,\n          55,\n          -28,\n          65,\n          -125,\n          18,\n          -111,\n          65,\n          118,\n          -94,\n          -96,\n          65,\n          -100,\n          32,\n          
-13,\n          65,\n          -99,\n          -100,\n          -40,\n          65,\n          -82,\n          -79,\n          26,\n          65,\n          -83,\n          -79,\n          113,\n          65,\n          -79,\n          41,\n          -60,\n          65,\n          -66,\n          -90,\n          -32,\n          65,\n          -75,\n          23,\n          -102,\n          65,\n          -66,\n          -107,\n          113,\n          65,\n          -67,\n          -127,\n          -31,\n          65,\n          -59,\n          56,\n          87,\n          65,\n          -42,\n          -109,\n          80,\n          65,\n          48,\n          -18,\n          83,\n          65,\n          59,\n          126,\n          84,\n          65,\n          52,\n          -82,\n          -12,\n          65,\n          79,\n          -10,\n          -29,\n          65,\n          88,\n          -42,\n          55,\n          65,\n          80,\n          86,\n          41,\n          65,\n          106,\n          43,\n          102,\n          65,\n          113,\n          -38,\n          -51,\n          65,\n          116,\n          -126,\n          37,\n          65,\n          -121,\n          20,\n          -21,\n          65,\n          -125,\n          -65,\n          66,\n          65,\n          -119,\n          58,\n          23,\n          65,\n          -103,\n          -23,\n          24,\n          65,\n          -95,\n          -12,\n          -78,\n          65,\n          -81,\n          127,\n          84,\n          65,\n          -73,\n          13,\n          -22,\n          65,\n          -50,\n          -114,\n          -14,\n          65,\n          -61,\n          -118,\n          -9,\n          65,\n          -42,\n          36,\n          81,\n          65,\n          -44,\n          -122,\n          119,\n          65,\n          52,\n          -19,\n          -128,\n          65,\n          87,\n          98,\n          
-120,\n          65,\n          75,\n          76,\n          -59,\n          65,\n          93,\n          122,\n          -87,\n          65,\n          83,\n          126,\n          98,\n          65,\n          98,\n          -124,\n          -24,\n          65,\n          -118,\n          94,\n          80,\n          65,\n          -128,\n          -113,\n          91,\n          65,\n          -113,\n          104,\n          50,\n          65,\n          -113,\n          43,\n          -97,\n          65,\n          -107,\n          34,\n          -70,\n          65,\n          -99,\n          19,\n          -32,\n          65,\n          -90,\n          74,\n          -102,\n          65,\n          -86,\n          122,\n          -22,\n          65,\n          -87,\n          76,\n          -64,\n          65,\n          -73,\n          17,\n          40,\n          65,\n          -66,\n          -57,\n          -68,\n          65,\n          -59,\n          -47,\n          90,\n          65,\n          -62,\n          -95,\n          -39,\n          65,\n          -55,\n          -84,\n          -54,\n          65,\n          108,\n          -19,\n          -23,\n          65,\n          -121,\n          -90,\n          -103,\n          65,\n          -109,\n          53,\n          127,\n          65,\n          -127,\n          13,\n          -18,\n          65,\n          -106,\n          -110,\n          75,\n          65,\n          -92,\n          -43,\n          126,\n          65,\n          -90,\n          86,\n          -59,\n          65,\n          -96,\n          4,\n          122,\n          65,\n          -86,\n          89,\n          -55,\n          65,\n          -67,\n          -26,\n          -69,\n          65,\n          -59,\n          46,\n          -68,\n          65,\n          -61,\n          104,\n          4,\n          65,\n          -53,\n          49,\n          -73,\n          65,\n          -43,\n          -93,\n        
  108,\n          65,\n          94,\n          111,\n          -73,\n          65,\n          96,\n          71,\n          -66,\n          65,\n          -115,\n          -63,\n          -51,\n          65,\n          -118,\n          31,\n          -115,\n          65,\n          -112,\n          71,\n          -75,\n          65,\n          -110,\n          -14,\n          70,\n          65,\n          -99,\n          -67,\n          27,\n          65,\n          -97,\n          -9,\n          -69,\n          65,\n          -89,\n          106,\n          13,\n          65,\n          -96,\n          -69,\n          -109,\n          65,\n          -70,\n          21,\n          -103,\n          65,\n          -69,\n          48,\n          -48,\n          65,\n          -56,\n          77,\n          -121,\n          65,\n          -54,\n          -97,\n          -113,\n          65,\n          85,\n          2,\n          -105,\n          65,\n          86,\n          95,\n          23,\n          65,\n          -116,\n          84,\n          23,\n          65,\n          -116,\n          -48,\n          -106,\n          65,\n          -114,\n          21,\n          49,\n          65,\n          -105,\n          -1,\n          -46,\n          65,\n          -102,\n          13,\n          -80,\n          65,\n          -100,\n          101,\n          18,\n          65,\n          -100,\n          6,\n          -115,\n          65,\n          -94,\n          3,\n          14,\n          65,\n          -67,\n          63,\n          122,\n          65,\n          -59,\n          -104,\n          -107,\n          65,\n          -49,\n          -38,\n          -94,\n          65,\n          -53,\n          -48,\n          50,\n          65,\n          -43,\n          -99,\n          -59,\n          65,\n          -118,\n          -65,\n          14,\n          65,\n          -118,\n          37,\n          64,\n          65,\n          -108,\n          -28,\n   
       38,\n          65,\n          -101,\n          -40,\n          97,\n          65,\n          -99,\n          -64,\n          -18,\n          65,\n          -95,\n          -30,\n          -121,\n          65,\n          -62,\n          -120,\n          14,\n          65,\n          -63,\n          -113,\n          49,\n          65,\n          -51,\n          -36,\n          -6,\n          65,\n          -53,\n          99,\n          1,\n          65,\n          -46,\n          115,\n          -28,\n          65,\n          -105,\n          -6,\n          91,\n          65,\n          -90,\n          125,\n          83\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 168,\n        \"leftIndex\": [\n          0,\n          1,\n          168,\n          268333471,\n          1071507055,\n          399188998,\n          359164161,\n          210636755,\n          526\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          168,\n          495872351,\n          582995687,\n          797085236,\n          911633473,\n          420109332,\n          1041\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 5119692285027370374,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          163,\n          182614597,\n          
1005896320,\n          641439601,\n          62824495,\n          573699302,\n          616799464,\n          830539293,\n          187812197,\n          994754499,\n          914424517,\n          835516816,\n          88108534,\n          1067369174,\n          24477568,\n          179175470,\n          1067010705,\n          126\n        ],\n        \"cutValueData\": [\n          65,\n          73,\n          -113,\n          59,\n          65,\n          69,\n          -64,\n          -52,\n          66,\n          -124,\n          1,\n          117,\n          65,\n          62,\n          -21,\n          -89,\n          65,\n          54,\n          118,\n          -70,\n          66,\n          78,\n          27,\n          -47,\n          66,\n          -106,\n          105,\n          -105,\n          65,\n          51,\n          109,\n          41,\n          65,\n          52,\n          -43,\n          60,\n          65,\n          69,\n          6,\n          -64,\n          65,\n          -126,\n          32,\n          -15,\n          66,\n          70,\n          64,\n          -107,\n          65,\n          47,\n          -23,\n          74,\n          65,\n          32,\n          54,\n          -30,\n          65,\n          55,\n          85,\n          21,\n          65,\n          60,\n          51,\n          56,\n          65,\n          127,\n          62,\n          -70,\n          65,\n          -45,\n          10,\n          19,\n          66,\n          120,\n          118,\n          94,\n          65,\n          40,\n          -63,\n          99,\n          65,\n          51,\n          46,\n          -121,\n          65,\n          58,\n          21,\n          -35,\n          65,\n          99,\n          91,\n          4,\n          65,\n          118,\n          20,\n          87,\n          65,\n          -81,\n          111,\n          -60,\n          66,\n          26,\n          -70,\n          37,\n          66,\n          
109,\n          5,\n          92,\n          65,\n          47,\n          -25,\n          -42,\n          65,\n          41,\n          55,\n          -83,\n          65,\n          34,\n          -102,\n          -127,\n          65,\n          41,\n          -52,\n          70,\n          65,\n          84,\n          6,\n          -9,\n          65,\n          103,\n          -98,\n          -78,\n          65,\n          114,\n          59,\n          -3,\n          65,\n          -127,\n          68,\n          79,\n          65,\n          -119,\n          30,\n          5,\n          65,\n          -70,\n          92,\n          -59,\n          65,\n          -33,\n          123,\n          -120,\n          66,\n          49,\n          78,\n          -82,\n          65,\n          37,\n          -98,\n          -64,\n          65,\n          42,\n          92,\n          29,\n          65,\n          93,\n          38,\n          100,\n          65,\n          111,\n          110,\n          -121,\n          65,\n          120,\n          -72,\n          -25,\n          65,\n          125,\n          99,\n          59,\n          65,\n          -114,\n          41,\n          37,\n          65,\n          -120,\n          -101,\n          -67,\n          65,\n          -77,\n          -92,\n          -57,\n          65,\n          -51,\n          60,\n          13,\n          65,\n          -35,\n          88,\n          -69,\n          65,\n          -41,\n          31,\n          115,\n          65,\n          43,\n          -58,\n          -113,\n          65,\n          71,\n          53,\n          -49,\n          65,\n          83,\n          80,\n          127,\n          65,\n          120,\n          15,\n          -67,\n          65,\n          -126,\n          -58,\n          -99,\n          65,\n          -105,\n          33,\n          83,\n          65,\n          -85,\n          10,\n          76,\n          65,\n          -76,\n          
21,\n          -87,\n          65,\n          -49,\n          -24,\n          37,\n          65,\n          -52,\n          58,\n          -104,\n          65,\n          -37,\n          -34,\n          111,\n          65,\n          -64,\n          32,\n          32,\n          65,\n          81,\n          -105,\n          -109,\n          65,\n          91,\n          -86,\n          50,\n          65,\n          120,\n          120,\n          -21,\n          65,\n          116,\n          64,\n          75,\n          65,\n          -127,\n          -83,\n          -107,\n          65,\n          -118,\n          116,\n          -98,\n          65,\n          -95,\n          -6,\n          -71,\n          65,\n          -108,\n          50,\n          -64,\n          65,\n          -87,\n          -110,\n          -24,\n          65,\n          -75,\n          69,\n          -105,\n          65,\n          -70,\n          67,\n          -10,\n          65,\n          -72,\n          79,\n          106,\n          65,\n          -52,\n          67,\n          31,\n          65,\n          -56,\n          -13,\n          -112,\n          65,\n          -43,\n          29,\n          -32,\n          65,\n          -43,\n          92,\n          59,\n          65,\n          68,\n          120,\n          -43,\n          65,\n          88,\n          -90,\n          50,\n          65,\n          104,\n          41,\n          -94,\n          65,\n          -120,\n          59,\n          -108,\n          65,\n          -125,\n          58,\n          -6,\n          65,\n          -125,\n          72,\n          124,\n          65,\n          -123,\n          -46,\n          -105,\n          65,\n          -108,\n          -97,\n          -6,\n          65,\n          -109,\n          54,\n          -124,\n          65,\n          -91,\n          41,\n          75,\n          65,\n          -93,\n          12,\n          73,\n          65,\n          -86,\n         
 -47,\n          13,\n          65,\n          -79,\n          -19,\n          -95,\n          65,\n          -74,\n          113,\n          -9,\n          65,\n          -68,\n          126,\n          66,\n          65,\n          -78,\n          -95,\n          34,\n          65,\n          -64,\n          121,\n          -85,\n          65,\n          -47,\n          -61,\n          107,\n          65,\n          -48,\n          85,\n          96,\n          65,\n          -47,\n          63,\n          -121,\n          65,\n          -33,\n          -16,\n          112,\n          65,\n          80,\n          0,\n          -38,\n          65,\n          82,\n          24,\n          -2,\n          65,\n          98,\n          109,\n          -118,\n          65,\n          -125,\n          -86,\n          -123,\n          65,\n          -112,\n          56,\n          122,\n          65,\n          -111,\n          93,\n          -14,\n          65,\n          -110,\n          -109,\n          14,\n          65,\n          -108,\n          117,\n          -28,\n          65,\n          -95,\n          66,\n          -30,\n          65,\n          -92,\n          48,\n          -24,\n          65,\n          -79,\n          97,\n          -65,\n          65,\n          -79,\n          -27,\n          -127,\n          65,\n          -79,\n          -52,\n          89,\n          65,\n          -60,\n          -119,\n          93,\n          65,\n          -54,\n          98,\n          88,\n          65,\n          -55,\n          105,\n          69,\n          65,\n          -37,\n          -80,\n          87,\n          65,\n          -50,\n          114,\n          16,\n          65,\n          -48,\n          86,\n          10,\n          65,\n          -48,\n          54,\n          -63,\n          65,\n          87,\n          -52,\n          88,\n          65,\n          83,\n          97,\n          59,\n          65,\n          -121,\n          67,\n 
         126,\n          65,\n          -116,\n          -99,\n          90,\n          65,\n          -105,\n          -33,\n          2,\n          65,\n          -102,\n          -34,\n          -35,\n          65,\n          -117,\n          -74,\n          111,\n          65,\n          -111,\n          -73,\n          56,\n          65,\n          -104,\n          -15,\n          -124,\n          65,\n          -91,\n          5,\n          -127,\n          65,\n          -85,\n          77,\n          -100,\n          65,\n          -68,\n          74,\n          -98,\n          65,\n          -57,\n          86,\n          -28,\n          65,\n          -55,\n          25,\n          48,\n          65,\n          80,\n          -80,\n          -94,\n          65,\n          -113,\n          -79,\n          83,\n          65,\n          -106,\n          -72,\n          -37,\n          65,\n          -108,\n          74,\n          -66,\n          65,\n          -101,\n          86,\n          -16,\n          65,\n          -98,\n          -38,\n          8,\n          65,\n          -83,\n          -85,\n          -63,\n          65,\n          -61,\n          88,\n          114,\n          65,\n          -55,\n          125,\n          83,\n          65,\n          -115,\n          2,\n          52,\n          65,\n          -99,\n          -108,\n          -116,\n          65,\n          -101,\n          82,\n          56,\n          65,\n          -101,\n          65,\n          -87,\n          65,\n          -91,\n          108,\n          -120,\n          65,\n          -83,\n          116,\n          55,\n          65,\n          -69,\n          78,\n          21,\n          65,\n          -62,\n          5,\n          -31,\n          65,\n          -114,\n          118,\n          -17,\n          65,\n          -102,\n          -111,\n          11,\n          65,\n          -99,\n          -86,\n          117,\n          65,\n          -98,\n          
87,\n          -32,\n          65,\n          -97,\n          -22,\n          -8,\n          65,\n          -92,\n          18,\n          -101,\n          65,\n          -95,\n          47,\n          6,\n          65,\n          -51,\n          -97,\n          -69,\n          65,\n          -49,\n          -10,\n          -55,\n          65,\n          -106,\n          -17,\n          52,\n          65,\n          -107,\n          -47,\n          -15,\n          65,\n          -99,\n          -105,\n          122\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 163,\n        \"leftIndex\": [\n          0,\n          1,\n          163,\n          199169471,\n          790535410,\n          874938075,\n          17942506,\n          250415385,\n          5\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          163,\n          601570735,\n          1070022886,\n          480140497,\n          42427688,\n          236720912,\n          35\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -2774403119332030375,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          171,\n          749729686,\n          905164191,\n          894559559,\n          867085271,\n          189446049,\n          190084643,\n          502374915,\n       
   237662142,\n          335850525,\n          346322672,\n          6044538,\n          406751648,\n          321352130,\n          454487303,\n          114875193,\n          400523056,\n          670842561,\n          6\n        ],\n        \"cutValueData\": [\n          65,\n          -102,\n          63,\n          -74,\n          65,\n          92,\n          -2,\n          -53,\n          66,\n          59,\n          13,\n          -111,\n          65,\n          47,\n          -10,\n          51,\n          65,\n          -106,\n          14,\n          -69,\n          66,\n          9,\n          -58,\n          -128,\n          66,\n          -120,\n          53,\n          -40,\n          65,\n          36,\n          -118,\n          -7,\n          65,\n          57,\n          118,\n          120,\n          65,\n          126,\n          -91,\n          68,\n          65,\n          -98,\n          64,\n          -33,\n          65,\n          -43,\n          -91,\n          -16,\n          66,\n          20,\n          43,\n          65,\n          66,\n          48,\n          -65,\n          68,\n          66,\n          -100,\n          99,\n          -57,\n          65,\n          40,\n          -55,\n          -93,\n          65,\n          41,\n          -60,\n          -125,\n          65,\n          60,\n          -91,\n          -47,\n          65,\n          62,\n          -100,\n          91,\n          65,\n          115,\n          -73,\n          102,\n          65,\n          -125,\n          -68,\n          51,\n          65,\n          -95,\n          101,\n          113,\n          65,\n          -99,\n          -2,\n          -14,\n          65,\n          -43,\n          -19,\n          126,\n          66,\n          9,\n          6,\n          53,\n          66,\n          28,\n          28,\n          -41,\n          66,\n          81,\n          27,\n          50,\n          65,\n          48,\n          49,\n          110,\n  
        65,\n          63,\n          -17,\n          -98,\n          65,\n          80,\n          69,\n          3,\n          65,\n          82,\n          21,\n          118,\n          65,\n          99,\n          -15,\n          -96,\n          65,\n          117,\n          24,\n          118,\n          65,\n          -119,\n          8,\n          119,\n          65,\n          -103,\n          71,\n          -15,\n          65,\n          -99,\n          -41,\n          73,\n          65,\n          -83,\n          -75,\n          -85,\n          65,\n          -39,\n          -9,\n          -121,\n          66,\n          38,\n          10,\n          -100,\n          66,\n          -124,\n          107,\n          86,\n          65,\n          77,\n          -94,\n          73,\n          65,\n          84,\n          -111,\n          -94,\n          65,\n          91,\n          -71,\n          -87,\n          65,\n          96,\n          32,\n          -8,\n          65,\n          -125,\n          -31,\n          75,\n          65,\n          -126,\n          -85,\n          -76,\n          65,\n          -106,\n          -93,\n          -90,\n          65,\n          -119,\n          93,\n          112,\n          65,\n          -98,\n          107,\n          36,\n          65,\n          -93,\n          32,\n          -101,\n          65,\n          -112,\n          -34,\n          8,\n          65,\n          -66,\n          6,\n          -76,\n          65,\n          -36,\n          32,\n          -70,\n          65,\n          -29,\n          -97,\n          -53,\n          66,\n          112,\n          -122,\n          -45,\n          66,\n          126,\n          21,\n          59,\n          65,\n          66,\n          11,\n          -91,\n          65,\n          74,\n          -72,\n          -94,\n          65,\n          90,\n          -90,\n          101,\n          65,\n          120,\n          25,\n          83,\n          
65,\n          126,\n          -70,\n          45,\n          65,\n          120,\n          94,\n          -73,\n          65,\n          -125,\n          -98,\n          32,\n          65,\n          -124,\n          86,\n          -20,\n          65,\n          -99,\n          79,\n          116,\n          65,\n          -107,\n          30,\n          -116,\n          65,\n          -99,\n          19,\n          120,\n          65,\n          -95,\n          62,\n          17,\n          65,\n          -65,\n          17,\n          49,\n          65,\n          -36,\n          -54,\n          108,\n          65,\n          75,\n          12,\n          18,\n          65,\n          57,\n          70,\n          13,\n          65,\n          69,\n          0,\n          -56,\n          65,\n          103,\n          14,\n          -110,\n          65,\n          107,\n          85,\n          -94,\n          65,\n          113,\n          2,\n          38,\n          65,\n          121,\n          -128,\n          27,\n          65,\n          -122,\n          -31,\n          -13,\n          65,\n          -114,\n          -6,\n          18,\n          65,\n          -114,\n          19,\n          71,\n          65,\n          -111,\n          -16,\n          114,\n          65,\n          -101,\n          -21,\n          87,\n          65,\n          -118,\n          96,\n          -100,\n          65,\n          -90,\n          64,\n          48,\n          65,\n          -71,\n          -118,\n          29,\n          65,\n          -59,\n          -100,\n          -100,\n          65,\n          60,\n          123,\n          70,\n          65,\n          77,\n          58,\n          -104,\n          65,\n          106,\n          92,\n          26,\n          65,\n          103,\n          -36,\n          69,\n          65,\n          -124,\n          -98,\n          -18,\n          65,\n          -116,\n          -35,\n          -81,\n          65,\n  
        -113,\n          -87,\n          117,\n          65,\n          -119,\n          -72,\n          85,\n          65,\n          -113,\n          -64,\n          57,\n          65,\n          -104,\n          50,\n          50,\n          65,\n          -114,\n          -5,\n          52,\n          65,\n          -110,\n          -121,\n          59,\n          65,\n          -91,\n          126,\n          99,\n          65,\n          -87,\n          -117,\n          58,\n          65,\n          -74,\n          105,\n          -56,\n          65,\n          -77,\n          -118,\n          115,\n          65,\n          -49,\n          3,\n          43,\n          65,\n          -62,\n          -4,\n          53,\n          65,\n          59,\n          -50,\n          -53,\n          65,\n          55,\n          3,\n          -7,\n          65,\n          75,\n          -67,\n          -87,\n          65,\n          -116,\n          106,\n          100,\n          65,\n          -105,\n          62,\n          104,\n          65,\n          -107,\n          -28,\n          111,\n          65,\n          -111,\n          88,\n          23,\n          65,\n          -105,\n          -90,\n          -41,\n          65,\n          -112,\n          -8,\n          -32,\n          65,\n          -110,\n          -87,\n          3,\n          65,\n          -97,\n          56,\n          -37,\n          65,\n          -92,\n          -68,\n          -43,\n          65,\n          -79,\n          -95,\n          72,\n          65,\n          -75,\n          55,\n          35,\n          65,\n          -76,\n          -127,\n          43,\n          65,\n          -72,\n          120,\n          89,\n          65,\n          -70,\n          127,\n          -97,\n          65,\n          -53,\n          17,\n          -86,\n          65,\n          -49,\n          -94,\n          85,\n          65,\n          70,\n          -22,\n          68,\n          65,\n     
     83,\n          -32,\n          126,\n          65,\n          -113,\n          68,\n          2,\n          65,\n          -110,\n          -8,\n          -25,\n          65,\n          -105,\n          24,\n          -95,\n          65,\n          -106,\n          -41,\n          -10,\n          65,\n          -100,\n          75,\n          -33,\n          65,\n          -103,\n          -73,\n          -115,\n          65,\n          -82,\n          59,\n          -76,\n          65,\n          -91,\n          -35,\n          -31,\n          65,\n          -85,\n          73,\n          94,\n          65,\n          -84,\n          -118,\n          9,\n          65,\n          -69,\n          46,\n          79,\n          65,\n          -80,\n          5,\n          108,\n          65,\n          -72,\n          90,\n          -110,\n          65,\n          -60,\n          45,\n          -98,\n          65,\n          -55,\n          -10,\n          -10,\n          65,\n          -45,\n          -50,\n          58,\n          65,\n          71,\n          -83,\n          57,\n          65,\n          -84,\n          86,\n          -28,\n          65,\n          -84,\n          -53,\n          119,\n          65,\n          -76,\n          -66,\n          52,\n          65,\n          -75,\n          -50,\n          -68,\n          65,\n          -80,\n          -29,\n          36,\n          65,\n          -61,\n          121,\n          -45,\n          65,\n          -66,\n          6,\n          -48,\n          65,\n          -51,\n          -36,\n          -82,\n          65,\n          -62,\n          -123,\n          24,\n          65,\n          -82,\n          -82,\n          23,\n          65,\n          -81,\n          56,\n          -125,\n          65,\n          -87,\n          -39,\n          -119,\n          65,\n          -67,\n          -29,\n          -119,\n          65,\n          -69,\n          34,\n          -92,\n          65,\n      
    -79,\n          74,\n          -59,\n          65,\n          -64,\n          24,\n          -107,\n          65,\n          -61,\n          67,\n          6,\n          65,\n          -56,\n          6,\n          61,\n          65,\n          -95,\n          -87,\n          -89,\n          65,\n          -84,\n          -87,\n          118,\n          65,\n          -80,\n          -94,\n          48,\n          65,\n          -68,\n          -35,\n          3,\n          65,\n          -61,\n          -91,\n          24,\n          65,\n          -50,\n          55,\n          57,\n          65,\n          -94,\n          -94,\n          96,\n          65,\n          -80,\n          126,\n          16,\n          65,\n          -60,\n          54,\n          35,\n          65,\n          -59,\n          -12,\n          71,\n          65,\n          -77,\n          -27,\n          -33\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 171,\n        \"leftIndex\": [\n          0,\n          1,\n          171,\n          540680191,\n          748799705,\n          132006965,\n          522729396,\n          601530382,\n          16530\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          171,\n          654195711,\n          406040317,\n          264851933,\n          421113236,\n          352530437,\n          142985\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -8863940819723135715,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": 
false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          151,\n          497364640,\n          762175472,\n          124580849,\n          404278442,\n          862744592,\n          619203502,\n          545280122,\n          672428672,\n          1015808255,\n          604209191,\n          869523384,\n          1041079814,\n          842308777,\n          538422268,\n          535798055,\n          6\n        ],\n        \"cutValueData\": [\n          66,\n          79,\n          61,\n          53,\n          65,\n          41,\n          -118,\n          -39,\n          66,\n          -104,\n          -64,\n          -73,\n          65,\n          45,\n          126,\n          -9,\n          65,\n          -48,\n          -119,\n          -110,\n          66,\n          -124,\n          -30,\n          -17,\n          65,\n          32,\n          -36,\n          -12,\n          65,\n          34,\n          11,\n          30,\n          65,\n          -64,\n          -107,\n          -44,\n          65,\n          -39,\n          71,\n          -119,\n          65,\n          45,\n          100,\n          -14,\n          65,\n          40,\n          78,\n          5,\n          65,\n          118,\n          -99,\n          57,\n          65,\n          -15,\n          49,\n          110,\n          65,\n          -36,\n          28,\n          -84,\n          66,\n          2,\n          78,\n          36,\n          65,\n          61,\n          99,\n          -8,\n          65,\n          -102,\n          18,\n          -5,\n          65,\n          -49,\n          -17,\n          -117,\n          65,\n          -43,\n          -25,\n          19,\n          65,\n          -42,\n          -23,\n          -127,\n          65,\n          -37,\n          27,\n          108,\n         
 66,\n          26,\n          -11,\n          -20,\n          65,\n          41,\n          82,\n          -73,\n          65,\n          64,\n          15,\n          -118,\n          65,\n          -123,\n          -54,\n          86,\n          65,\n          -80,\n          -61,\n          56,\n          65,\n          -57,\n          114,\n          74,\n          65,\n          -47,\n          102,\n          92,\n          65,\n          -39,\n          -87,\n          -106,\n          65,\n          45,\n          -79,\n          80,\n          65,\n          42,\n          -63,\n          -60,\n          65,\n          74,\n          -106,\n          88,\n          65,\n          116,\n          85,\n          -82,\n          65,\n          -122,\n          -91,\n          28,\n          65,\n          -115,\n          -128,\n          -102,\n          65,\n          -99,\n          -37,\n          21,\n          65,\n          -59,\n          13,\n          -12,\n          65,\n          -68,\n          -127,\n          26,\n          65,\n          -53,\n          -86,\n          77,\n          65,\n          -54,\n          -28,\n          -55,\n          65,\n          -53,\n          -67,\n          13,\n          65,\n          -44,\n          -102,\n          100,\n          65,\n          48,\n          89,\n          97,\n          65,\n          47,\n          -55,\n          78,\n          65,\n          54,\n          59,\n          -119,\n          65,\n          113,\n          -24,\n          -85,\n          65,\n          121,\n          28,\n          14,\n          65,\n          -126,\n          -15,\n          -89,\n          65,\n          -112,\n          -53,\n          -59,\n          65,\n          -106,\n          79,\n          -69,\n          65,\n          -103,\n          103,\n          37,\n          65,\n          -81,\n          -74,\n          -21,\n          65,\n          -73,\n          -40,\n          56,\n          
65,\n          -60,\n          -96,\n          -4,\n          65,\n          -49,\n          -88,\n          22,\n          65,\n          -55,\n          -55,\n          89,\n          65,\n          -54,\n          -93,\n          85,\n          65,\n          -42,\n          -124,\n          -106,\n          65,\n          55,\n          115,\n          -69,\n          65,\n          62,\n          96,\n          -120,\n          65,\n          56,\n          -56,\n          -7,\n          65,\n          86,\n          -66,\n          27,\n          65,\n          105,\n          -25,\n          111,\n          65,\n          112,\n          -40,\n          89,\n          65,\n          -117,\n          80,\n          114,\n          65,\n          -100,\n          -46,\n          115,\n          65,\n          -106,\n          -59,\n          -48,\n          65,\n          -92,\n          -40,\n          -76,\n          65,\n          -96,\n          30,\n          -35,\n          65,\n          -95,\n          -1,\n          -92,\n          65,\n          -67,\n          6,\n          127,\n          65,\n          -76,\n          -2,\n          -28,\n          65,\n          -64,\n          94,\n          113,\n          65,\n          -58,\n          -101,\n          109,\n          65,\n          -64,\n          15,\n          -61,\n          65,\n          -43,\n          -64,\n          -62,\n          65,\n          41,\n          -10,\n          50,\n          65,\n          61,\n          120,\n          72,\n          65,\n          108,\n          -6,\n          -87,\n          65,\n          -123,\n          -2,\n          -71,\n          65,\n          -106,\n          106,\n          86,\n          65,\n          -105,\n          -124,\n          71,\n          65,\n          -102,\n          -110,\n          82,\n          65,\n          -96,\n          -46,\n          -2,\n          65,\n          -88,\n          86,\n          -12,\n          
65,\n          -81,\n          -103,\n          -23,\n          65,\n          -79,\n          87,\n          -81,\n          65,\n          -72,\n          68,\n          -114,\n          65,\n          -55,\n          -86,\n          126,\n          65,\n          -58,\n          31,\n          -47,\n          65,\n          -50,\n          43,\n          -50,\n          65,\n          -54,\n          19,\n          -81,\n          65,\n          -53,\n          -128,\n          -115,\n          65,\n          61,\n          110,\n          -37,\n          65,\n          88,\n          14,\n          122,\n          65,\n          90,\n          109,\n          -90,\n          65,\n          117,\n          -103,\n          105,\n          65,\n          -113,\n          -92,\n          -101,\n          65,\n          -119,\n          98,\n          71,\n          65,\n          -107,\n          125,\n          -45,\n          65,\n          -106,\n          -66,\n          42,\n          65,\n          -95,\n          36,\n          -79,\n          65,\n          -102,\n          -34,\n          30,\n          65,\n          -93,\n          37,\n          120,\n          65,\n          -89,\n          73,\n          -72,\n          65,\n          -88,\n          10,\n          30,\n          65,\n          -80,\n          100,\n          15,\n          65,\n          -72,\n          87,\n          36,\n          65,\n          -64,\n          -77,\n          -5,\n          65,\n          54,\n          62,\n          27,\n          65,\n          73,\n          -96,\n          25,\n          65,\n          76,\n          -84,\n          27,\n          65,\n          87,\n          113,\n          90,\n          65,\n          101,\n          -88,\n          -43,\n          65,\n          -123,\n          105,\n          -62,\n          65,\n          -117,\n          42,\n          72,\n          65,\n          -112,\n          24,\n          7,\n          65,\n 
         -102,\n          15,\n          84,\n          65,\n          -99,\n          -118,\n          9,\n          65,\n          -101,\n          -13,\n          -91,\n          65,\n          -94,\n          95,\n          114,\n          65,\n          -93,\n          63,\n          36,\n          65,\n          -91,\n          -47,\n          -56,\n          65,\n          -81,\n          70,\n          -73,\n          65,\n          -84,\n          60,\n          80,\n          65,\n          -70,\n          16,\n          -78,\n          65,\n          75,\n          -128,\n          -55,\n          65,\n          91,\n          118,\n          -119,\n          65,\n          84,\n          -19,\n          -42,\n          65,\n          109,\n          -24,\n          -56,\n          65,\n          -121,\n          92,\n          124,\n          65,\n          -122,\n          121,\n          77,\n          65,\n          -116,\n          -127,\n          16,\n          65,\n          -108,\n          -15,\n          71,\n          65,\n          -102,\n          44,\n          -28,\n          65,\n          -103,\n          -32,\n          4,\n          65,\n          -98,\n          -40,\n          105,\n          65,\n          -103,\n          108,\n          102,\n          65,\n          -83,\n          121,\n          78,\n          65,\n          -83,\n          40,\n          113,\n          65,\n          80,\n          86,\n          -67,\n          65,\n          -127,\n          115,\n          94,\n          65,\n          -124,\n          64,\n          26,\n          65,\n          -117,\n          -38,\n          -35,\n          65,\n          -119,\n          51,\n          53,\n          65,\n          -112,\n          -31,\n          -48,\n          65,\n          -95,\n          -77,\n          89,\n          65,\n          -96,\n          104,\n          107,\n          65,\n          -86,\n          12,\n          41,\n          
65,\n          -124,\n          0,\n          47\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 151,\n        \"leftIndex\": [\n          0,\n          1,\n          151,\n          1065874399,\n          920356606,\n          401156708,\n          499816368,\n          706818,\n          0\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          151,\n          529519387,\n          49466738,\n          395294356,\n          440448608,\n          4331569,\n          0\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 857711434791583806,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          155,\n          992889064,\n          310923725,\n          403928644,\n          282503518,\n          990434604,\n          941095609,\n          1059012664,\n          869265428,\n          414776841,\n          199683952,\n          33893286,\n          567145559,\n          516346987,\n          747111319,\n          548360191,\n          31606\n        ],\n        \"cutValueData\": [\n          66,\n          52,\n          -32,\n          -12,\n          65,\n          120,\n          3,\n          -63,\n          66,\n          -100,\n          68,\n          -63,\n          65,\n          116,\n          -38,\n          -16,\n   
       65,\n          -49,\n          67,\n          -2,\n          66,\n          102,\n          44,\n          -5,\n          65,\n          78,\n          54,\n          -102,\n          65,\n          -99,\n          -49,\n          -94,\n          66,\n          4,\n          0,\n          113,\n          66,\n          -106,\n          2,\n          89,\n          65,\n          71,\n          75,\n          -62,\n          65,\n          87,\n          58,\n          62,\n          65,\n          -88,\n          -65,\n          7,\n          65,\n          -77,\n          -41,\n          -72,\n          65,\n          -21,\n          22,\n          38,\n          66,\n          92,\n          -100,\n          127,\n          65,\n          76,\n          -104,\n          -64,\n          65,\n          69,\n          49,\n          93,\n          65,\n          79,\n          -102,\n          46,\n          65,\n          93,\n          -4,\n          5,\n          65,\n          -97,\n          120,\n          -89,\n          65,\n          -102,\n          -79,\n          -44,\n          65,\n          -77,\n          -105,\n          25,\n          65,\n          -40,\n          -33,\n          -82,\n          65,\n          49,\n          -29,\n          -85,\n          65,\n          90,\n          18,\n          -86,\n          65,\n          64,\n          -127,\n          -78,\n          65,\n          102,\n          86,\n          35,\n          65,\n          125,\n          70,\n          -12,\n          65,\n          -104,\n          -3,\n          -38,\n          65,\n          -103,\n          -86,\n          38,\n          65,\n          -60,\n          114,\n          93,\n          65,\n          -36,\n          -38,\n          -61,\n          65,\n          49,\n          -107,\n          11,\n          65,\n          41,\n          126,\n          -66,\n          65,\n          83,\n          20,\n          41,\n          65,\n          
84,\n          127,\n          -69,\n          65,\n          93,\n          -127,\n          -90,\n          65,\n          -114,\n          -7,\n          17,\n          65,\n          -99,\n          49,\n          -16,\n          65,\n          -99,\n          124,\n          82,\n          65,\n          -95,\n          -109,\n          -84,\n          65,\n          -68,\n          2,\n          54,\n          65,\n          -49,\n          17,\n          112,\n          65,\n          -41,\n          34,\n          118,\n          65,\n          59,\n          105,\n          -123,\n          65,\n          55,\n          -9,\n          55,\n          65,\n          82,\n          21,\n          54,\n          65,\n          82,\n          -105,\n          44,\n          65,\n          105,\n          -126,\n          -9,\n          65,\n          109,\n          88,\n          0,\n          65,\n          91,\n          35,\n          -9,\n          65,\n          115,\n          -56,\n          -5,\n          65,\n          -116,\n          -39,\n          -85,\n          65,\n          -90,\n          -43,\n          47,\n          65,\n          -106,\n          -50,\n          -14,\n          65,\n          -102,\n          55,\n          41,\n          65,\n          -93,\n          87,\n          110,\n          65,\n          -83,\n          50,\n          -69,\n          65,\n          -86,\n          7,\n          -82,\n          65,\n          -61,\n          -108,\n          -124,\n          65,\n          -57,\n          -75,\n          -54,\n          65,\n          -54,\n          -116,\n          64,\n          65,\n          -47,\n          78,\n          16,\n          65,\n          43,\n          -17,\n          121,\n          65,\n          45,\n          -52,\n          117,\n          65,\n          74,\n          -9,\n          66,\n          65,\n          64,\n          -35,\n          7,\n          65,\n          91,\n          
-111,\n          -63,\n          65,\n          112,\n          58,\n          -90,\n          65,\n          -114,\n          -64,\n          91,\n          65,\n          -111,\n          -87,\n          -77,\n          65,\n          -96,\n          -102,\n          -42,\n          65,\n          -92,\n          -8,\n          -1,\n          65,\n          -89,\n          58,\n          107,\n          65,\n          -104,\n          -68,\n          68,\n          65,\n          -88,\n          -50,\n          -96,\n          65,\n          -93,\n          -124,\n          -12,\n          65,\n          -73,\n          16,\n          -101,\n          65,\n          -71,\n          -106,\n          -51,\n          65,\n          -63,\n          89,\n          83,\n          65,\n          -52,\n          -121,\n          -103,\n          65,\n          -52,\n          -50,\n          66,\n          65,\n          -43,\n          -98,\n          67,\n          65,\n          -47,\n          -53,\n          121,\n          65,\n          -45,\n          55,\n          14,\n          65,\n          -38,\n          40,\n          -28,\n          65,\n          33,\n          17,\n          29,\n          65,\n          39,\n          28,\n          28,\n          65,\n          58,\n          -6,\n          -97,\n          65,\n          68,\n          -60,\n          -37,\n          65,\n          73,\n          -31,\n          -72,\n          65,\n          -105,\n          23,\n          -30,\n          65,\n          -123,\n          69,\n          89,\n          65,\n          -106,\n          54,\n          105,\n          65,\n          -109,\n          -47,\n          -2,\n          65,\n          -94,\n          88,\n          -3,\n          65,\n          -97,\n          54,\n          -27,\n          65,\n          -91,\n          92,\n          -9,\n          65,\n          -90,\n          -49,\n          -109,\n          65,\n          -77,\n          
-42,\n          -98,\n          65,\n          -80,\n          10,\n          -23,\n          65,\n          -76,\n          -16,\n          77,\n          65,\n          -69,\n          -121,\n          -5,\n          65,\n          -68,\n          -21,\n          28,\n          65,\n          -56,\n          88,\n          119,\n          65,\n          -41,\n          -50,\n          -72,\n          65,\n          -55,\n          109,\n          -8,\n          65,\n          33,\n          -4,\n          -87,\n          65,\n          58,\n          -96,\n          109,\n          65,\n          95,\n          62,\n          -111,\n          65,\n          -120,\n          -119,\n          -93,\n          65,\n          -119,\n          96,\n          -60,\n          65,\n          -115,\n          -43,\n          112,\n          65,\n          -118,\n          -122,\n          -76,\n          65,\n          -108,\n          -98,\n          -33,\n          65,\n          -106,\n          91,\n          -23,\n          65,\n          -102,\n          98,\n          117,\n          65,\n          -94,\n          -112,\n          76,\n          65,\n          -96,\n          -107,\n          79,\n          65,\n          -91,\n          -23,\n          -64,\n          65,\n          -88,\n          105,\n          26,\n          65,\n          -84,\n          43,\n          -88,\n          65,\n          -84,\n          49,\n          -111,\n          65,\n          -69,\n          -122,\n          35,\n          65,\n          -80,\n          44,\n          92,\n          65,\n          -65,\n          -89,\n          -116,\n          65,\n          -59,\n          51,\n          -65,\n          65,\n          -54,\n          -121,\n          -16,\n          65,\n          -46,\n          -78,\n          34,\n          65,\n          68,\n          26,\n          80,\n          65,\n          -124,\n          4,\n          63,\n          65,\n          -105,\n     
     111,\n          -40,\n          65,\n          -119,\n          51,\n          8,\n          65,\n          -106,\n          44,\n          46,\n          65,\n          -111,\n          61,\n          20,\n          65,\n          -94,\n          127,\n          -109,\n          65,\n          -89,\n          54,\n          100,\n          65,\n          -81,\n          -15,\n          -98,\n          65,\n          -73,\n          41,\n          125,\n          65,\n          -78,\n          91,\n          111,\n          65,\n          -71,\n          66,\n          70,\n          65,\n          -54,\n          45,\n          47,\n          65,\n          -39,\n          -52,\n          -19,\n          65,\n          -127,\n          37,\n          -73,\n          65,\n          -127,\n          37,\n          -77,\n          65,\n          -85,\n          113,\n          96,\n          65,\n          -126,\n          95,\n          -94,\n          65,\n          -123,\n          54,\n          -15,\n          65,\n          -82,\n          127,\n          -54,\n          65,\n          -120,\n          -52,\n          19,\n          65,\n          -119,\n          115,\n          118,\n          65,\n          -85,\n          111,\n          106,\n          65,\n          -120,\n          92,\n          -102,\n          65,\n          -78,\n          -54,\n          0\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 155,\n        \"leftIndex\": [\n          0,\n          1,\n          155,\n          160792543,\n          478870959,\n          193547359,\n          11607869,\n          285739649,\n          0\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          155,\n          1031548147,\n          1019338739,\n          1026751,\n          421681978,\n          905972042,\n          5\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        
\"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -1187640641902384027,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          169,\n          862668693,\n          348672342,\n          947549056,\n          615596819,\n          974769662,\n          472870819,\n          235691778,\n          443611719,\n          264015823,\n          525086843,\n          1054878591,\n          486402587,\n          1065681383,\n          312544744,\n          1006618614,\n          322850877,\n          18019673\n        ],\n        \"cutValueData\": [\n          66,\n          64,\n          -110,\n          82,\n          65,\n          -72,\n          -74,\n          0,\n          66,\n          -103,\n          92,\n          83,\n          65,\n          100,\n          9,\n          79,\n          65,\n          -67,\n          57,\n          -26,\n          66,\n          127,\n          -54,\n          -79,\n          65,\n          83,\n          3,\n          99,\n          65,\n          125,\n          25,\n          17,\n          65,\n          -72,\n          -67,\n          4,\n          66,\n          47,\n          -53,\n          5,\n          66,\n          95,\n          96,\n          24,\n          66,\n          113,\n          3,\n          39,\n          65,\n          64,\n          -59,\n          104,\n          65,\n          89,\n          98,\n          3,\n         
 65,\n          114,\n          31,\n          119,\n          65,\n          -128,\n          120,\n          -113,\n          65,\n          -22,\n          126,\n          26,\n          66,\n          99,\n          -4,\n          72,\n          66,\n          118,\n          -78,\n          27,\n          66,\n          -126,\n          82,\n          -24,\n          65,\n          61,\n          -85,\n          -116,\n          65,\n          80,\n          -33,\n          -18,\n          65,\n          108,\n          119,\n          57,\n          65,\n          103,\n          54,\n          -22,\n          65,\n          116,\n          60,\n          7,\n          65,\n          115,\n          28,\n          25,\n          65,\n          -118,\n          78,\n          -92,\n          65,\n          -120,\n          -87,\n          -10,\n          65,\n          -49,\n          -32,\n          -113,\n          66,\n          -121,\n          20,\n          -16,\n          66,\n          -118,\n          -115,\n          62,\n          65,\n          41,\n          -49,\n          95,\n          65,\n          68,\n          83,\n          -102,\n          65,\n          69,\n          99,\n          50,\n          65,\n          111,\n          -84,\n          94,\n          65,\n          121,\n          5,\n          -121,\n          65,\n          -121,\n          55,\n          98,\n          65,\n          -117,\n          -122,\n          -23,\n          65,\n          -120,\n          13,\n          93,\n          65,\n          -91,\n          45,\n          -81,\n          65,\n          -51,\n          121,\n          3,\n          65,\n          -56,\n          2,\n          86,\n          65,\n          44,\n          -99,\n          114,\n          65,\n          71,\n          47,\n          -69,\n          65,\n          58,\n          79,\n          24,\n          65,\n          79,\n          115,\n          121,\n          65,\n        
  88,\n          32,\n          -74,\n          65,\n          107,\n          -115,\n          62,\n          65,\n          120,\n          63,\n          59,\n          65,\n          -120,\n          -7,\n          88,\n          65,\n          -125,\n          -77,\n          -108,\n          65,\n          -118,\n          56,\n          -64,\n          65,\n          -83,\n          64,\n          -77,\n          65,\n          -54,\n          -110,\n          92,\n          65,\n          -43,\n          -15,\n          21,\n          65,\n          -56,\n          96,\n          -127,\n          65,\n          -39,\n          112,\n          85,\n          65,\n          40,\n          9,\n          -84,\n          65,\n          37,\n          1,\n          -88,\n          65,\n          62,\n          -59,\n          107,\n          65,\n          58,\n          -9,\n          -126,\n          65,\n          71,\n          -84,\n          -19,\n          65,\n          77,\n          61,\n          5,\n          65,\n          75,\n          -85,\n          -78,\n          65,\n          92,\n          -12,\n          11,\n          65,\n          112,\n          -68,\n          -26,\n          65,\n          -125,\n          119,\n          5,\n          65,\n          119,\n          46,\n          -115,\n          65,\n          112,\n          -26,\n          68,\n          65,\n          -120,\n          -66,\n          30,\n          65,\n          -124,\n          -33,\n          -71,\n          65,\n          -91,\n          -41,\n          10,\n          65,\n          -84,\n          -75,\n          91,\n          65,\n          -68,\n          1,\n          -125,\n          65,\n          -64,\n          -115,\n          -123,\n          65,\n          -57,\n          84,\n          -34,\n          65,\n          -52,\n          94,\n          53,\n          65,\n          -56,\n          110,\n          97,\n          65,\n          -45,\n    
      66,\n          -33,\n          65,\n          39,\n          113,\n          119,\n          65,\n          51,\n          -111,\n          33,\n          65,\n          51,\n          -38,\n          -111,\n          65,\n          78,\n          -86,\n          43,\n          65,\n          57,\n          -125,\n          -12,\n          65,\n          67,\n          92,\n          -95,\n          65,\n          119,\n          -93,\n          107,\n          65,\n          -127,\n          -72,\n          -9,\n          65,\n          -124,\n          -63,\n          -118,\n          65,\n          -92,\n          33,\n          123,\n          65,\n          -95,\n          16,\n          -118,\n          65,\n          -90,\n          34,\n          -53,\n          65,\n          -92,\n          85,\n          86,\n          65,\n          -88,\n          21,\n          6,\n          65,\n          -79,\n          -25,\n          11,\n          65,\n          -62,\n          -54,\n          37,\n          65,\n          -60,\n          6,\n          92,\n          65,\n          -57,\n          54,\n          -94,\n          65,\n          -53,\n          -45,\n          24,\n          65,\n          -62,\n          -39,\n          -68,\n          65,\n          -43,\n          -52,\n          -101,\n          65,\n          -33,\n          -84,\n          -110,\n          65,\n          55,\n          -36,\n          -88,\n          65,\n          45,\n          42,\n          -7,\n          65,\n          55,\n          80,\n          51,\n          65,\n          66,\n          74,\n          28,\n          65,\n          -120,\n          -19,\n          30,\n          65,\n          -114,\n          -13,\n          110,\n          65,\n          -97,\n          62,\n          48,\n          65,\n          -104,\n          -21,\n          -4,\n          65,\n          -90,\n          4,\n          9,\n          65,\n          -86,\n          -125,\n   
       -41,\n          65,\n          -85,\n          -41,\n          -97,\n          65,\n          -76,\n          -22,\n          -49,\n          65,\n          -65,\n          106,\n          117,\n          65,\n          -64,\n          17,\n          86,\n          65,\n          -58,\n          68,\n          102,\n          65,\n          -60,\n          46,\n          106,\n          65,\n          -41,\n          -107,\n          -82,\n          65,\n          -34,\n          -123,\n          114,\n          65,\n          37,\n          -6,\n          86,\n          65,\n          36,\n          26,\n          -11,\n          65,\n          -122,\n          -35,\n          -128,\n          65,\n          -127,\n          -92,\n          34,\n          65,\n          -105,\n          -75,\n          -43,\n          65,\n          -98,\n          125,\n          108,\n          65,\n          -89,\n          65,\n          29,\n          65,\n          -93,\n          52,\n          -76,\n          65,\n          -82,\n          -11,\n          -86,\n          65,\n          -74,\n          58,\n          71,\n          65,\n          -79,\n          120,\n          -4,\n          65,\n          -69,\n          -63,\n          63,\n          65,\n          -33,\n          -107,\n          21,\n          65,\n          -114,\n          -11,\n          -27,\n          65,\n          -100,\n          -121,\n          -23,\n          65,\n          -111,\n          -69,\n          44,\n          65,\n          -93,\n          -85,\n          26,\n          65,\n          -85,\n          -76,\n          97,\n          65,\n          -81,\n          -92,\n          -62,\n          65,\n          -85,\n          83,\n          -16,\n          65,\n          -78,\n          68,\n          -27,\n          65,\n          -78,\n          95,\n          -41,\n          65,\n          -80,\n          24,\n          35,\n          65,\n          -107,\n          -65,\n 
         107,\n          65,\n          -110,\n          92,\n          -57,\n          65,\n          -100,\n          -2,\n          -8,\n          65,\n          -86,\n          26,\n          81,\n          65,\n          -88,\n          17,\n          6,\n          65,\n          -87,\n          -116,\n          92,\n          65,\n          -77,\n          110,\n          -28,\n          65,\n          -68,\n          122,\n          42,\n          65,\n          -73,\n          101,\n          -34,\n          65,\n          -73,\n          -45,\n          -117,\n          65,\n          -116,\n          -110,\n          80,\n          65,\n          -119,\n          -29,\n          -115,\n          65,\n          -100,\n          -12,\n          -7,\n          65,\n          -73,\n          85,\n          23,\n          65,\n          -66,\n          -111,\n          5,\n          65,\n          -75,\n          -113,\n          118,\n          65,\n          -80,\n          -122,\n          53,\n          65,\n          -114,\n          -53,\n          -77,\n          65,\n          -118,\n          -21,\n          19,\n          65,\n          -100,\n          71,\n          -48,\n          65,\n          -76,\n          51,\n          -73,\n          65,\n          -73,\n          42,\n          68,\n          65,\n          -109,\n          113,\n          42,\n          65,\n          -110,\n          -76,\n          33,\n          65,\n          -105,\n          -85,\n          -114,\n          65,\n          -98,\n          1,\n          8,\n          65,\n          -98,\n          -75,\n          -27\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 169,\n        \"leftIndex\": [\n          0,\n          1,\n          169,\n          498727679,\n          670695278,\n          404159026,\n          272864308,\n          811181400,\n          51517\n        ],\n        
\"rightIndex\": [\n          0,\n          1,\n          169,\n          471399675,\n          821911114,\n          681474082,\n          8262807,\n          282646216,\n          163840\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5063750875311126499,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          160,\n          130409753,\n          789048668,\n          716434689,\n          657797928,\n          855753852,\n          283770276,\n          417137177,\n          311753669,\n          687644860,\n          1069727695,\n          1000767199,\n          788220748,\n          969933861,\n          266687793,\n          685977667,\n          52226489\n        ],\n        \"cutValueData\": [\n          66,\n          118,\n          80,\n          108,\n          66,\n          75,\n          83,\n          39,\n          66,\n          -104,\n          35,\n          1,\n          65,\n          105,\n          -121,\n          -29,\n          66,\n          112,\n          74,\n          49,\n          66,\n          -107,\n          -79,\n          -127,\n          65,\n          36,\n          101,\n          49,\n          66,\n          53,\n          -94,\n          -15,\n          66,\n          109,\n          -102,\n          -61,\n          66,\n          -124,\n          81,\n          -7,\n          
65,\n          41,\n          -31,\n          -81,\n          65,\n          87,\n          27,\n          17,\n          65,\n          -30,\n          -25,\n          -93,\n          65,\n          39,\n          88,\n          62,\n          65,\n          51,\n          -83,\n          91,\n          65,\n          81,\n          -64,\n          41,\n          65,\n          -33,\n          76,\n          -54,\n          66,\n          3,\n          36,\n          66,\n          65,\n          38,\n          -95,\n          59,\n          65,\n          39,\n          -107,\n          32,\n          65,\n          57,\n          -126,\n          -93,\n          65,\n          91,\n          -9,\n          -53,\n          65,\n          90,\n          21,\n          -102,\n          65,\n          101,\n          51,\n          39,\n          65,\n          108,\n          -112,\n          -42,\n          65,\n          -35,\n          125,\n          -13,\n          65,\n          41,\n          27,\n          -16,\n          65,\n          61,\n          -43,\n          -83,\n          65,\n          60,\n          0,\n          -6,\n          65,\n          64,\n          -97,\n          -11,\n          65,\n          106,\n          -76,\n          51,\n          65,\n          96,\n          36,\n          -26,\n          65,\n          106,\n          -51,\n          -63,\n          65,\n          -123,\n          81,\n          -105,\n          65,\n          36,\n          98,\n          40,\n          65,\n          57,\n          73,\n          -106,\n          65,\n          67,\n          -127,\n          87,\n          65,\n          86,\n          -58,\n          90,\n          65,\n          119,\n          118,\n          -88,\n          65,\n          -113,\n          92,\n          -87,\n          65,\n          -89,\n          10,\n          -91,\n          65,\n          36,\n          -45,\n          8,\n          65,\n          72,\n        
  83,\n          25,\n          65,\n          69,\n          57,\n          -99,\n          65,\n          101,\n          -98,\n          65,\n          65,\n          103,\n          110,\n          -2,\n          65,\n          120,\n          -15,\n          28,\n          65,\n          -95,\n          57,\n          -112,\n          65,\n          -78,\n          -33,\n          53,\n          65,\n          77,\n          35,\n          5,\n          65,\n          71,\n          88,\n          116,\n          65,\n          75,\n          20,\n          -43,\n          65,\n          95,\n          -48,\n          87,\n          65,\n          121,\n          52,\n          -48,\n          65,\n          112,\n          -45,\n          -64,\n          65,\n          119,\n          58,\n          112,\n          65,\n          -113,\n          86,\n          -123,\n          65,\n          -101,\n          106,\n          66,\n          65,\n          -86,\n          -78,\n          -29,\n          65,\n          -65,\n          -70,\n          -94,\n          65,\n          50,\n          108,\n          118,\n          65,\n          68,\n          34,\n          112,\n          65,\n          64,\n          100,\n          -97,\n          65,\n          77,\n          -5,\n          58,\n          65,\n          109,\n          112,\n          -66,\n          65,\n          120,\n          77,\n          61,\n          65,\n          -121,\n          60,\n          102,\n          65,\n          -128,\n          16,\n          -25,\n          65,\n          -100,\n          -79,\n          -69,\n          65,\n          -89,\n          -85,\n          113,\n          65,\n          -84,\n          -96,\n          53,\n          65,\n          -75,\n          -40,\n          122,\n          65,\n          -71,\n          -53,\n          47,\n          65,\n          -56,\n          -61,\n          72,\n          65,\n          85,\n          15,\n        
  74,\n          65,\n          108,\n          104,\n          -99,\n          65,\n          124,\n          5,\n          -43,\n          65,\n          126,\n          86,\n          -90,\n          65,\n          -126,\n          -103,\n          118,\n          65,\n          -121,\n          -123,\n          -25,\n          65,\n          -120,\n          64,\n          21,\n          65,\n          -102,\n          77,\n          -46,\n          65,\n          -99,\n          121,\n          -27,\n          65,\n          -85,\n          -22,\n          24,\n          65,\n          -93,\n          13,\n          -13,\n          65,\n          -85,\n          112,\n          108,\n          65,\n          -78,\n          -74,\n          2,\n          65,\n          -81,\n          -54,\n          57,\n          65,\n          -77,\n          122,\n          37,\n          65,\n          -71,\n          -17,\n          99,\n          65,\n          -65,\n          -41,\n          30,\n          65,\n          -49,\n          -53,\n          14,\n          65,\n          117,\n          -11,\n          87,\n          65,\n          114,\n          12,\n          -70,\n          65,\n          -122,\n          51,\n          118,\n          65,\n          126,\n          30,\n          46,\n          65,\n          -114,\n          -32,\n          -84,\n          65,\n          -118,\n          -61,\n          -124,\n          65,\n          -106,\n          -46,\n          -73,\n          65,\n          -124,\n          -45,\n          22,\n          65,\n          -94,\n          125,\n          -111,\n          65,\n          -101,\n          -94,\n          -7,\n          65,\n          -93,\n          -53,\n          84,\n          65,\n          -94,\n          92,\n          48,\n          65,\n          -81,\n          -107,\n          70,\n          65,\n          -78,\n          -14,\n          118,\n          65,\n          -75,\n          -25,\n    
      -42,\n          65,\n          -73,\n          0,\n          -122,\n          65,\n          -70,\n          -46,\n          66,\n          65,\n          -61,\n          49,\n          39,\n          65,\n          -41,\n          36,\n          -95,\n          65,\n          -114,\n          1,\n          44,\n          65,\n          -119,\n          -95,\n          81,\n          65,\n          -114,\n          -71,\n          19,\n          65,\n          -104,\n          -52,\n          -95,\n          65,\n          -94,\n          89,\n          39,\n          65,\n          -98,\n          -97,\n          50,\n          65,\n          -84,\n          -10,\n          111,\n          65,\n          -85,\n          -85,\n          42,\n          65,\n          -77,\n          15,\n          122,\n          65,\n          -71,\n          -113,\n          103,\n          65,\n          -59,\n          -25,\n          -124,\n          65,\n          -70,\n          100,\n          -102,\n          65,\n          -43,\n          95,\n          47,\n          65,\n          -45,\n          102,\n          93,\n          65,\n          -116,\n          122,\n          -99,\n          65,\n          -117,\n          72,\n          75,\n          65,\n          -106,\n          18,\n          -98,\n          65,\n          -109,\n          113,\n          -65,\n          65,\n          -97,\n          69,\n          -114,\n          65,\n          -93,\n          86,\n          109,\n          65,\n          -84,\n          -75,\n          -113,\n          65,\n          -84,\n          91,\n          34,\n          65,\n          -59,\n          24,\n          -109,\n          65,\n          -61,\n          25,\n          -55,\n          65,\n          -53,\n          -77,\n          12,\n          65,\n          -41,\n          -16,\n          71,\n          65,\n          -37,\n          48,\n          -78,\n          65,\n          -115,\n          74,\n    
      -18,\n          65,\n          -107,\n          49,\n          37,\n          65,\n          -110,\n          113,\n          4,\n          65,\n          -96,\n          -11,\n          38,\n          65,\n          -92,\n          -20,\n          -34,\n          65,\n          -57,\n          82,\n          -62,\n          65,\n          -58,\n          -45,\n          -41,\n          65,\n          -52,\n          72,\n          -17,\n          65,\n          -43,\n          -74,\n          34,\n          65,\n          -44,\n          -23,\n          -18,\n          65,\n          -110,\n          -23,\n          26,\n          65,\n          -105,\n          50,\n          77,\n          65,\n          -64,\n          -29,\n          -2,\n          65,\n          -48,\n          34,\n          96,\n          65,\n          -116,\n          76,\n          74,\n          65,\n          -107,\n          88,\n          33,\n          65,\n          -51,\n          -32,\n          -84,\n          65,\n          -105,\n          89,\n          69,\n          65,\n          -50,\n          -107,\n          -54,\n          65,\n          -117,\n          -127,\n          -50,\n          65,\n          -112,\n          -69,\n          -60,\n          65,\n          -109,\n          50,\n          -124\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 160,\n        \"leftIndex\": [\n          0,\n          1,\n          160,\n          229768447,\n          911161160,\n          87965140,\n          431539329,\n          604481934,\n          42\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          160,\n          833747019,\n          1063761258,\n          494550992,\n          211562115,\n          277096596,\n          176\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n    
  },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 5940388363466496244,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          161,\n          522850868,\n          527587558,\n          559853552,\n          492279565,\n          546472376,\n          400180637,\n          223472585,\n          457316181,\n          1063615559,\n          931380922,\n          1072195636,\n          192614000,\n          436342535,\n          323529216,\n          977169129,\n          100020840,\n          6\n        ],\n        \"cutValueData\": [\n          66,\n          36,\n          28,\n          -120,\n          65,\n          -27,\n          -54,\n          -92,\n          66,\n          79,\n          98,\n          -75,\n          65,\n          -42,\n          23,\n          -108,\n          65,\n          -22,\n          -67,\n          -8,\n          66,\n          79,\n          73,\n          114,\n          66,\n          112,\n          -119,\n          -53,\n          65,\n          -104,\n          -92,\n          -120,\n          65,\n          -48,\n          -34,\n          95,\n          65,\n          -29,\n          122,\n          68,\n          66,\n          37,\n          -75,\n          93,\n          66,\n          -114,\n          -34,\n          32,\n          65,\n          117,\n          80,\n          127,\n          65,\n          -60,\n          -1,\n          121,\n          65,\n          -46,\n          27,\n          -103,\n          
65,\n          -33,\n          75,\n          -93,\n          65,\n          64,\n          -55,\n          123,\n          65,\n          -104,\n          87,\n          -116,\n          65,\n          -91,\n          -36,\n          92,\n          65,\n          -60,\n          105,\n          -43,\n          65,\n          -42,\n          -80,\n          12,\n          65,\n          77,\n          118,\n          7,\n          65,\n          108,\n          -107,\n          -40,\n          65,\n          -123,\n          -115,\n          65,\n          65,\n          -95,\n          -48,\n          87,\n          65,\n          -82,\n          31,\n          -11,\n          65,\n          -52,\n          3,\n          24,\n          65,\n          -57,\n          110,\n          -99,\n          65,\n          -38,\n          -4,\n          49,\n          65,\n          48,\n          32,\n          -66,\n          65,\n          67,\n          -56,\n          49,\n          65,\n          79,\n          57,\n          -65,\n          65,\n          109,\n          62,\n          123,\n          65,\n          -121,\n          93,\n          -117,\n          65,\n          -111,\n          85,\n          59,\n          65,\n          -102,\n          68,\n          126,\n          65,\n          -96,\n          -46,\n          -115,\n          65,\n          -85,\n          -32,\n          -94,\n          65,\n          -79,\n          91,\n          59,\n          65,\n          -42,\n          -44,\n          -8,\n          65,\n          -36,\n          -59,\n          2,\n          65,\n          45,\n          69,\n          66,\n          65,\n          51,\n          23,\n          -69,\n          65,\n          86,\n          -63,\n          106,\n          65,\n          80,\n          60,\n          -38,\n          65,\n          113,\n          20,\n          -123,\n          65,\n          114,\n          -7,\n          -60,\n          65,\n          
-121,\n          -7,\n          -33,\n          65,\n          126,\n          -110,\n          -23,\n          65,\n          -105,\n          -105,\n          84,\n          65,\n          -100,\n          115,\n          -18,\n          65,\n          -89,\n          105,\n          67,\n          65,\n          -96,\n          58,\n          -127,\n          65,\n          -96,\n          -55,\n          -51,\n          65,\n          -81,\n          96,\n          -122,\n          65,\n          -79,\n          1,\n          -14,\n          65,\n          -70,\n          42,\n          -54,\n          65,\n          -52,\n          19,\n          83,\n          65,\n          -44,\n          63,\n          3,\n          65,\n          36,\n          46,\n          48,\n          65,\n          40,\n          64,\n          -53,\n          65,\n          49,\n          -112,\n          21,\n          65,\n          82,\n          127,\n          106,\n          65,\n          72,\n          18,\n          -96,\n          65,\n          103,\n          -73,\n          -85,\n          65,\n          99,\n          -58,\n          -119,\n          65,\n          118,\n          -14,\n          -23,\n          65,\n          -126,\n          -32,\n          -62,\n          65,\n          -124,\n          -35,\n          47,\n          65,\n          119,\n          122,\n          70,\n          65,\n          -100,\n          28,\n          -78,\n          65,\n          -111,\n          42,\n          72,\n          65,\n          -106,\n          -26,\n          66,\n          65,\n          -99,\n          118,\n          -110,\n          65,\n          -95,\n          -20,\n          26,\n          65,\n          -91,\n          -56,\n          11,\n          65,\n          -94,\n          -126,\n          86,\n          65,\n          -81,\n          33,\n          -4,\n          65,\n          -75,\n          4,\n          55,\n          65,\n          
-71,\n          9,\n          46,\n          65,\n          -60,\n          -42,\n          24,\n          65,\n          -57,\n          -62,\n          106,\n          65,\n          57,\n          -42,\n          68,\n          65,\n          38,\n          5,\n          -19,\n          65,\n          51,\n          65,\n          -95,\n          65,\n          51,\n          30,\n          53,\n          65,\n          74,\n          -106,\n          45,\n          65,\n          88,\n          -123,\n          55,\n          65,\n          80,\n          51,\n          -103,\n          65,\n          123,\n          94,\n          125,\n          65,\n          103,\n          22,\n          -55,\n          65,\n          119,\n          67,\n          -98,\n          65,\n          -113,\n          97,\n          -53,\n          65,\n          -114,\n          -49,\n          117,\n          65,\n          -108,\n          126,\n          -95,\n          65,\n          -105,\n          91,\n          -116,\n          65,\n          -95,\n          94,\n          45,\n          65,\n          -86,\n          -74,\n          22,\n          65,\n          -81,\n          -114,\n          5,\n          65,\n          -79,\n          41,\n          -47,\n          65,\n          -67,\n          -27,\n          62,\n          65,\n          -62,\n          56,\n          -115,\n          65,\n          -51,\n          77,\n          -5,\n          65,\n          -51,\n          -105,\n          12,\n          65,\n          -42,\n          -99,\n          41,\n          65,\n          61,\n          -26,\n          61,\n          65,\n          50,\n          -81,\n          -1,\n          65,\n          70,\n          73,\n          -42,\n          65,\n          48,\n          47,\n          -78,\n          65,\n          54,\n          -63,\n          50,\n          65,\n          100,\n          116,\n          77,\n          65,\n          114,\n          
39,\n          16,\n          65,\n          119,\n          84,\n          -77,\n          65,\n          -114,\n          -125,\n          32,\n          65,\n          -117,\n          69,\n          -68,\n          65,\n          -117,\n          17,\n          -26,\n          65,\n          -112,\n          25,\n          -126,\n          65,\n          -83,\n          -39,\n          60,\n          65,\n          -80,\n          60,\n          -26,\n          65,\n          -71,\n          91,\n          81,\n          65,\n          -59,\n          18,\n          -82,\n          65,\n          -68,\n          126,\n          37,\n          65,\n          -51,\n          -52,\n          -108,\n          65,\n          -46,\n          111,\n          4,\n          65,\n          -51,\n          -67,\n          126,\n          65,\n          32,\n          30,\n          45,\n          65,\n          59,\n          -107,\n          100,\n          65,\n          66,\n          20,\n          -97,\n          65,\n          73,\n          106,\n          5,\n          65,\n          92,\n          -94,\n          -71,\n          65,\n          -128,\n          98,\n          -39,\n          65,\n          -128,\n          126,\n          92,\n          65,\n          -125,\n          -12,\n          -27,\n          65,\n          -116,\n          -87,\n          122,\n          65,\n          -107,\n          -51,\n          107,\n          65,\n          -79,\n          -67,\n          114,\n          65,\n          -80,\n          127,\n          123,\n          65,\n          -70,\n          -8,\n          -93,\n          65,\n          -51,\n          76,\n          -22,\n          65,\n          -49,\n          116,\n          57,\n          65,\n          -50,\n          -37,\n          47,\n          65,\n          37,\n          3,\n          -127,\n          65,\n          71,\n          86,\n          103,\n          65,\n          125,\n          67,\n 
         114,\n          65,\n          -113,\n          -119,\n          84,\n          65,\n          -114,\n          -98,\n          -1,\n          65,\n          -116,\n          -97,\n          -59,\n          65,\n          -88,\n          29,\n          -11,\n          65,\n          -76,\n          -96,\n          -92,\n          65,\n          -67,\n          48,\n          31,\n          65,\n          -77,\n          -108,\n          59,\n          65,\n          -49,\n          -21,\n          -34,\n          65,\n          -50,\n          53,\n          -5,\n          65,\n          -121,\n          99,\n          -65,\n          65,\n          -120,\n          102,\n          -58,\n          65,\n          -115,\n          16,\n          82,\n          65,\n          -107,\n          -52,\n          -94,\n          65,\n          -73,\n          103,\n          58,\n          65,\n          -116,\n          -75,\n          -14,\n          65,\n          -109,\n          -120,\n          100,\n          65,\n          -112,\n          -108,\n          117\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 161,\n        \"leftIndex\": [\n          0,\n          1,\n          161,\n          871346591,\n          236760060,\n          359279779,\n          816957486,\n          84272152,\n          576\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          161,\n          738013663,\n          750779358,\n          104736815,\n          567040902,\n          101032232,\n          33\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5606530679278446991,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      
\"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          162,\n          617182353,\n          210141677,\n          997884226,\n          715561017,\n          570685881,\n          137754876,\n          265363614,\n          266829177,\n          52065312,\n          775122586,\n          551574685,\n          662981972,\n          423618675,\n          115319879,\n          372939092,\n          955234747,\n          0\n        ],\n        \"cutValueData\": [\n          66,\n          82,\n          -12,\n          8,\n          66,\n          0,\n          8,\n          44,\n          66,\n          106,\n          -41,\n          -8,\n          65,\n          -52,\n          1,\n          -38,\n          66,\n          55,\n          -120,\n          27,\n          66,\n          92,\n          27,\n          42,\n          65,\n          35,\n          69,\n          91,\n          65,\n          -50,\n          80,\n          94,\n          66,\n          71,\n          114,\n          -116,\n          65,\n          37,\n          -4,\n          -119,\n          65,\n          -68,\n          -30,\n          104,\n          65,\n          -53,\n          56,\n          81,\n          65,\n          -42,\n          52,\n          16,\n          65,\n          37,\n          -43,\n          63,\n          65,\n          116,\n          46,\n          18,\n          65,\n          -64,\n          79,\n          -97,\n          65,\n          -39,\n          -118,\n          -82,\n          65,\n          -36,\n          -77,\n          116,\n          65,\n          76,\n          -37,\n          54,\n     
     65,\n          -71,\n          35,\n          -103,\n          65,\n          -71,\n          102,\n          37,\n          65,\n          -51,\n          -128,\n          77,\n          65,\n          -46,\n          71,\n          -128,\n          65,\n          -41,\n          55,\n          -94,\n          65,\n          -46,\n          -72,\n          -95,\n          65,\n          50,\n          -89,\n          -61,\n          65,\n          -105,\n          53,\n          104,\n          65,\n          -127,\n          -75,\n          -111,\n          65,\n          -65,\n          -104,\n          -73,\n          65,\n          -58,\n          118,\n          90,\n          65,\n          -68,\n          -39,\n          78,\n          65,\n          -41,\n          73,\n          102,\n          65,\n          -48,\n          53,\n          -28,\n          65,\n          -44,\n          121,\n          40,\n          65,\n          -36,\n          10,\n          -114,\n          65,\n          -38,\n          -96,\n          12,\n          65,\n          55,\n          -59,\n          98,\n          65,\n          79,\n          116,\n          45,\n          65,\n          69,\n          -65,\n          -4,\n          65,\n          117,\n          97,\n          42,\n          65,\n          -86,\n          45,\n          67,\n          65,\n          -77,\n          -11,\n          98,\n          65,\n          -60,\n          44,\n          106,\n          65,\n          -61,\n          -80,\n          -123,\n          65,\n          -52,\n          125,\n          -28,\n          65,\n          -56,\n          38,\n          -95,\n          65,\n          -53,\n          125,\n          -91,\n          65,\n          -35,\n          47,\n          118,\n          65,\n          -35,\n          -55,\n          49,\n          65,\n          42,\n          -110,\n          -97,\n          65,\n          55,\n          -43,\n          -55,\n          
65,\n          58,\n          -14,\n          -12,\n          65,\n          107,\n          -40,\n          29,\n          65,\n          121,\n          -93,\n          -52,\n          65,\n          121,\n          -58,\n          -28,\n          65,\n          -116,\n          -35,\n          -93,\n          65,\n          -76,\n          115,\n          -3,\n          65,\n          -50,\n          112,\n          -25,\n          65,\n          -57,\n          -19,\n          -36,\n          65,\n          -45,\n          121,\n          98,\n          65,\n          47,\n          78,\n          9,\n          65,\n          57,\n          -9,\n          11,\n          65,\n          51,\n          61,\n          35,\n          65,\n          70,\n          -115,\n          -98,\n          65,\n          108,\n          -86,\n          -107,\n          65,\n          -126,\n          26,\n          -32,\n          65,\n          -116,\n          118,\n          -110,\n          65,\n          -101,\n          -13,\n          55,\n          65,\n          -84,\n          45,\n          89,\n          65,\n          -75,\n          99,\n          106,\n          65,\n          -57,\n          122,\n          23,\n          65,\n          -54,\n          82,\n          -27,\n          65,\n          46,\n          49,\n          -82,\n          65,\n          33,\n          -88,\n          -120,\n          65,\n          70,\n          -101,\n          -116,\n          65,\n          73,\n          48,\n          10,\n          65,\n          70,\n          110,\n          36,\n          65,\n          84,\n          -79,\n          25,\n          65,\n          103,\n          -46,\n          59,\n          65,\n          -128,\n          -66,\n          -100,\n          65,\n          -123,\n          13,\n          -63,\n          65,\n          -104,\n          -50,\n          -17,\n          65,\n          -107,\n          57,\n          -49,\n          
65,\n          -83,\n          -11,\n          67,\n          65,\n          -76,\n          -54,\n          98,\n          65,\n          -78,\n          -92,\n          79,\n          65,\n          -78,\n          -4,\n          -126,\n          65,\n          -71,\n          92,\n          83,\n          65,\n          -63,\n          -2,\n          -96,\n          65,\n          55,\n          -66,\n          -45,\n          65,\n          53,\n          75,\n          25,\n          65,\n          57,\n          9,\n          89,\n          65,\n          72,\n          40,\n          -67,\n          65,\n          83,\n          115,\n          56,\n          65,\n          89,\n          -67,\n          -81,\n          65,\n          105,\n          43,\n          -43,\n          65,\n          110,\n          70,\n          99,\n          65,\n          111,\n          48,\n          69,\n          65,\n          -128,\n          107,\n          112,\n          65,\n          -113,\n          90,\n          68,\n          65,\n          -103,\n          51,\n          48,\n          65,\n          -97,\n          -88,\n          119,\n          65,\n          -84,\n          -71,\n          27,\n          65,\n          -94,\n          43,\n          -11,\n          65,\n          -84,\n          26,\n          -127,\n          65,\n          -73,\n          -45,\n          -97,\n          65,\n          -81,\n          -95,\n          110,\n          65,\n          -75,\n          -12,\n          -123,\n          65,\n          -60,\n          -99,\n          95,\n          65,\n          -62,\n          54,\n          -9,\n          65,\n          94,\n          72,\n          80,\n          65,\n          82,\n          -38,\n          -8,\n          65,\n          107,\n          45,\n          107,\n          65,\n          122,\n          117,\n          -32,\n          65,\n          -128,\n          44,\n          -112,\n          65,\n          
-125,\n          39,\n          -99,\n          65,\n          -116,\n          -56,\n          26,\n          65,\n          -98,\n          -123,\n          27,\n          65,\n          -78,\n          -114,\n          -128,\n          65,\n          -81,\n          -53,\n          -58,\n          65,\n          -76,\n          -15,\n          85,\n          65,\n          72,\n          0,\n          -106,\n          65,\n          89,\n          -111,\n          83,\n          65,\n          112,\n          55,\n          60,\n          65,\n          -115,\n          34,\n          62,\n          65,\n          -113,\n          55,\n          41,\n          65,\n          -106,\n          -16,\n          53,\n          65,\n          -98,\n          30,\n          -118,\n          65,\n          -89,\n          -89,\n          -27,\n          65,\n          -88,\n          32,\n          -26,\n          65,\n          66,\n          30,\n          7,\n          65,\n          109,\n          20,\n          -13,\n          65,\n          -118,\n          32,\n          111,\n          65,\n          -118,\n          -73,\n          114,\n          65,\n          -105,\n          -58,\n          91,\n          65,\n          -109,\n          -45,\n          45,\n          65,\n          -83,\n          -2,\n          2,\n          65,\n          -89,\n          79,\n          -119,\n          65,\n          -109,\n          79,\n          -126,\n          65,\n          -109,\n          -92,\n          96,\n          65,\n          -91,\n          -46,\n          115,\n          65,\n          -90,\n          78,\n          47,\n          65,\n          -94,\n          4,\n          -48,\n          65,\n          -88,\n          127,\n          -121,\n          65,\n          -107,\n          104,\n          105,\n          65,\n          -111,\n          85,\n          8,\n          65,\n          -109,\n          -112,\n          100,\n          65,\n         
 -103,\n          -80,\n          -46,\n          65,\n          -85,\n          25,\n          14,\n          65,\n          -90,\n          42,\n          -13,\n          65,\n          -82,\n          42,\n          -56,\n          65,\n          -107,\n          85,\n          -22,\n          65,\n          -117,\n          -57,\n          -26,\n          65,\n          -107,\n          5,\n          77,\n          65,\n          -98,\n          -79,\n          -36,\n          65,\n          -97,\n          -101,\n          13,\n          65,\n          -90,\n          64,\n          21,\n          65,\n          -119,\n          -9,\n          14,\n          65,\n          -113,\n          26,\n          -49,\n          65,\n          -98,\n          115,\n          115,\n          65,\n          -95,\n          -76,\n          -128,\n          65,\n          -102,\n          -22,\n          -10\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 162,\n        \"leftIndex\": [\n          0,\n          1,\n          162,\n          520083151,\n          240129758,\n          31975375,\n          638603568,\n          156713440,\n          1034\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          162,\n          194958555,\n          392734565,\n          518920156,\n          245437376,\n          732905866,\n          40\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -6048817666391967237,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": 
false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          161,\n          637097553,\n          727030967,\n          781123065,\n          239819848,\n          1021133011,\n          1051414488,\n          60260399,\n          860547615,\n          941382511,\n          701943872,\n          648180016,\n          605208074,\n          960876759,\n          192442351,\n          378271223,\n          1059150239,\n          6\n        ],\n        \"cutValueData\": [\n          66,\n          -127,\n          -113,\n          64,\n          66,\n          81,\n          -76,\n          -28,\n          66,\n          -102,\n          -119,\n          109,\n          66,\n          10,\n          -35,\n          -128,\n          66,\n          123,\n          -42,\n          4,\n          66,\n          -119,\n          -88,\n          -5,\n          66,\n          -87,\n          -4,\n          -55,\n          65,\n          -12,\n          -37,\n          -93,\n          66,\n          63,\n          -56,\n          123,\n          66,\n          111,\n          101,\n          -79,\n          66,\n          -124,\n          21,\n          10,\n          66,\n          -84,\n          23,\n          -121,\n          65,\n          126,\n          13,\n          -20,\n          66,\n          9,\n          -50,\n          -102,\n          66,\n          20,\n          122,\n          28,\n          66,\n          59,\n          102,\n          39,\n          65,\n          53,\n          15,\n          81,\n          65,\n          -37,\n          50,\n          18,\n          65,\n          46,\n          -90,\n          75,\n          65,\n          66,\n          81,\n          1,\n          65,\n          -86,\n          81,\n          -23,\n          65,\n          -40,\n          -125,\n   
       67,\n          65,\n          59,\n          -76,\n          -81,\n          65,\n          32,\n          -86,\n          116,\n          65,\n          62,\n          -92,\n          24,\n          65,\n          119,\n          105,\n          122,\n          65,\n          -96,\n          -7,\n          25,\n          65,\n          -63,\n          94,\n          -75,\n          65,\n          44,\n          -121,\n          -52,\n          65,\n          43,\n          -55,\n          29,\n          65,\n          55,\n          45,\n          108,\n          65,\n          66,\n          -94,\n          23,\n          65,\n          63,\n          -71,\n          -31,\n          65,\n          68,\n          -13,\n          69,\n          65,\n          -126,\n          46,\n          -43,\n          65,\n          -116,\n          14,\n          104,\n          65,\n          -85,\n          -119,\n          124,\n          65,\n          -73,\n          -88,\n          98,\n          65,\n          -53,\n          54,\n          49,\n          65,\n          37,\n          110,\n          -127,\n          65,\n          61,\n          -89,\n          -45,\n          65,\n          66,\n          -9,\n          -105,\n          65,\n          73,\n          -42,\n          -99,\n          65,\n          65,\n          -46,\n          96,\n          65,\n          126,\n          -8,\n          45,\n          65,\n          -127,\n          -24,\n          62,\n          65,\n          -113,\n          -13,\n          90,\n          65,\n          -68,\n          114,\n          107,\n          65,\n          -74,\n          -111,\n          98,\n          65,\n          -63,\n          124,\n          -55,\n          65,\n          -55,\n          5,\n          -104,\n          65,\n          -43,\n          96,\n          78,\n          65,\n          56,\n          96,\n          -38,\n          65,\n          33,\n          -36,\n          -109,\n  
        65,\n          49,\n          -96,\n          23,\n          65,\n          77,\n          103,\n          -104,\n          65,\n          66,\n          -115,\n          68,\n          65,\n          92,\n          -85,\n          -2,\n          65,\n          -122,\n          101,\n          89,\n          65,\n          117,\n          -116,\n          -53,\n          65,\n          -114,\n          21,\n          7,\n          65,\n          -118,\n          71,\n          60,\n          65,\n          -84,\n          95,\n          72,\n          65,\n          -75,\n          99,\n          122,\n          65,\n          -80,\n          114,\n          33,\n          65,\n          -57,\n          -95,\n          26,\n          65,\n          -54,\n          15,\n          20,\n          65,\n          -43,\n          17,\n          4,\n          65,\n          -38,\n          -79,\n          -100,\n          65,\n          46,\n          38,\n          81,\n          65,\n          77,\n          -27,\n          82,\n          65,\n          71,\n          -120,\n          46,\n          65,\n          90,\n          -111,\n          -24,\n          65,\n          100,\n          -23,\n          103,\n          65,\n          98,\n          126,\n          13,\n          65,\n          -124,\n          -120,\n          79,\n          65,\n          -119,\n          108,\n          -15,\n          65,\n          -122,\n          7,\n          61,\n          65,\n          -104,\n          -30,\n          -64,\n          65,\n          -86,\n          126,\n          38,\n          65,\n          -80,\n          100,\n          -85,\n          65,\n          -81,\n          -91,\n          -9,\n          65,\n          -70,\n          0,\n          -85,\n          65,\n          -73,\n          -66,\n          70,\n          65,\n          -58,\n          -80,\n          34,\n          65,\n          -57,\n          -35,\n          85,\n          65,\n 
         -58,\n          97,\n          72,\n          65,\n          -49,\n          -25,\n          49,\n          65,\n          -43,\n          48,\n          62,\n          65,\n          75,\n          119,\n          95,\n          65,\n          82,\n          -66,\n          123,\n          65,\n          111,\n          -78,\n          -76,\n          65,\n          -126,\n          74,\n          -12,\n          65,\n          -115,\n          -125,\n          47,\n          65,\n          -116,\n          -2,\n          98,\n          65,\n          -103,\n          -109,\n          85,\n          65,\n          -98,\n          -109,\n          81,\n          65,\n          -95,\n          106,\n          112,\n          65,\n          -89,\n          -55,\n          -50,\n          65,\n          -84,\n          -108,\n          -77,\n          65,\n          -82,\n          -122,\n          95,\n          65,\n          -65,\n          -116,\n          70,\n          65,\n          -74,\n          -66,\n          25,\n          65,\n          -64,\n          116,\n          104,\n          65,\n          -50,\n          104,\n          116,\n          65,\n          -52,\n          29,\n          23,\n          65,\n          -57,\n          -74,\n          89,\n          65,\n          -41,\n          -60,\n          -71,\n          65,\n          -45,\n          -79,\n          -82,\n          65,\n          -47,\n          78,\n          -89,\n          65,\n          101,\n          -72,\n          33,\n          65,\n          103,\n          -72,\n          11,\n          65,\n          121,\n          -48,\n          84,\n          65,\n          -128,\n          126,\n          -103,\n          65,\n          -122,\n          -98,\n          -94,\n          65,\n          -123,\n          -44,\n          -61,\n          65,\n          -119,\n          24,\n          85,\n          65,\n          -115,\n          105,\n          -32,\n          
65,\n          -102,\n          40,\n          -83,\n          65,\n          -102,\n          34,\n          -98,\n          65,\n          -91,\n          -91,\n          23,\n          65,\n          -91,\n          -98,\n          115,\n          65,\n          -84,\n          -41,\n          93,\n          65,\n          -86,\n          -75,\n          30,\n          65,\n          -75,\n          -110,\n          33,\n          65,\n          -80,\n          -120,\n          81,\n          65,\n          -69,\n          -51,\n          72,\n          65,\n          -58,\n          -101,\n          -13,\n          65,\n          -60,\n          69,\n          -125,\n          65,\n          -42,\n          74,\n          -9,\n          65,\n          -42,\n          10,\n          45,\n          65,\n          -46,\n          -101,\n          67,\n          65,\n          87,\n          5,\n          101,\n          65,\n          123,\n          -73,\n          -40,\n          65,\n          -125,\n          -42,\n          103,\n          65,\n          -126,\n          55,\n          90,\n          65,\n          -127,\n          119,\n          -36,\n          65,\n          -114,\n          88,\n          21,\n          65,\n          -109,\n          -40,\n          111,\n          65,\n          -91,\n          22,\n          -41,\n          65,\n          -86,\n          -18,\n          -40,\n          65,\n          -65,\n          -117,\n          45,\n          65,\n          -65,\n          -102,\n          13,\n          65,\n          -43,\n          -63,\n          -64,\n          65,\n          -48,\n          -11,\n          -63,\n          65,\n          -124,\n          -128,\n          -18,\n          65,\n          -113,\n          -80,\n          -70,\n          65,\n          -105,\n          -6,\n          -14,\n          65,\n          -111,\n          -107,\n          52,\n          65,\n          -89,\n          -117,\n          
13,\n          65,\n          -79,\n          -40,\n          4,\n          65,\n          -66,\n          56,\n          66,\n          65,\n          -45,\n          -121,\n          -115,\n          65,\n          -128,\n          -36,\n          -47,\n          65,\n          -111,\n          65,\n          -23,\n          65,\n          -98,\n          16,\n          -123,\n          65,\n          -108,\n          102,\n          -5,\n          65,\n          -106,\n          -77,\n          41,\n          65,\n          -111,\n          33,\n          -57,\n          65,\n          -99,\n          -2,\n          -78,\n          65,\n          -111,\n          96,\n          5\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 161,\n        \"leftIndex\": [\n          0,\n          1,\n          161,\n          266277279,\n          775861233,\n          450736639,\n          138556286,\n          187433024,\n          240\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          161,\n          262083039,\n          682747823,\n          480059450,\n          194799275,\n          270961737,\n          256\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -264714557822501083,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n   
       157,\n          849630854,\n          923894704,\n          47943257,\n          750616923,\n          322763974,\n          1022413955,\n          105053119,\n          1017673610,\n          699559799,\n          930866354,\n          737611215,\n          793217473,\n          118289421,\n          73547865,\n          117234736,\n          625023\n        ],\n        \"cutValueData\": [\n          66,\n          36,\n          53,\n          114,\n          65,\n          106,\n          -74,\n          -49,\n          66,\n          -111,\n          98,\n          -70,\n          65,\n          40,\n          16,\n          92,\n          65,\n          91,\n          76,\n          -106,\n          66,\n          56,\n          44,\n          -127,\n          66,\n          -111,\n          77,\n          73,\n          65,\n          47,\n          -116,\n          -67,\n          65,\n          74,\n          41,\n          -106,\n          65,\n          97,\n          -20,\n          27,\n          66,\n          45,\n          80,\n          85,\n          66,\n          -126,\n          -113,\n          -119,\n          66,\n          -93,\n          -101,\n          54,\n          65,\n          47,\n          -24,\n          84,\n          65,\n          36,\n          121,\n          -72,\n          65,\n          97,\n          54,\n          -90,\n          65,\n          -58,\n          16,\n          -111,\n          65,\n          34,\n          46,\n          20,\n          65,\n          47,\n          52,\n          -75,\n          65,\n          82,\n          -29,\n          16,\n          65,\n          110,\n          58,\n          -115,\n          65,\n          -78,\n          108,\n          -94,\n          65,\n          -26,\n          -92,\n          -98,\n          65,\n          33,\n          -32,\n          -92,\n          65,\n          51,\n          -90,\n          -104,\n          65,\n          54,\n          -10,\n 
         -100,\n          65,\n          74,\n          58,\n          48,\n          65,\n          103,\n          -54,\n          -94,\n          65,\n          100,\n          105,\n          126,\n          65,\n          120,\n          -62,\n          123,\n          65,\n          -61,\n          120,\n          84,\n          65,\n          -62,\n          -55,\n          12,\n          65,\n          42,\n          52,\n          26,\n          65,\n          51,\n          -14,\n          -89,\n          65,\n          51,\n          107,\n          -14,\n          65,\n          69,\n          93,\n          59,\n          65,\n          66,\n          118,\n          101,\n          65,\n          105,\n          -4,\n          -4,\n          65,\n          -102,\n          -52,\n          -116,\n          65,\n          -107,\n          -119,\n          3,\n          65,\n          -57,\n          123,\n          88,\n          65,\n          -51,\n          -103,\n          64,\n          65,\n          52,\n          12,\n          -1,\n          65,\n          74,\n          49,\n          -76,\n          65,\n          92,\n          -18,\n          -64,\n          65,\n          79,\n          -50,\n          109,\n          65,\n          96,\n          -63,\n          -8,\n          65,\n          -117,\n          -14,\n          77,\n          65,\n          -82,\n          89,\n          46,\n          65,\n          -70,\n          33,\n          19,\n          65,\n          -54,\n          13,\n          72,\n          65,\n          -36,\n          112,\n          -87,\n          65,\n          52,\n          -125,\n          -27,\n          65,\n          -127,\n          -2,\n          7,\n          65,\n          -114,\n          115,\n          56,\n          65,\n          -111,\n          99,\n          100,\n          65,\n          -87,\n          105,\n          -75,\n          65,\n          -73,\n          42,\n          47,\n  
        65,\n          -74,\n          -17,\n          86,\n          65,\n          -70,\n          73,\n          88,\n          65,\n          -17,\n          -76,\n          82,\n          65,\n          -42,\n          31,\n          -52,\n          65,\n          -39,\n          -120,\n          -33,\n          65,\n          -37,\n          31,\n          -55,\n          65,\n          66,\n          124,\n          -35,\n          65,\n          -99,\n          107,\n          -21,\n          65,\n          -127,\n          74,\n          54,\n          65,\n          -113,\n          -72,\n          120,\n          65,\n          -116,\n          74,\n          -104,\n          65,\n          -104,\n          100,\n          -119,\n          65,\n          -108,\n          117,\n          44,\n          65,\n          -80,\n          -48,\n          64,\n          65,\n          -76,\n          -6,\n          4,\n          65,\n          -65,\n          -42,\n          20,\n          65,\n          -73,\n          -65,\n          3,\n          65,\n          -64,\n          73,\n          69,\n          65,\n          -61,\n          103,\n          121,\n          65,\n          -44,\n          -104,\n          92,\n          65,\n          -33,\n          55,\n          -71,\n          65,\n          -37,\n          42,\n          68,\n          65,\n          -119,\n          -102,\n          -55,\n          65,\n          112,\n          100,\n          -99,\n          65,\n          -126,\n          34,\n          -32,\n          65,\n          -127,\n          -96,\n          -43,\n          65,\n          -110,\n          -19,\n          -72,\n          65,\n          -111,\n          -120,\n          -42,\n          65,\n          -108,\n          58,\n          98,\n          65,\n          -100,\n          17,\n          57,\n          65,\n          -83,\n          -87,\n          -16,\n          65,\n          -70,\n          38,\n          
-50,\n          65,\n          -62,\n          -62,\n          4,\n          65,\n          -58,\n          60,\n          23,\n          65,\n          -41,\n          74,\n          -121,\n          65,\n          122,\n          -52,\n          27,\n          65,\n          -124,\n          22,\n          2,\n          65,\n          114,\n          42,\n          -49,\n          65,\n          -125,\n          120,\n          1,\n          65,\n          -119,\n          -39,\n          -58,\n          65,\n          -109,\n          21,\n          125,\n          65,\n          -120,\n          -93,\n          101,\n          65,\n          -103,\n          -38,\n          -68,\n          65,\n          -81,\n          -12,\n          36,\n          65,\n          -84,\n          -120,\n          -5,\n          65,\n          -74,\n          45,\n          72,\n          65,\n          -80,\n          68,\n          -40,\n          65,\n          -55,\n          111,\n          -53,\n          65,\n          -55,\n          -12,\n          -107,\n          65,\n          -43,\n          -104,\n          -44,\n          65,\n          118,\n          -111,\n          -20,\n          65,\n          122,\n          -63,\n          -99,\n          65,\n          -125,\n          -53,\n          123,\n          65,\n          -119,\n          112,\n          -71,\n          65,\n          -114,\n          76,\n          86,\n          65,\n          -120,\n          42,\n          -5,\n          65,\n          -108,\n          14,\n          37,\n          65,\n          -111,\n          -64,\n          11,\n          65,\n          -107,\n          113,\n          26,\n          65,\n          -100,\n          4,\n          -60,\n          65,\n          -91,\n          -30,\n          -42,\n          65,\n          -84,\n          127,\n          -37,\n          65,\n          -77,\n          -74,\n          -115,\n          65,\n          -76,\n          101,\n  
        103,\n          65,\n          -58,\n          14,\n          -125,\n          65,\n          -51,\n          113,\n          23,\n          65,\n          -47,\n          -65,\n          72,\n          65,\n          -123,\n          80,\n          -15,\n          65,\n          -121,\n          -17,\n          58,\n          65,\n          -120,\n          42,\n          49,\n          65,\n          -114,\n          -12,\n          52,\n          65,\n          -101,\n          -46,\n          6,\n          65,\n          -99,\n          -8,\n          102,\n          65,\n          -99,\n          -99,\n          21,\n          65,\n          -91,\n          -32,\n          -9,\n          65,\n          -80,\n          -124,\n          -59,\n          65,\n          -56,\n          108,\n          1,\n          65,\n          -121,\n          -109,\n          -38,\n          65,\n          -100,\n          -38,\n          93,\n          65,\n          -101,\n          -54,\n          119,\n          65,\n          -103,\n          -53,\n          73,\n          65,\n          -90,\n          84,\n          -95,\n          65,\n          -82,\n          5,\n          88,\n          65,\n          -68,\n          49,\n          -26,\n          65,\n          -72,\n          71,\n          -48,\n          65,\n          -103,\n          -112,\n          118,\n          65,\n          -95,\n          55,\n          98,\n          65,\n          -98,\n          -72,\n          13,\n          65,\n          -96,\n          122,\n          110,\n          65,\n          -93,\n          -63,\n          -17,\n          65,\n          -87,\n          -63,\n          79,\n          65,\n          -103,\n          57,\n          27,\n          65,\n          -88,\n          -83,\n          10,\n          65,\n          -85,\n          -46,\n          59,\n          65,\n          -83,\n          39,\n          -46,\n          65,\n          -93,\n          122,\n   
       120,\n          65,\n          -92,\n          -54,\n          83,\n          65,\n          -96,\n          -58,\n          -109,\n          65,\n          -91,\n          99,\n          102\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 157,\n        \"leftIndex\": [\n          0,\n          1,\n          157,\n          645898671,\n          796798737,\n          726937445,\n          188824932,\n          404241444,\n          5\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          157,\n          791528319,\n          1002375770,\n          954303814,\n          169192694,\n          528495618,\n          40\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": -5733416242359448816,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    },\n    {\n      \"version\": \"3.0\",\n      \"root\": 0,\n      \"maxSize\": 256,\n      \"outputAfter\": 32,\n      \"storeSequenceIndexesEnabled\": false,\n      \"centerOfMassEnabled\": false,\n      \"nodeStoreState\": {\n        \"version\": \"3.0\",\n        \"capacity\": 255,\n        \"compressed\": true,\n        \"cutDimension\": [\n          0,\n          7,\n          159,\n          565896663,\n          451510741,\n          796695071,\n          1003314144,\n          407139442,\n          488855626,\n          893114946,\n          17110251,\n          995033619,\n          782679017,\n          333291970,\n          1060853988,\n          884007451,\n          33972487,\n          864153532,\n          11931073\n        ],\n        \"cutValueData\": [\n          65,\n          -96,\n          113,\n          100,\n          65,\n          
-101,\n          -26,\n          11,\n          66,\n          -120,\n          51,\n          93,\n          65,\n          59,\n          70,\n          13,\n          65,\n          -102,\n          -24,\n          -75,\n          66,\n          3,\n          -118,\n          -106,\n          65,\n          37,\n          -46,\n          -58,\n          65,\n          101,\n          33,\n          -63,\n          65,\n          -87,\n          31,\n          11,\n          65,\n          -71,\n          -5,\n          -18,\n          66,\n          124,\n          51,\n          44,\n          65,\n          43,\n          -108,\n          14,\n          65,\n          63,\n          101,\n          72,\n          65,\n          62,\n          48,\n          -103,\n          65,\n          -128,\n          26,\n          67,\n          65,\n          -90,\n          41,\n          0,\n          65,\n          -87,\n          -62,\n          -56,\n          65,\n          -84,\n          119,\n          -114,\n          65,\n          -72,\n          119,\n          -18,\n          66,\n          75,\n          8,\n          -121,\n          65,\n          38,\n          65,\n          14,\n          65,\n          32,\n          -72,\n          -91,\n          65,\n          37,\n          -52,\n          113,\n          65,\n          49,\n          119,\n          115,\n          65,\n          59,\n          11,\n          2,\n          65,\n          108,\n          12,\n          -58,\n          65,\n          -118,\n          70,\n          79,\n          65,\n          -123,\n          19,\n          -67,\n          65,\n          -98,\n          -78,\n          89,\n          65,\n          -96,\n          20,\n          34,\n          65,\n          -66,\n          100,\n          100,\n          65,\n          -82,\n          -65,\n          -123,\n          65,\n          -51,\n          -75,\n          -7,\n          65,\n          -68,\n          
-12,\n          30,\n          66,\n          78,\n          34,\n          -46,\n          66,\n          82,\n          102,\n          39,\n          65,\n          37,\n          127,\n          92,\n          65,\n          63,\n          40,\n          108,\n          65,\n          50,\n          -106,\n          25,\n          65,\n          71,\n          -95,\n          -100,\n          65,\n          83,\n          -127,\n          -38,\n          65,\n          -124,\n          5,\n          45,\n          65,\n          -122,\n          20,\n          -82,\n          65,\n          -126,\n          -2,\n          -54,\n          65,\n          -118,\n          24,\n          65,\n          65,\n          -91,\n          41,\n          -23,\n          65,\n          -92,\n          -31,\n          -46,\n          65,\n          -89,\n          38,\n          121,\n          65,\n          -95,\n          51,\n          87,\n          65,\n          -76,\n          -87,\n          -36,\n          65,\n          -43,\n          48,\n          -92,\n          66,\n          21,\n          37,\n          5,\n          66,\n          97,\n          -35,\n          4,\n          65,\n          44,\n          14,\n          67,\n          65,\n          49,\n          124,\n          -61,\n          65,\n          99,\n          118,\n          66,\n          65,\n          106,\n          90,\n          40,\n          65,\n          -126,\n          -69,\n          -26,\n          65,\n          -113,\n          -4,\n          -76,\n          65,\n          -112,\n          33,\n          -39,\n          65,\n          -94,\n          -21,\n          -8,\n          65,\n          -98,\n          109,\n          -80,\n          65,\n          -85,\n          89,\n          13,\n          65,\n          -75,\n          -55,\n          90,\n          65,\n          -73,\n          -65,\n          -91,\n          65,\n          -55,\n          -49,\n          
117,\n          65,\n          -40,\n          82,\n          -90,\n          66,\n          0,\n          -2,\n          111,\n          65,\n          34,\n          78,\n          59,\n          65,\n          33,\n          -26,\n          -28,\n          65,\n          79,\n          -41,\n          -63,\n          65,\n          90,\n          125,\n          52,\n          65,\n          100,\n          -8,\n          22,\n          65,\n          120,\n          110,\n          100,\n          65,\n          -118,\n          72,\n          -102,\n          65,\n          -109,\n          43,\n          76,\n          65,\n          -108,\n          -54,\n          -107,\n          65,\n          -105,\n          72,\n          -22,\n          65,\n          -102,\n          -79,\n          -112,\n          65,\n          -85,\n          119,\n          67,\n          65,\n          -87,\n          -91,\n          -38,\n          65,\n          -76,\n          -58,\n          -31,\n          65,\n          -68,\n          4,\n          -30,\n          65,\n          -68,\n          -111,\n          -5,\n          65,\n          -72,\n          18,\n          21,\n          65,\n          -53,\n          -27,\n          -80,\n          65,\n          -43,\n          105,\n          -18,\n          65,\n          -40,\n          -29,\n          6,\n          65,\n          48,\n          126,\n          -56,\n          65,\n          75,\n          -53,\n          50,\n          65,\n          76,\n          -119,\n          113,\n          65,\n          115,\n          -62,\n          0,\n          65,\n          115,\n          36,\n          27,\n          65,\n          -123,\n          -94,\n          -123,\n          65,\n          -113,\n          -99,\n          -39,\n          65,\n          -119,\n          35,\n          112,\n          65,\n          -112,\n          -93,\n          -82,\n          65,\n          -103,\n          7,\n          
46,\n          65,\n          -95,\n          -1,\n          104,\n          65,\n          -82,\n          -57,\n          -72,\n          65,\n          -88,\n          57,\n          -23,\n          65,\n          -68,\n          -11,\n          -66,\n          65,\n          -61,\n          75,\n          -30,\n          65,\n          -58,\n          22,\n          -83,\n          65,\n          -61,\n          35,\n          -100,\n          65,\n          -51,\n          -45,\n          43,\n          65,\n          -55,\n          -124,\n          -4,\n          65,\n          -44,\n          68,\n          112,\n          65,\n          -34,\n          -18,\n          126,\n          65,\n          66,\n          -98,\n          -64,\n          65,\n          79,\n          37,\n          -12,\n          65,\n          67,\n          -53,\n          37,\n          65,\n          120,\n          -44,\n          32,\n          65,\n          116,\n          16,\n          90,\n          65,\n          115,\n          -94,\n          -110,\n          65,\n          -123,\n          7,\n          41,\n          65,\n          -113,\n          60,\n          -5,\n          65,\n          -115,\n          59,\n          36,\n          65,\n          -100,\n          -54,\n          -103,\n          65,\n          -109,\n          -62,\n          111,\n          65,\n          -102,\n          -32,\n          57,\n          65,\n          -81,\n          65,\n          70,\n          65,\n          -83,\n          -93,\n          -124,\n          65,\n          -78,\n          -45,\n          34,\n          65,\n          -68,\n          107,\n          -36,\n          65,\n          -53,\n          102,\n          125,\n          65,\n          -51,\n          40,\n          -94,\n          65,\n          -41,\n          -15,\n          -69,\n          65,\n          -45,\n          57,\n          -96,\n          65,\n          66,\n          -41,\n          
-95,\n          65,\n          73,\n          -76,\n          -109,\n          65,\n          -114,\n          120,\n          50,\n          65,\n          -113,\n          82,\n          18,\n          65,\n          -117,\n          -27,\n          79,\n          65,\n          -109,\n          -107,\n          -89,\n          65,\n          -100,\n          22,\n          85,\n          65,\n          -81,\n          -68,\n          -48,\n          65,\n          -76,\n          -103,\n          97,\n          65,\n          -75,\n          124,\n          -122,\n          65,\n          -80,\n          64,\n          -76,\n          65,\n          -57,\n          79,\n          -34,\n          65,\n          -63,\n          7,\n          -67,\n          65,\n          -49,\n          -125,\n          76,\n          65,\n          -47,\n          48,\n          -125,\n          65,\n          86,\n          -101,\n          105,\n          65,\n          -115,\n          -17,\n          -18,\n          65,\n          -111,\n          25,\n          83,\n          65,\n          -105,\n          -15,\n          -78,\n          65,\n          -57,\n          -35,\n          -94,\n          65,\n          -59,\n          -39,\n          -38,\n          65,\n          -56,\n          -127,\n          1,\n          65,\n          84,\n          -67,\n          117,\n          65,\n          -107,\n          74,\n          -44,\n          65,\n          -111,\n          88,\n          -120,\n          65,\n          -100,\n          -26,\n          -107,\n          65,\n          -62,\n          105,\n          120,\n          65,\n          -110,\n          -120,\n          45,\n          65,\n          -108,\n          -1,\n          -125,\n          65,\n          -52,\n          127,\n          -69\n        ],\n        \"precision\": \"FLOAT_32\",\n        \"root\": 0,\n        \"canonicalAndNotALeaf\": true,\n        \"size\": 159,\n        \"leftIndex\": [\n    
      0,\n          1,\n          159,\n          533659631,\n          934964307,\n          799237246,\n          672312548,\n          631259309,\n          48\n        ],\n        \"rightIndex\": [\n          0,\n          1,\n          159,\n          479132667,\n          890979370,\n          655779444,\n          169911471,\n          168834108,\n          8\n        ],\n        \"nodeFreeIndexPointer\": 0,\n        \"leafFreeIndexPointer\": 0,\n        \"partialTreeStateEnabled\": true\n      },\n      \"boundingBoxCacheFraction\": 0.0,\n      \"partialTreeState\": true,\n      \"seed\": 6721997949784890477,\n      \"id\": 0,\n      \"dimensions\": 8,\n      \"staticSeed\": 0,\n      \"weight\": 0.0,\n      \"hasAuxiliaryData\": false\n    }\n  ],\n  \"executionContext\": {\n    \"parallelExecutionEnabled\": false,\n    \"threadPoolSize\": 0\n  },\n  \"saveTreeStateEnabled\": true,\n  \"saveSamplerStateEnabled\": true,\n  \"saveCoordinatorStateEnabled\": true\n}\n\n"
  },
  {
    "path": "Java/examples/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n\n    <parent>\n        <groupId>software.amazon.randomcutforest</groupId>\n        <artifactId>randomcutforest-parent</artifactId>\n        <version>4.4.0</version>\n    </parent>\n\n    <artifactId>randomcutforest-examples</artifactId>\n    <packaging>jar</packaging>\n\n    <dependencies>\n        <dependency>\n            <groupId>software.amazon.randomcutforest</groupId>\n            <artifactId>randomcutforest-core</artifactId>\n            <version>${project.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>software.amazon.randomcutforest</groupId>\n            <artifactId>randomcutforest-parkservices</artifactId>\n            <version>${project.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>software.amazon.randomcutforest</groupId>\n            <artifactId>randomcutforest-testutils</artifactId>\n            <version>${project.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.core</groupId>\n            <artifactId>jackson-core</artifactId>\n            <version>2.16.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.core</groupId>\n            <artifactId>jackson-databind</artifactId>\n            <version>2.16.0</version>\n        </dependency>\n        <dependency>\n            <groupId>io.protostuff</groupId>\n            <artifactId>protostuff-core</artifactId>\n            <version>1.8.0</version>\n        </dependency>\n        <dependency>\n            <groupId>io.protostuff</groupId>\n            <artifactId>protostuff-runtime</artifactId>\n     
       <version>1.8.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.projectlombok</groupId>\n            <artifactId>lombok</artifactId>\n            <version>1.18.30</version>\n            <scope>provided</scope>\n        </dependency>\n    </dependencies>\n\n    <build>\n        <plugins>\n            <plugin>\n                <artifactId>maven-assembly-plugin</artifactId>\n                <version>3.2.0</version>\n                <configuration>\n                    <descriptorRefs>\n                        <descriptorRef>jar-with-dependencies</descriptorRef>\n                    </descriptorRefs>\n                    <archive>\n                        <manifest>\n                            <mainClass>com.amazon.randomcutforest.examples.Main</mainClass>\n                        </manifest>\n                    </archive>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n</project>\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/Example.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples;\n\npublic interface Example {\n    String command();\n\n    String description();\n\n    void run() throws Exception;\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/Main.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples;\n\nimport java.util.Map;\nimport java.util.TreeMap;\n\nimport com.amazon.randomcutforest.examples.dynamicinference.DynamicDensity;\nimport com.amazon.randomcutforest.examples.dynamicinference.DynamicNearNeighbor;\nimport com.amazon.randomcutforest.examples.serialization.JsonExample;\nimport com.amazon.randomcutforest.examples.serialization.ProtostuffExample;\n\npublic class Main {\n\n    public static final String ARCHIVE_NAME = \"randomcutforest-examples-1.0.jar\";\n\n    public static void main(String[] args) throws Exception {\n        new Main().run(args);\n    }\n\n    private final Map<String, Example> examples;\n    private int maxCommandLength;\n\n    public Main() {\n        examples = new TreeMap<>();\n        maxCommandLength = 0;\n        add(new JsonExample());\n        add(new ProtostuffExample());\n        add(new DynamicDensity());\n        add(new DynamicNearNeighbor());\n    }\n\n    private void add(Example example) {\n        examples.put(example.command(), example);\n        if (maxCommandLength < example.command().length()) {\n            maxCommandLength = example.command().length();\n        }\n    }\n\n    public void run(String[] args) throws Exception {\n        if (args == null || args.length < 1 || args[0].equals(\"-h\") || args[0].equals(\"--help\")) {\n            
printUsage();\n            return;\n        }\n\n        String command = args[0];\n        if (!examples.containsKey(command)) {\n            throw new IllegalArgumentException(\"No such example: \" + command);\n        }\n\n        examples.get(command).run();\n    }\n\n    public void printUsage() {\n        System.out.printf(\"Usage: java -cp %s [example]%n\", ARCHIVE_NAME);\n        System.out.println(\"Examples:\");\n        String formatString = String.format(\"\\t %%%ds - %%s%%n\", maxCommandLength);\n        for (Example example : examples.values()) {\n            System.out.printf(formatString, example.command(), example.description());\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicconfiguration/DynamicSampling.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.dynamicconfiguration;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class DynamicSampling implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new DynamicSampling().run();\n    }\n\n    @Override\n    public String command() {\n        return \"dynamic_sampling\";\n    }\n\n    @Override\n    public String description() {\n        return \"check dynamic sampling\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int dimensions = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_64;\n        int dataSize = 4 * sampleSize;\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n        RandomCutForest forest2 = 
RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n\n        int first_anomalies = 0;\n        int second_anomalies = 0;\n        forest2.setTimeDecay(10 * forest2.getTimeDecay());\n\n        for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n            if (forest.getAnomalyScore(point) > 1.0) {\n                first_anomalies++;\n            }\n            if (forest2.getAnomalyScore(point) > 1.0) {\n                second_anomalies++;\n            }\n            forest.update(point);\n            forest2.update(point);\n        }\n        System.out.println(\"Unusual scores: forest one \" + first_anomalies + \", second one \" + second_anomalies);\n        // should be roughly equal\n\n        first_anomalies = second_anomalies = 0;\n        testData = new NormalMixtureTestData(-3, 40);\n        for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n            if (forest.getAnomalyScore(point) > 1.0) {\n                first_anomalies++;\n            }\n            if (forest2.getAnomalyScore(point) > 1.0) {\n                second_anomalies++;\n            }\n            forest.update(point);\n            forest2.update(point);\n        }\n        System.out.println(\"Unusual scores: forest one \" + first_anomalies + \", second one \" + second_anomalies);\n        // forest2 should adapt faster\n\n        first_anomalies = second_anomalies = 0;\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n        RandomCutForest copyForest = mapper.toModel(mapper.toState(forest));\n        copyForest.setTimeDecay(50 * forest.getTimeDecay());\n        // force an adjustment to catch up\n        testData = new NormalMixtureTestData(-10, -40);\n        int forced_change_anomalies = 0;\n        for (double[] point : 
testData.generateTestData(dataSize, dimensions)) {\n            if (forest.getAnomalyScore(point) > 1.0) {\n                first_anomalies++;\n            }\n            if (forest2.getAnomalyScore(point) > 1.0) {\n                second_anomalies++;\n            }\n            if (copyForest.getAnomalyScore(point) > 1.0) {\n                forced_change_anomalies++;\n            }\n            copyForest.update(point);\n            forest.update(point);\n            forest2.update(point);\n        }\n        // both should show the similar rate of adjustment\n        System.out.println(\"Unusual scores: forest one \" + first_anomalies + \", second one \" + second_anomalies\n                + \", forced (first) \" + forced_change_anomalies);\n\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicconfiguration/DynamicThroughput.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.dynamicconfiguration;\n\nimport java.time.Duration;\nimport java.time.Instant;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class DynamicThroughput implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new DynamicThroughput().run();\n    }\n\n    @Override\n    public String command() {\n        return \"dynamic_caching\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Random Cut Forest as a JSON string\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int dimensions = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_64;\n        int dataSize = 10 * sampleSize;\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        // generate data once to eliminate caching issues\n        testData.generateTestData(dataSize, dimensions);\n        testData.generateTestData(sampleSize, dimensions);\n\n        for (int i = 0; i < 5; i++) {\n\n            RandomCutForest forest = 
RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)\n                    .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n            RandomCutForest forest2 = RandomCutForest.builder().compact(true).dimensions(dimensions).randomSeed(0)\n                    .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n            forest2.setBoundingBoxCacheFraction(i * 0.25);\n\n            int anomalies = 0;\n\n            for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n                double score = forest.getAnomalyScore(point);\n                double score2 = forest2.getAnomalyScore(point);\n\n                if (Math.abs(score - score2) > 1e-10) {\n                    anomalies++;\n                }\n                forest.update(point);\n                forest2.update(point);\n            }\n\n            Instant start = Instant.now();\n\n            for (double[] point : testData.generateTestData(sampleSize, dimensions)) {\n                double score = forest.getAnomalyScore(point);\n                double score2 = forest2.getAnomalyScore(point);\n\n                if (Math.abs(score - score2) > 1e-10) {\n                    anomalies++;\n                }\n                forest.update(point);\n                forest2.update(point);\n            }\n\n            Instant finish = Instant.now();\n\n            // first validate that this was a nontrivial test\n            if (anomalies > 0) {\n                throw new IllegalStateException(\"score mismatch\");\n            }\n\n            System.out.println(\"So far so good! Caching fraction = \" + (i * 0.25) + \", Time =\"\n                    + Duration.between(start, finish).toMillis() + \" ms (note only one forest is changing)\");\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicinference/ConditionalPredictive.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.dynamicinference;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.min;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.PredictiveRandomCutForest;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.summarization.Summarizer;\n\npublic class ConditionalPredictive implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ConditionalPredictive().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Conditional_predictive_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"An example that uses imputation for prediction\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 1;\n        int numberOfTrees = 100;\n        int sampleSize = 256;\n        int dataSize = 40 * sampleSize;\n\n        // 5 dimensions, three are known and 4,5 th unknown (and stochastic)\n        int baseDimensions = 5;\n\n        
PredictiveRandomCutForest forest = new PredictiveRandomCutForest.Builder<>().inputDimensions(baseDimensions)\n                .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .startNormalization(sampleSize / 2).transformMethod(TransformMethod.NORMALIZE).build();\n\n        long seed = 17;\n        new Random().nextLong();\n\n        System.out.println(\"seed = \" + seed);\n        NormalDistribution normal = new NormalDistribution(new Random(seed));\n        Random random = new Random(seed + 10);\n\n        String name = \"predictive_example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n        for (int i = 0; i < dataSize; i++) {\n            float[] recordWithLabel = generateRecordKey(random);\n            float[] record = Arrays.copyOf(recordWithLabel, 5);\n            checkArgument(record[3] == 0, \" should not be filled\");\n            checkArgument(record[4] == 0, \" should not be filled\");\n\n            SampleSummary answer = forest.predict(record, 0, new int[] { 3, 4 });\n            fillInValues(record, random, normal);\n            forest.update(record, 0);\n            double tag = Double.MAX_VALUE;\n            for (int y = 0; y < answer.summaryPoints.length; y++) {\n                double t = Summarizer.L2distance(record, answer.summaryPoints[y]);\n                tag = min(tag, t);\n            }\n\n            file.append(record[0] + \" \" + record[1] + \" \" + record[2] + \" \" + record[3] + \" \" + record[4] + \" \" + tag\n                    + \" \" + recordWithLabel[5] + \"\\n\");\n        }\n        file.close();\n    }\n\n    float[] generateRecordKey(Random random) {\n        float[] record = new float[6];\n        double firstToss = random.nextDouble();\n        double secondToss = random.nextDouble();\n        double thirdToss = random.nextDouble();\n        if (firstToss < 0.8) {\n            record[0] = 1.0f;\n            if (secondToss < 0.8) {\n 
               record[1] = 19;\n                record[5] = 0;\n            } else {\n                record[1] = 25;\n                record[5] = 1;\n            }\n            record[2] = (float) thirdToss * 10;\n        } else {\n            record[0] = 0.0f;\n            if (secondToss < 0.3) {\n                record[1] = 16;\n                record[2] = 12;\n                record[5] = 2;\n            } else {\n                record[1] = 20;\n                record[2] = 4;\n                record[5] = 3;\n            }\n        }\n        return record;\n    }\n\n    void fillInValues(float[] record, Random random, NormalDistribution normal) {\n        if (record[0] < 0.5) {\n            double next = random.nextDouble();\n            record[3] = (float) ((next < 0.5) ? normal.nextDouble(20, 5) : normal.nextDouble(40, 5));\n            record[4] = (float) normal.nextDouble(-30, 3);\n        } else {\n            if (record[1] < 20) {\n                record[3] = (float) normal.nextDouble(30, 10);\n                record[4] = (float) normal.nextDouble(-10, 3);\n            } else {\n                if (record[2] < 6) {\n                    double next = random.nextDouble();\n                    record[3] = (float) ((next < 0.3) ? normal.nextDouble(20, 5) : normal.nextDouble(40, 3));\n                    record[4] = (float) normal.nextDouble(-50, 1);\n                } else {\n                    double next = random.nextDouble();\n                    record[3] = (float) normal.nextDouble(30, 1);\n                    record[4] = (float) ((next < 0.7) ? 
normal.nextDouble(-10, 3) : normal.nextDouble(-30, 5));\n                }\n            }\n        }\n    }\n\n    static class NormalDistribution {\n        private final Random rng;\n        private final double[] buffer;\n        private int index;\n\n        NormalDistribution(Random rng) {\n            this.rng = rng;\n            buffer = new double[2];\n            index = 0;\n        }\n\n        double nextDouble() {\n            if (index == 0) {\n                // apply the Box-Muller transform to produce Normal variates\n                double u = rng.nextDouble();\n                double v = rng.nextDouble();\n                double r = Math.sqrt(-2 * Math.log(u));\n                buffer[0] = r * Math.cos(2 * Math.PI * v);\n                buffer[1] = r * Math.sin(2 * Math.PI * v);\n            }\n\n            double result = buffer[index];\n            index = (index + 1) % 2;\n\n            return result;\n        }\n\n        double nextDouble(double mu, double sigma) {\n            return mu + sigma * nextDouble();\n        }\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicinference/DynamicDensity.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.dynamicinference;\n\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.generate;\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;\nimport static java.lang.Math.PI;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.returntypes.DensityOutput;\n\npublic class DynamicDensity implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new DynamicDensity().run();\n    }\n\n    @Override\n    public String command() {\n        return \"dynamic_sampling\";\n    }\n\n    @Override\n    public String description() {\n        return \"shows two potential use of dynamic density computations; estimating density as well \"\n                + \"as its directional components\";\n    }\n\n    /**\n     * plot the dynamic_density_example using any tool in gnuplot one can plot the\n     * directions to higher density via do for [i=0:358:2] {plot\n     * \"dynamic_density_example\" index (i+1) u 1:2:3:4 w vectors t \"\"} or the raw\n     * density at the points via do for [i=0:358:2] {plot \"dynamic_density_example\"\n     * index i w p pt 7 palette t \"\"}\n     * \n     * @throws 
Exception\n     */\n    @Override\n    public void run() throws Exception {\n        int newDimensions = 2;\n        long randomSeed = 123;\n\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(256)\n                .dimensions(newDimensions).randomSeed(randomSeed).timeDecay(1.0 / 800).centerOfMassEnabled(true)\n                .build();\n        String name = \"dynamic_density_example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n        double[][] data = generate(1000);\n        double[] queryPoint;\n        for (int degree = 0; degree < 360; degree += 2) {\n            for (double[] datum : data) {\n                newForest.update(rotateClockWise(datum, -2 * PI * degree / 360));\n            }\n            for (double[] datum : data) {\n                queryPoint = rotateClockWise(datum, -2 * PI * degree / 360);\n                DensityOutput density = newForest.getSimpleDensity(queryPoint);\n                double value = density.getDensity(0.001, 2);\n                file.append(queryPoint[0] + \" \" + queryPoint[1] + \" \" + value + \"\\n\");\n            }\n            file.append(\"\\n\");\n            file.append(\"\\n\");\n\n            for (double x = -0.95; x < 1; x += 0.1) {\n                for (double y = -0.95; y < 1; y += 0.1) {\n                    DensityOutput density = newForest.getSimpleDensity(new double[] { x, y });\n                    double aboveInY = density.getDirectionalDensity(0.001, 2).low[1];\n                    double belowInY = density.getDirectionalDensity(0.001, 2).high[1];\n                    double toTheLeft = density.getDirectionalDensity(0.001, 2).high[0];\n                    double toTheRight = density.getDirectionalDensity(0.001, 2).low[0];\n                    double len = Math.sqrt(aboveInY * aboveInY + belowInY * belowInY + toTheLeft * toTheLeft\n                            + toTheRight * toTheRight);\n                    file.append(x + \" \" + 
y + \" \" + ((toTheRight - toTheLeft) * 0.05 / len) + \" \"\n                            + ((aboveInY - belowInY) * 0.05 / len) + \"\\n\");\n                }\n            }\n            file.append(\"\\n\");\n            file.append(\"\\n\");\n        }\n        file.close();\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/dynamicinference/DynamicNearNeighbor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.dynamicinference;\n\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.generate;\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;\nimport static java.lang.Math.PI;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.examples.Example;\n\npublic class DynamicNearNeighbor implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new DynamicNearNeighbor().run();\n    }\n\n    @Override\n    public String command() {\n        return \"dynamic_near_neighbor\";\n    }\n\n    @Override\n    public String description() {\n        return \"shows an example of dynamic near neighbor computation where both the data and query are \"\n                + \"evolving in time\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        int newDimensions = 2;\n        long randomSeed = 123;\n\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(256)\n                .dimensions(newDimensions).randomSeed(randomSeed).timeDecay(1.0 / 800).centerOfMassEnabled(true)\n                .storeSequenceIndexesEnabled(true).build();\n\n        String name = \"dynamic_near_neighbor_example\";\n 
       BufferedWriter file = new BufferedWriter(new FileWriter(name));\n        double[][] data = generate(1000);\n        double[] queryPoint = new double[] { 0.5, 0.6 };\n        for (int degree = 0; degree < 360; degree += 2) {\n            for (double[] datum : data) {\n                double[] transformed = rotateClockWise(datum, -2 * PI * degree / 360);\n                file.append(transformed[0] + \" \" + transformed[1] + \"\\n\");\n                newForest.update(transformed);\n            }\n            file.append(\"\\n\");\n            file.append(\"\\n\");\n\n            double[] movingQuery = rotateClockWise(queryPoint, -3 * PI * degree / 360);\n            float[] neighbor = newForest.getNearNeighborsInSample(movingQuery, 1).get(0).point;\n            file.append(movingQuery[0] + \" \" + movingQuery[1] + \" \" + (neighbor[0] - movingQuery[0]) + \" \"\n                    + (neighbor[1] - movingQuery[1]) + \"\\n\");\n            file.append(\"\\n\");\n            file.append(\"\\n\");\n        }\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ForecastWithLimits.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport static java.lang.Math.max;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\nimport java.io.IOException;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.ForecastDescriptor;\nimport com.amazon.randomcutforest.parkservices.RCFCaster;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n/**\n * The following example demonstrates the self calibration of RCFCast. Change\n * various parameters -- we recommend keeping baseDimension = 1 (for single\n * variate forecasting -- multivariate forecasting can be a complicated\n * endeavour. The value shifForViz is for easier visualization.\n *\n * Once the datafile calibration_example is produced consider plotting it. 
For\n * example to use gnuplot, to generate a quick and dirty gif file, consider\n * these commands\n *\n * set terminal gif transparent animate delay 5\n *\n * set output \"example.gif\"\n *\n * do for [i = 0:3000:3] { (all the below in a single line)\n *\n * plot [0:1000][-100:500] \"example\" i 0 u 1:2 w l lc \"black\" t \"Data (seen one\n * at a time)\", \"example\" index (i+3) u 1:2 w l lw 2 lc \"blue\" t \" Online\n * Forecast (future)\", \"example\" i (i+2) u 1:(100*$8) w l lw 2 lc \"magenta\" t\n * \"Interval Accuracy %\", \"example\" index (i+3) u 1:($4-$2):($3-$4) w\n * filledcurves fc \"blue\" fs transparent solid 0.3 noborder t \"Calibrated\n * uncertainty range (future)\", \"example\" index (i+2) u 1:7:6 w filledcurves fc\n * \"brown\" fs transparent solid 0.5 noborder t \"Observed error distribution\n * range (past)\", \"example\" i (i+1) u 1:2 w impulses t \"\", 0 lc \"gray\" t \"\", 100\n * lc \"gray\" t \"\", 80 lc \"gray\" t\"\" }\n *\n * Try different calibrations below to see the precision over the intervals. The\n * struggle of past and new data would become obvious; however the algorithm\n * would self-calibrate eventually. 
Changing the different values for\n * transformDecay() would correspond to different moving average analysis.\n *\n */\npublic class ForecastWithLimits implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ForecastWithLimits().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Calibrated Forecast with Limits\";\n    }\n\n    @Override\n    public String description() {\n        return \"Calibrated Forecast Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 2 * sampleSize;\n\n        // Multi attribute forecasting is less understood than singe attribute\n        // forecasting;\n        // it is not always clear or easy to decide if multi-attribute forecasting is\n        // reasonable\n        // but the code below will run for multi-attribute case.\n        int baseDimensions = 1;\n        int forecastHorizon = 15;\n        int shingleSize = 20;\n        int outputAfter = 64;\n\n        long seed = 2023L;\n\n        double shiftForViz = 200;\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 50, 50, 5, seed,\n                baseDimensions, true);\n\n        int dimensions = baseDimensions * shingleSize;\n        // change this line to try other transforms; but the default is NORMALIZE\n        // uncomment the transformMethod() below\n        TransformMethod transformMethod = TransformMethod.NORMALIZE;\n        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1).numberOfTrees(numberOfTrees)\n                
.shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision)\n                .anomalyRate(0.01).outputAfter(outputAfter).calibration(Calibration.MINIMAL)\n                // the following affects the moving average in many of the transformations\n                // the 0.02 corresponds to a half life of 1/0.02 = 50 observations\n                // this is different from the timeDecay() of RCF; however it is a similar\n                // concept\n                .transformDecay(0.02).forecastHorizon(forecastHorizon).lowerLimit(new float[baseDimensions]) // zero in\n                                                                                                             // every\n                                                                                                             // dimension\n                .initialAcceptFraction(0.125).build();\n\n        String name = \"example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            file.append(j + \" \");\n            for (int k = 0; k < baseDimensions; k++) {\n                dataWithKeys.data[j][k] = max(0, dataWithKeys.data[j][k]);\n                file.append(dataWithKeys.data[j][k] + \" \");\n            }\n            file.append(\"\\n\");\n        }\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            ForecastDescriptor result = caster.process(dataWithKeys.data[j], 0L);\n            printResult(file, result, j, baseDimensions);\n        }\n        file.close();\n\n    }\n\n    void printResult(BufferedWriter file, ForecastDescriptor result, int current, int inputLength) throws IOException {\n        RangeVector forecast = result.getTimedForecast().rangeVector;\n        float[] errorP50 = result.getObservedErrorDistribution().values;\n        float[] upperError = 
result.getObservedErrorDistribution().upper;\n        float[] lowerError = result.getObservedErrorDistribution().lower;\n        DiVector rmse = result.getErrorRMSE();\n        float[] mean = result.getErrorMean();\n        float[] intervalPrecision = result.getIntervalPrecision();\n\n        file.append(current + \" \" + 1000 + \"\\n\");\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n\n        // block corresponding to the past; print the errors\n        for (int i = forecast.values.length / inputLength - 1; i >= 0; i--) {\n            file.append((current - i) + \" \");\n            for (int j = 0; j < inputLength; j++) {\n                int k = i * inputLength + j;\n                file.append(mean[k] + \" \" + rmse.high[k] + \" \" + rmse.low[k] + \" \" + errorP50[k] + \" \" + upperError[k]\n                        + \" \" + lowerError[k] + \" \" + intervalPrecision[k] + \" \");\n            }\n            file.append(\"\\n\");\n        }\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n\n        // block corresponding to the future; the projections and the projected errors\n        for (int i = 0; i < forecast.values.length / inputLength; i++) {\n            file.append((current + i) + \" \");\n            for (int j = 0; j < inputLength; j++) {\n                int k = i * inputLength + j;\n                file.append(forecast.values[k] + \" \" + forecast.upper[k] + \" \" + forecast.lower[k] + \" \");\n            }\n            file.append(\"\\n\");\n        }\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/LowNoisePeriodic.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\n\npublic class LowNoisePeriodic implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new LowNoisePeriodic().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Multi_Dim_example with low noise\";\n    }\n\n    @Override\n    public String description() {\n        return \"Thresholded Multi Dimensional Example with Low Noise\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 8;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        int dataSize = 100000;\n        int initialSegment = 100;\n\n        double[] reference = new double[] { 1.0f, 3.0f, 5.0f, 7.0f, 9.0f, 11.0f, 9.5f, 8.5f, 7.5f, 6.5f, 6.0f, 6.5f,\n                7.0f, 7.5f, 9.5f, 11.0f, 12.5f, 10.5f, 8.5f, 7.0f, 5.0f, 3.0f, 2.0f, 1.0f };\n\n        // the noise should leave 
suffient gap between the consecutive levels\n        double noise = 0.25;\n        // the noise will be amplified by something within [factorRange, 2*factorRange]\n        // increase should lead to increased precision--recall; likewise decrease must\n        // also\n        // lead to decreased precision recall; if the factor = 1, then the anomalies are\n        // information theoretically almost non-existent\n        double anomalyFactor = 10;\n\n        double slope = 0.2 * sampleSize\n                * (Arrays.stream(reference).max().getAsDouble() - Arrays.stream(reference).min().getAsDouble()) / 50000;\n\n        // to analyse without linear shift; comment out the line below and change the\n        // slope above as desired\n        slope = 0;\n\n        double anomalyRate = 0.005;\n        long seed = new Random().nextLong();\n        System.out.println(\" Seed \" + seed);\n        Random rng = new Random(seed);\n        int numAnomalies = 0;\n        int incorrectlyFlagged = 0;\n        int correct = 0;\n        int late = 0;\n\n        // change the transformation below to experiment;\n        // if slope != 0 then NONE will have poor result\n        // both of the difference operations also introduce many errors\n        TransformMethod method = TransformMethod.NORMALIZE;\n\n        int dimensions = shingleSize;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(0)\n                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD).startNormalization(32)\n                .transformMethod(method).outputAfter(32).initialAcceptFraction(0.125)\n                // for 1D data weights should not alter results significantly (if in reasonable\n                // range say [0.1,10]\n                // weights are not recommended for 1D, but retained here for illustration\n    
            // as well as a mechanism to verify that results do not vary significantly\n                .weights(new double[] { 1.0 })\n                // change to transformDecay( 1.0/(desired interval length)) to perform\n                // a moving average smoothing the default is 1.0/sampleSize\n                // .transformDecay(1.0/sampleSize)\n                .build();\n\n        // the following ignore anomalies that are shifted up or down by a fixed amount\n        // from the internal prediction of RCF. Default is 0.001\n\n        // the below will show results like\n        // missed current value 3.0 (say X), intended 1.0 (equiv., X - noise), because\n        // the shift up in the actual was not 2*noise\n\n        // forest.setIgnoreNearExpectedFromAbove( new double [] {2*noise});\n\n        // or to suppress all anomalies that are shifted up from predicted\n        // for any sequence; using Double.MAX_VALUE may cause overflow\n        // forest.setIgnoreNearExpectedFromAbove(new double [] {Float.MAX_VALUE});\n\n        // the below will show results like\n        // missed current value 5.5 (say Y), intended 7.5 (equiv., Y + noise) because\n        // the shift down in the actual was not 2*noise, in effect we suppress all\n        // anomalies\n\n        // forest.setIgnoreNearExpectedFromBelow(new double [] {noise*2});\n\n        // the following suppresses all anomalies that shifted down compared to\n        // predicted\n        // for any sequence\n\n        // forest.setIgnoreNearExpectedFromBelow(new double [] {Float.MAX_VALUE});\n\n        double[] value = new double[] { 0.0 };\n\n        int lastAnomaly = 0;\n\n        for (int count = 0; count < dataSize; count++) {\n            boolean anomaly = false;\n\n            double intendedValue = reference[(count + 4) % reference.length] + slope * count;\n            // extremely periodic signal -- note that there is no periodicity detection\n            value[0] = intendedValue;\n            if 
(rng.nextDouble() < anomalyRate && count > initialSegment) {\n                double anomalyValue = noise * anomalyFactor * (1 + rng.nextDouble());\n                value[0] += (rng.nextDouble() < 0.5) ? -anomalyValue : anomalyValue;\n                anomaly = true;\n                ++numAnomalies;\n            } else {\n                value[0] += (2 * rng.nextDouble() - 1) * noise;\n            }\n\n            AnomalyDescriptor result = forest.process(new double[] { value[0] }, 0);\n\n            if (result.getAnomalyGrade() > 0) {\n                System.out.print(count + \" \" + result.getAnomalyGrade() + \" \");\n                if (result.getRelativeIndex() < 0) {\n                    System.out.print((lastAnomaly == count + result.getRelativeIndex()) + \" \"\n                            + (-result.getRelativeIndex()) + \" steps ago,\");\n                    if (lastAnomaly == count + result.getRelativeIndex()) {\n                        late++;\n                    } else {\n                        incorrectlyFlagged++;\n                    }\n                } else {\n                    System.out.print(anomaly);\n                    if (anomaly) {\n                        correct++;\n                    } else {\n                        incorrectlyFlagged++;\n                    }\n                }\n                System.out.print(\" current value \" + value[0]);\n                if (result.isExpectedValuesPresent()) {\n                    System.out.print(\" expected \" + result.getExpectedValuesList()[0][0] + \" instead of  \"\n                            + result.getPastValues()[0]);\n                }\n                System.out.print(\" score \" + result.getRCFScore() + \" threshold \" + result.getThreshold());\n                System.out.println();\n            } else if (anomaly) {\n                System.out.println(count + \" missed current value \" + value[0] + \", intended \" + intendedValue\n                        + \", score \" + 
result.getRCFScore() + \", threshold \" + result.getThreshold());\n\n            }\n            if (anomaly) {\n                lastAnomaly = count;\n            }\n        }\n\n        System.out.println(\"Anomalies \" + numAnomalies + \",  correct \" + correct + \", late \" + late\n                + \", incorrectly flagged \" + incorrectlyFlagged);\n\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/NumericGLADexample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;\nimport static java.lang.Math.PI;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\n/**\n * The following example demonstrates clustering based anomaly detection for\n * numeric vectors. 
The clustering can use an arbitrary distance metric (but it\n * has no mechanism to verify if the function provided is a metric beyond\n * checking that distances are non-negative; improper implementations of\n * distances can produce uninterpretable results). The clustering corresponds to\n * clustering a recency biased sample of points (using the exact same as RCF)\n * and clustering using multi-centroid method (CURE algorithm).\n *\n * There is a natural question that given that this is the RCF library, how does\n * this clustering based algorithm perform vis-a-vis RCF. First, RCF is\n * preferred/natural for shingled/sequenced data, e.g., in analysis of time\n * series. Simple clustering of shingles do not seem to provide similar benefit.\n * In fact, even for shinglesize 1, which correponds to time dependent\n * population analysis, the recursive decomposition provided by RCF can provide\n * a richer detail (even though RCF naturally considers the L1/Manhattan\n * metric). That recursive decomposition can be viewed as a (randomized) partion\n * based clustering. That distance function is used to compute the DensityOutput\n * in RCF. Multilevel clustering is known to be more useful than simple\n * clustering in many applications. Here we show such an application which\n *\n * (i) shows an example use of GlobalLocalAnomalyDetector (GLAD) for dynamic\n * data as well as\n *\n * (ii) a comparable use using a new ForestMode.DISTANCE exposed for RCF.\n *\n * RCF seems to perform better for this simple two dimensional dynamic case. At\n * the same time, the new clusering based algorithm works for generic types with\n * just a distance function. In applications where distances are meaningful and\n * key, such geo etc., user-defined distance based anomalies can be extremely\n * beneficial. If the data can be mapped to explicit vectors then perhaps RCF\n * and its multi-level partitioning can provide more useful insights.\n *\n * Try the following in a visualizer. 
For example in vanilla gnuplot try\n *\n * set terminal gif transparent animate delay 5\n *\n * set size square\n *\n * set output \"test.gif\"\n *\n * do for [i = 0:359] { plot [-15:15][-15:15] \"clustering_example\" i i u 1:2:3 w\n * p palette pt 7 t \"\" }\n *\n *\n * Try the above/equivalent for setting printFlaggedGLAD = true (setting\n * printFlaggedRCF = false), or to see the data, printData = true. Try changing\n * the number of blades in the fan, the zFactor setting etc.\n */\npublic class NumericGLADexample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new NumericGLADexample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"An example of Global-Local Anomaly Detector on numeric vectors\";\n    }\n\n    @Override\n    public String description() {\n        return \"An example of Global-Local Anomaly Detector on numeric vectors\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        long randomSeed = new Random().nextLong();\n        System.out.println(\"Seed \" + randomSeed);\n        // we would be sending dataSize * 360 vectors\n        int dataSize = 2000;\n        double range = 10.0;\n        int numberOfFans = 3;\n        // corresponds to number of clusters\n        double[][] data = shiftedEllipse(dataSize, 7, range / 2, numberOfFans);\n        int truePos = 0;\n        int falsePos = 0;\n        int falseNeg = 0;\n\n        int truePosRCF = 0;\n        int falsePosRCF = 0;\n        int falseNegRCF = 0;\n\n        int reservoirSize = dataSize;\n\n        // this ensures that the points are flushed out (albeit randomly) duting the\n        // rotation\n        double timedecay = 1.0 / reservoirSize;\n\n        GlobalLocalAnomalyDetector<float[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)\n                .numberOfRepresentatives(3).timeDecay(timedecay).capacity(reservoirSize).build();\n        
reservoir.setGlobalDistance(Summarizer::L2distance);\n\n        double zFactor = 6.0; // six sigma deviation; seems to work best\n        reservoir.setZfactor(zFactor);\n\n        ThresholdedRandomCutForest test = ThresholdedRandomCutForest.builder().dimensions(2).shingleSize(1)\n                .randomSeed(77).timeDecay(timedecay).scoringStrategy(ScoringStrategy.DISTANCE).build();\n        test.setZfactor(zFactor); // using the zFactor for same apples to apples comparison\n\n        String name = \"clustering_example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n        boolean printData = true;\n        boolean printAnomalies = false;\n        // use one or the other prints below\n        boolean printFlaggedRCF = false;\n        boolean printFlaggedGLAD = true;\n\n        Random noiseGen = new Random(randomSeed + 1);\n        for (int degree = 0; degree < 360; degree += 1) {\n            int index = 0;\n            while (index < data.length) {\n                boolean injected = false;\n                float[] vec;\n                if (noiseGen.nextDouble() < 0.005) {\n                    injected = true;\n                    double[] candAnomaly = new double[2];\n                    // generate points along x axis\n                    candAnomaly[0] = (range / 2 * noiseGen.nextDouble() + range / 2);\n                    candAnomaly[1] = 0.1 * (2.0 * noiseGen.nextDouble() - 1.0);\n                    int antiFan = noiseGen.nextInt(numberOfFans);\n                    // rotate to be 90-180 degrees away -- these are decidedly anomalous\n                    vec = toFloatArray(rotateClockWise(candAnomaly,\n                            -2 * PI * (degree + 180 * (1 + 2 * antiFan) / numberOfFans) / 360));\n                    if (printAnomalies) {\n                        file.append(vec[0] + \" \" + vec[1] + \" \" + 0.0 + \"\\n\");\n                    }\n                } else {\n                    vec = 
toFloatArray(rotateClockWise(data[index], -2 * PI * degree / 360));\n                    if (printData) {\n                        file.append(vec[0] + \" \" + vec[1] + \" \" + 0.0 + \"\\n\");\n                    }\n                    ++index;\n                }\n\n                GenericAnomalyDescriptor<float[]> result = reservoir.process(vec, 1.0f, null, true);\n\n                AnomalyDescriptor res = test.process(toDoubleArray(vec), 0L);\n                double grade = res.getAnomalyGrade();\n\n                if (injected) {\n                    if (result.getAnomalyGrade() > 0) {\n                        ++truePos;\n                    } else {\n                        ++falseNeg;\n                    }\n                    if (grade > 0) {\n                        ++truePosRCF;\n                    } else {\n                        ++falseNegRCF;\n                    }\n                } else {\n                    if (result.getAnomalyGrade() > 0) {\n                        ++falsePos;\n                    }\n                    if (grade > 0) {\n                        ++falsePosRCF;\n                    }\n                }\n                if (printFlaggedRCF && grade > 0) {\n                    file.append(vec[0] + \" \" + vec[1] + \" \" + grade + \"\\n\");\n                } else if (printFlaggedGLAD && result.getAnomalyGrade() > 0) {\n                    file.append(vec[0] + \" \" + vec[1] + \" \" + result.getAnomalyGrade() + \"\\n\");\n                }\n            }\n            if (printAnomalies || printData || printFlaggedRCF || printFlaggedGLAD) {\n                file.append(\"\\n\");\n                file.append(\"\\n\");\n            }\n\n            if (falsePos + truePos == 0) {\n                throw new IllegalStateException(\"\");\n            }\n\n            checkArgument(falseNeg + truePos == falseNegRCF + truePosRCF, \" incorrect accounting\");\n            System.out.println(\" at degree \" + degree + \" injected \" + (truePos 
+ falseNeg));\n            System.out.print(\"Precision = \" + precision(truePos, falsePos));\n            System.out.println(\" Recall = \" + recall(truePos, falseNeg));\n            System.out.print(\"RCF Distance Mode Precision = \" + precision(truePosRCF, falsePosRCF));\n            System.out.println(\" RCF Distance Mode Recall = \" + recall(truePosRCF, falseNegRCF));\n\n        }\n    }\n\n    public double[][] shiftedEllipse(int dataSize, int seed, double shift, int fans) {\n        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);\n        double[][] data = generator.generateTestData(dataSize, 2, seed);\n        Random prg = new Random(0);\n        for (int i = 0; i < dataSize; i++) {\n            int nextFan = prg.nextInt(fans);\n            // scale\n            data[i][1] *= 1.0 / fans;\n            data[i][0] *= 2.0;\n            // shift\n            data[i][0] += shift + 1.0 / fans;\n            data[i] = rotateClockWise(data[i], 2 * PI * nextFan / fans);\n        }\n\n        return data;\n    }\n\n    double precision(int truePos, int falsePos) {\n        return (truePos + falsePos > 0) ? 1.0 * truePos / (truePos + falsePos) : 1.0;\n    }\n\n    double recall(int truePos, int falseNeg) {\n        return (truePos + falseNeg > 0) ? 1.0 * truePos / (truePos + falseNeg) : 1.0;\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/RCFCasterExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\nimport java.io.IOException;\nimport java.util.Arrays;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.ForecastDescriptor;\nimport com.amazon.randomcutforest.parkservices.RCFCaster;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n/**\n * The following example demonstrates the self calibration of RCFCast. Change\n * various parameters -- we recommend keeping baseDimension = 1 (for single\n * variate forecasting -- multivariate forecasting can be a complicated\n * endeavour. The value shifForViz is for easier visualization.\n *\n * Once the datafile calibration_example is produced consider plotting it. 
For\n * example to use gnuplot, to generate a quick and dirty gif file, consider\n * these commands set terminal gif transparent animate delay 5 set output\n * \"example.gif\" do for [i = 0:3000:3] { (all the below in a single line) plot\n * [0:1000][-100:500] \"example\" i 0 u 1:2 w l lc \"black\" t \"Data (seen one at a\n * time)\", \"example\" index (i+3) u 1:2 w l lw 2 lc \"blue\" t \" Online Forecast\n * (future)\", \"example\" i (i+2) u 1:(100*$8) w l lw 2 lc \"magenta\" t \"Interval\n * Accuracy %\", \"example\" index (i+3) u 1:($4-$2):($3-$2) w filledcurves fc\n * \"blue\" fs transparent solid 0.3 noborder t \"Calibrated uncertainty range\n * (future)\", \"example\" index (i+2) u 1:7:6 w filledcurves fc \"brown\" fs\n * transparent solid 0.5 noborder t \"Observed error distribution range (past)\",\n * \"example\" i (i+1) u 1:2 w impulses t \"\", 0 lc \"gray\" t \"\", 100 lc \"gray\" t\n * \"\", 80 lc \"gray\" t\"\" }\n *\n * Try different calibrations below to see the precision over the intervals. The\n * struggle of past and new data would become obvious; however the algorithm\n * would self-calibrate eventually. 
Changing the different values for\n * transformDecay() would correspond to different moving average analysis.\n *\n */\npublic class RCFCasterExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new RCFCasterExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Calibrated RCFCast\";\n    }\n\n    @Override\n    public String description() {\n        return \"Calibrated RCFCast Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 2 * sampleSize;\n\n        // Multi attribute forecasting is less understood than singe attribute\n        // forecasting;\n        // it is not always clear or easy to decide if multi-attribute forecasting is\n        // reasonable\n        // but the code below will run for multi-attribute case.\n        int baseDimensions = 2;\n        int forecastHorizon = 15;\n        int shingleSize = 20;\n        int outputAfter = 64;\n\n        long seed = 2023L;\n\n        double[][] fulldata = new double[2 * dataSize][];\n        double shiftForViz = 200;\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 50, 50, 5, seed,\n                baseDimensions, true);\n        for (int i = 0; i < dataSize; i++) {\n            fulldata[i] = Arrays.copyOf(dataWithKeys.data[i], baseDimensions);\n            fulldata[i][0] += shiftForViz;\n        }\n\n        // changing both period and amplitude for fun\n        MultiDimDataWithKey second = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize, 70, 30, 5, seed + 1,\n                baseDimensions, true);\n        for (int i = 0; i < dataSize; i++) {\n   
         fulldata[dataSize + i] = Arrays.copyOf(second.data[i], baseDimensions);\n            fulldata[dataSize + i][0] += shiftForViz;\n        }\n\n        int dimensions = baseDimensions * shingleSize;\n        // change this line to try other transforms; but the default is NORMALIZE\n        // uncomment the transformMethod() below\n        TransformMethod transformMethod = TransformMethod.NORMALIZE;\n        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1).numberOfTrees(numberOfTrees)\n                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision)\n                .anomalyRate(0.01).outputAfter(outputAfter).calibration(Calibration.SIMPLE)\n                // the following affects the moving average in many of the transformations\n                // the 0.02 corresponds to a half life of 1/0.02 = 50 observations\n                // this is different from the timeDecay() of RCF; however it is a similar\n                // concept\n                .transformDecay(0.02).forecastHorizon(forecastHorizon).initialAcceptFraction(0.125)\n                .useRCFCallibration(true).build();\n\n        String name = \"example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n\n        for (int j = 0; j < fulldata.length; j++) {\n            file.append(j + \" \");\n            for (int k = 0; k < baseDimensions; k++) {\n                file.append(fulldata[j][k] + \" \");\n            }\n            file.append(\"\\n\");\n        }\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n\n        for (int j = 0; j < fulldata.length; j++) {\n            ForecastDescriptor result = caster.process(fulldata[j], 0L);\n            printResult(file, result, j, baseDimensions);\n        }\n        file.close();\n\n    }\n\n    void printResult(BufferedWriter file, ForecastDescriptor result, int current, int inputLength) throws IOException {\n        RangeVector 
forecast = result.getTimedForecast().rangeVector;\n        float[] errorP50 = result.getObservedErrorDistribution().values;\n        float[] upperError = result.getObservedErrorDistribution().upper;\n        float[] lowerError = result.getObservedErrorDistribution().lower;\n        DiVector rmse = result.getErrorRMSE();\n        float[] mean = result.getErrorMean();\n        float[] intervalPrecision = result.getIntervalPrecision();\n        file.append(current + \" \" + 1000 + \"\\n\");\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n\n        // block corresponding to the past; print the errors\n        for (int i = forecast.values.length / inputLength - 1; i >= 0; i--) {\n            file.append((current - i) + \" \");\n            for (int j = 0; j < inputLength; j++) {\n                int k = i * inputLength + j;\n                file.append(mean[k] + \" \" + rmse.high[k] + \" \" + rmse.low[k] + \" \" + errorP50[k] + \" \" + upperError[k]\n                        + \" \" + lowerError[k] + \" \" + intervalPrecision[k] + \" \");\n            }\n            file.append(\"\\n\");\n        }\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n\n        // block corresponding to the future; the projections and the projected errors\n        for (int i = 0; i < forecast.values.length / inputLength; i++) {\n            file.append((current + i) + \" \");\n            for (int j = 0; j < inputLength; j++) {\n                int k = i * inputLength + j;\n                file.append(forecast.values[k] + \" \" + forecast.upper[k] + \" \" + forecast.lower[k] + \" \");\n            }\n            file.append(\"\\n\");\n        }\n        file.append(\"\\n\");\n        file.append(\"\\n\");\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ScoringStrategyExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class ScoringStrategyExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ScoringStrategyExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Scoring_strategy_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Scoring Strategy Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this 
parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 1;\n\n        long seed = new Random().nextLong();\n        long count = 0;\n        int dimensions = baseDimensions * shingleSize;\n        TransformMethod transformMethod = TransformMethod.NORMALIZE;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).scoringStrategy(ScoringStrategy.EXPECTED_INVERSE_DEPTH)\n                .transformMethod(transformMethod).outputAfter(32).initialAcceptFraction(0.125).build();\n        ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).scoringStrategy(ScoringStrategy.MULTI_MODE)\n                .transformMethod(transformMethod).outputAfter(32).initialAcceptFraction(0.125).build();\n        ThresholdedRandomCutForest third = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).scoringStrategy(ScoringStrategy.MULTI_MODE_RECALL)\n                .transformMethod(transformMethod).outputAfter(32).initialAcceptFraction(0.125).build();\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                100, 5, seed, baseDimensions);\n\n        int keyCounter = 0;\n        for (double[] point : dataWithKeys.data) {\n\n            AnomalyDescriptor 
result = forest.process(point, 0L);\n            AnomalyDescriptor multi_mode = second.process(point, 0L);\n            AnomalyDescriptor multi_mode_recall = third.process(point, 0L);\n\n            checkArgument(Math.abs(result.getRCFScore() - multi_mode.getRCFScore()) < 1e-10, \" error\");\n            checkArgument(Math.abs(result.getRCFScore() - multi_mode_recall.getRCFScore()) < 1e-10, \" error\");\n\n            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                System.out\n                        .println(\"timestamp \" + count + \" CHANGE \" + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                ++keyCounter;\n            }\n\n            printResult(\"MULTI_MODE_RECALL\", multi_mode_recall, count, baseDimensions);\n            printResult(\"EXPECTED_INVERSE_DEPTH\", result, count, baseDimensions);\n            printResult(\"MULTI_MODE\", multi_mode, count, baseDimensions);\n            ++count;\n        }\n    }\n\n    void printResult(String description, AnomalyDescriptor result, long count, int baseDimensions) {\n        if (result.getAnomalyGrade() != 0) {\n            System.out.print(description + \" timestamp \" + count + \" RESULT value \");\n            for (int i = 0; i < baseDimensions; i++) {\n                System.out.print(result.getCurrentInput()[i] + \", \");\n            }\n            System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n            if (result.getRelativeIndex() != 0) {\n                System.out.print(-result.getRelativeIndex() + \" steps ago, \");\n            }\n            if (result.isExpectedValuesPresent()) {\n                if (result.getRelativeIndex() != 0) {\n                    System.out.print(\"instead of \");\n                    for (int i = 0; i < baseDimensions; i++) {\n                        System.out.print(result.getPastValues()[i] + \", \");\n                    
}\n                    System.out.print(\"expected \");\n                    for (int i = 0; i < baseDimensions; i++) {\n                        System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                        if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                            System.out.print(\n                                    \"( \" + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                        }\n                    }\n                } else {\n                    System.out.print(\"expected \");\n                    for (int i = 0; i < baseDimensions; i++) {\n                        System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                        if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {\n                            System.out.print(\"( \" + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i])\n                                    + \" ) \");\n                        }\n                    }\n                }\n            } else {\n                System.out.print(\"insufficient data to provide expected values\");\n            }\n            System.out.println();\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/SequentialAnomalyExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.SequentialAnalysis;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class SequentialAnomalyExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new SequentialAnomalyExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Sequential_analysis_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Sequential Analysis Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 8;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 2;\n\n        long seed = 
new Random().nextLong();\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                100, 5, seed, baseDimensions);\n        double timeDecay = 1.0 / (10 * sampleSize);\n\n        List<AnomalyDescriptor> anomalies = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,\n                sampleSize, timeDecay, TransformMethod.NONE, seed);\n        int keyCounter = 0;\n\n        for (AnomalyDescriptor result : anomalies) {\n\n            // first print the changes\n            while (keyCounter < dataWithKeys.changeIndices.length\n                    && dataWithKeys.changeIndices[keyCounter] <= result.getInternalTimeStamp()) {\n                System.out.println(\"timestamp \" + dataWithKeys.changeIndices[keyCounter] + \" CHANGE \"\n                        + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                ++keyCounter;\n            }\n\n            if (result.getAnomalyGrade() != 0) {\n                System.out.print(\"timestamp \" + result.getInternalTimeStamp() + \" RESULT value \");\n                for (int i = 0; i < baseDimensions; i++) {\n                    System.out.print(result.getCurrentInput()[i] + \", \");\n                }\n                System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n                if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                    System.out.print(-result.getRelativeIndex() + \" step(s) ago, \");\n                }\n                if (result.isExpectedValuesPresent()) {\n                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                        System.out.print(\"instead of \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            
System.out.print(result.getPastValues()[i] + \", \");\n                        }\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    } else {\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    }\n                } else {\n                    System.out.print(\"insufficient data to provide expected values\");\n                }\n                System.out.println();\n            }\n\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/SequentialForecastExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.SequentialAnalysis;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class SequentialForecastExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new SequentialForecastExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Sequential_analysis_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Sequential Analysis Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 8;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        int dataSize = 4 * sampleSize;\n\n        // the code will run if the following is changed, but interpretations of\n        // multivariate forecasting vary\n        int baseDimensions = 1;\n\n        long seed = new 
Random().nextLong();\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                100, 5, seed, baseDimensions);\n        double timeDecay = 1.0 / (10 * sampleSize);\n        int forecastHorizon = 2 * shingleSize;\n        int errorHorizon = 10 * forecastHorizon;\n\n        List<AnomalyDescriptor> anomalies = SequentialAnalysis.forecastWithAnomalies(dataWithKeys.data, shingleSize,\n                sampleSize, timeDecay, TransformMethod.NONE, forecastHorizon, errorHorizon, 42L).getAnomalies();\n        int keyCounter = 0;\n\n        for (AnomalyDescriptor result : anomalies) {\n\n            // first print the changes\n            while (keyCounter < dataWithKeys.changeIndices.length\n                    && dataWithKeys.changeIndices[keyCounter] <= result.getInternalTimeStamp()) {\n                System.out.println(\"timestamp \" + dataWithKeys.changeIndices[keyCounter] + \" CHANGE \"\n                        + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                ++keyCounter;\n            }\n\n            if (result.getAnomalyGrade() != 0) {\n                System.out.print(\"timestamp \" + result.getInternalTimeStamp() + \" RESULT value \");\n                for (int i = 0; i < baseDimensions; i++) {\n                    System.out.print(result.getCurrentInput()[i] + \", \");\n                }\n                System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n                if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                    System.out.print(-result.getRelativeIndex() + \" step(s) ago, \");\n                }\n                if (result.isExpectedValuesPresent()) {\n                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                 
       System.out.print(\"instead of \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getPastValues()[i] + \", \");\n                        }\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    } else {\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    }\n                } else {\n                    System.out.print(\"insufficient data to provide expected values\");\n                }\n                System.out.println();\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/StringGLADexample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport static java.lang.Math.min;\n\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector;\nimport com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * A clustering based anomaly detection for strings for two characters using\n * edit distance. 
Note that the algorithm does not have any inbuilt test for\n * verifying if the distance is indeed a metric (other than checking for\n * non-negative values).\n */\npublic class StringGLADexample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new StringGLADexample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Clustering based Global-Local Anomaly Detection Example for strings\";\n    }\n\n    @Override\n    public String description() {\n        return \"Clustering based Global-Local Anomaly Detection Example for strings\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        long seed = new Random().nextLong();\n        System.out.println(\"seed : \" + seed);\n        Random random = new Random(seed);\n        int stringSize = 70;\n        int numberOfStrings = 200000;\n        int reservoirSize = 2000;\n        boolean changeInMiddle = true;\n        // the following should be away from 0.5 in [0.5,1]\n        double gapProbOfA = 0.85;\n\n        double anomalyRate = 0.05;\n        char[][] points = new char[numberOfStrings][];\n        boolean[] injected = new boolean[numberOfStrings];\n        boolean printClusters = true;\n        boolean printFalseNeg = false;\n        boolean printFalsePos = false;\n        int numberOfInjected = 0;\n\n        for (int i = 0; i < numberOfStrings; i++) {\n            if (random.nextDouble() < anomalyRate && i > reservoirSize / 2) {\n                injected[i] = true;\n                ++numberOfInjected;\n                points[i] = getABArray(stringSize + 10, 0.5, random, false, 0);\n            } else {\n                boolean flag = changeInMiddle && random.nextDouble() < 0.25;\n                double prob = (random.nextDouble() < 0.5) ? 
gapProbOfA : (1 - gapProbOfA);\n                points[i] = getABArray(stringSize, prob, random, flag, 0.25 * i / numberOfStrings);\n            }\n        }\n\n        System.out.println(\"Injected \" + numberOfInjected + \" 'anomalies' in \" + points.length);\n        int recluster = reservoirSize / 2;\n\n        BiFunction<char[], char[], Double> dist = (a, b) -> toyD(a, b, stringSize / 2.0);\n        GlobalLocalAnomalyDetector<char[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)\n                .numberOfRepresentatives(5).timeDecay(1.0 / reservoirSize).capacity(reservoirSize).build();\n        reservoir.setGlobalDistance(dist);\n        // for non-geometric bounded distances, such as for strings, keep the factor at\n        // 3.0 or below\n        // minimum is 2.5, set as default; uncomment to change\n        // reservoir.setZfactor(DEFAULT_Z_FACTOR);\n\n        int truePos = 0;\n        int falsePos = 0;\n        int falseNeg = 0;\n        for (int y = 0; y < points.length; y++) {\n\n            GenericAnomalyDescriptor<char[]> result = reservoir.process(points[y], 1.0f, null, true);\n            if (result.getAnomalyGrade() > 0) {\n                if (!injected[y]) {\n                    ++falsePos;\n                    List<Weighted<char[]>> list = result.getRepresentativeList();\n                    if (printFalsePos) {\n                        System.out.println(result.getScore() + \" \" + injected[y] + \" at \" + y + \" dist \"\n                                + dist.apply(points[y], list.get(0).index) + \" \" + result.getThreshold());\n                        printCharArray(list.get(0).index);\n                        System.out.println();\n                        printCharArray(points[y]);\n                        System.out.println();\n                    }\n                } else {\n                    ++truePos;\n                }\n            } else if (injected[y]) {\n                ++falseNeg;\n                if 
(printFalseNeg) {\n                    System.out.println(\" missed \" + result.getScore() + \"  \" + result.getThreshold());\n                }\n            }\n\n            if (printClusters && y % 10000 == 0 && y > 0) {\n                System.out.println(\" at \" + y);\n                printClusters(reservoir.getClusters());\n\n            }\n\n            if (10 * y % points.length == 0 && y > 0) {\n                System.out.println(\" at \" + y);\n                System.out.println(\"Precision = \" + precision(truePos, falsePos));\n                System.out.println(\"Recall = \" + recall(truePos, falseNeg));\n            }\n        }\n        System.out.println(\" Final: \");\n        System.out.println(\"Precision = \" + precision(truePos, falsePos));\n        System.out.println(\"Recall = \" + recall(truePos, falseNeg));\n    }\n\n    public static double toyD(char[] a, char[] b, double u) {\n        if (a.length > b.length) {\n            return toyD(b, a, u);\n        }\n        double[][] dist = new double[2][b.length + 1];\n        for (int j = 0; j < b.length + 1; j++) {\n            dist[0][j] = j;\n        }\n\n        for (int i = 1; i < a.length + 1; i++) {\n            dist[1][0] = i;\n            for (int j = 1; j < b.length + 1; j++) {\n                double t = dist[0][j - 1] + ((a[i - 1] == b[j - 1]) ? 
0 : 1);\n                dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);\n            }\n            for (int j = 0; j < b.length + 1; j++) {\n                dist[0][j] = dist[1][j];\n            }\n        }\n        return dist[1][b.length];\n    }\n\n    // colors\n    public static final String ANSI_RESET = \"\\u001B[0m\";\n    public static final String ANSI_RED = \"\\u001B[31m\";\n    public static final String ANSI_BLUE = \"\\u001B[34m\";\n\n    public static void printCharArray(char[] a) {\n        for (int i = 0; i < a.length; i++) {\n            if (a[i] == '-') {\n                System.out.print(ANSI_RED + a[i] + ANSI_RESET);\n            } else {\n                System.out.print(ANSI_BLUE + a[i] + ANSI_RESET);\n            }\n        }\n\n    }\n\n    public void printClusters(List<ICluster<char[]>> summary) {\n        for (int i = 0; i < summary.size(); i++) {\n            double weight = summary.get(i).getWeight();\n            System.out.println(\"Cluster \" + i + \" representatives, weight \"\n                    + ((float) Math.round(1000 * weight) * 0.001) + \" avg radius \" + summary.get(i).averageRadius());\n            List<Weighted<char[]>> representatives = summary.get(i).getRepresentatives();\n            for (int j = 0; j < representatives.size(); j++) {\n                double t = representatives.get(j).weight;\n                t = Math.round(1000.0 * t / weight) * 0.001;\n                System.out\n                        .print(\"relative weight \" + (float) t + \" length \" + representatives.get(j).index.length + \" \");\n                printCharArray(representatives.get(j).index);\n                System.out.println();\n            }\n            System.out.println();\n        }\n    }\n\n    public char[] getABArray(int size, double probabilityOfA, Random random, Boolean changeInMiddle, double fraction) {\n\n        int newSize = size + random.nextInt(size / 5);\n        char[] a = new char[newSize];\n        for 
(int i = 0; i < newSize; i++) {\n            double toss = (changeInMiddle && (i > (1 - fraction) * newSize || i < newSize * fraction))\n                    ? (1 - probabilityOfA)\n                    : probabilityOfA;\n            if (random.nextDouble() < toss) {\n                a[i] = '-';\n            } else {\n                a[i] = '_';\n            }\n        }\n        return a;\n    }\n\n    double precision(int truePos, int falsePos) {\n        return (truePos + falsePos > 0) ? 1.0 * truePos / (truePos + falsePos) : 1.0;\n    }\n\n    double recall(int truePos, int falseNeg) {\n        return (truePos + falseNeg > 0) ? 1.0 * truePos / (truePos + falseNeg) : 1.0;\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/Thresholded1DGaussianMix.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class Thresholded1DGaussianMix implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new Thresholded1DGaussianMix().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_1D_Gaussian_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Thresholded one dimensional gaussian mixture Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger 
than 5 for this example\n        int baseDimensions = 1;\n\n        int count = 0;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.TIME_AUGMENTED)\n                .build();\n\n        long seed = new Random().nextLong();\n\n        System.out.println(\"Anomalies would correspond to a run, based on a change of state.\");\n        System.out.println(\"Each change is normal <-> anomaly;  so after the second change the data is normal\");\n        System.out.println(\"seed = \" + seed);\n        NormalMixtureTestData normalMixtureTestData = new NormalMixtureTestData(10, 1.0, 50, 2.0, 0.01, 0.1);\n        MultiDimDataWithKey dataWithKeys = normalMixtureTestData.generateTestDataWithKey(dataSize, 1, 0);\n\n        int keyCounter = 0;\n        for (double[] point : dataWithKeys.data) {\n\n            AnomalyDescriptor result = forest.process(point, count);\n\n            if (keyCounter < dataWithKeys.changeIndices.length\n                    && result.getInternalTimeStamp() == dataWithKeys.changeIndices[keyCounter]) {\n                System.out.println(\"timestamp \" + (result.getInputTimestamp()) + \" CHANGE\");\n                ++keyCounter;\n            }\n\n            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                System.out.println(\"timestamp \" + (count) + \" CHANGE \");\n                ++keyCounter;\n            }\n\n            if (result.getAnomalyGrade() != 0) {\n                System.out.print(\"timestamp \" + (count) + \" RESULT value \");\n                for (int i = 0; i < baseDimensions; i++) {\n                    System.out.print(result.getCurrentInput()[i] + 
\", \");\n                }\n                System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n\n                if (result.isExpectedValuesPresent()) {\n                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                        System.out.print(-result.getRelativeIndex() + \" steps ago, instead of \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getPastValues()[i] + \", \");\n                        }\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    } else {\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    }\n                }\n                System.out.println();\n            }\n            ++count;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedForecast.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport static java.lang.Math.min;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class ThresholdedForecast implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new com.amazon.randomcutforest.examples.parkservices.ThresholdedForecast().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Forecast_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Example of Forecast using Thresholded RCF\";\n    }\n\n    @Override\n    public void run() throws Exception {\n\n        int sampleSize = 256;\n        int baseDimensions = 1;\n\n        long seed = 100L;\n\n        int length = 4 * sampleSize;\n        int outputAfter = 128;\n\n        // as the ratio of amplitude (signal) to noise is changed, the estimation 
range\n        // in forecast\n        // (or any other inference) should increase\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,\n                baseDimensions, true);\n        System.out.println(dataWithKeys.changes.length + \" anomalies injected \");\n\n        // horizon/lookahead can be larger than shingleSize for transformations that do\n        // not\n        // involve differencing -- but longer horizon would have larger error\n        int horizon = 60;\n        int shingleSize = 30;\n\n        // if the useSlope is set as true then it is recommended to use NORMALIZE or\n        // SUBTRACT_MA as\n        // transformation methods to adjust to the linear drift\n\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(baseDimensions * shingleSize).precision(Precision.FLOAT_32).randomSeed(seed)\n                .internalShinglingEnabled(true).shingleSize(shingleSize).outputAfter(outputAfter)\n                .transformMethod(TransformMethod.NORMALIZE).build();\n\n        if (forest.getTransformMethod() == TransformMethod.NORMALIZE_DIFFERENCE\n                || forest.getTransformMethod() == TransformMethod.DIFFERENCE) {\n            // single step differencing will not produce stable forecasts over long horizons\n            horizon = min(horizon, shingleSize / 2 + 1);\n        }\n        double[] error = new double[horizon];\n        double[] lowerError = new double[horizon];\n        double[] upperError = new double[horizon];\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            // forecast first; change centrality to achieve a control over the sampling\n            // setting centrality = 0 would correspond to random sampling from the leaves\n            // reached by\n            // impute visitor\n\n            // the following prints\n            // <sequenceNo> <predicted_next_value> 
<likely_upper_bound> <likely_lower_bound>\n            // where the sequence number varies between next-to-be-read .. (next + horizon\n            // -1 )\n            //\n            // Every new element corresponds to a new set of horizon forecasts; we measure\n            // the\n            // errors keeping the leadtime fixed.\n            //\n            // verify that forecast is done before seeing the actual value (in the process()\n            // function)\n            //\n\n            TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);\n            RangeVector forecast = extrapolate.rangeVector;\n            for (int i = 0; i < horizon; i++) {\n                System.out.println(\n                        (j + i) + \" \" + forecast.values[i] + \" \" + forecast.upper[i] + \" \" + forecast.lower[i]);\n                // compute errors\n                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {\n                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];\n                    error[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];\n                    lowerError[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];\n                    upperError[i] += t * t;\n                }\n            }\n            System.out.println();\n            System.out.println();\n            forest.process(dataWithKeys.data[j], j);\n        }\n\n        System.out.println(forest.getTransformMethod().name() + \" RMSE (as horizon increases) \");\n        for (int i = 0; i < horizon; i++) {\n            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Lower (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = lowerError[i] / 
(dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Upper (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n\n    }\n\n}"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedImpute.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class ThresholdedImpute implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ThresholdedImpute().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Imputation_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Thresholded Imputation Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n       
 int baseDimensions = 1;\n\n        long count = 0;\n\n        int dropped = 0;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).imputationMethod(ImputationMethod.RCF)\n                .forestMode(ForestMode.STREAMING_IMPUTE).transformMethod(TransformMethod.NORMALIZE_DIFFERENCE)\n                .autoAdjust(true).build();\n\n        long seed = new Random().nextLong();\n        Random noisePRG = new Random(0);\n\n        System.out.println(\"seed = \" + seed);\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                100, 5, seed, baseDimensions);\n\n        // as we loop over the data we will be dropping observations with probability\n        // 0.2\n        // note that as a result the predictor correct method would like be more\n        // error-prone\n        // note that estimation of the number of entries to be imputed is also another\n        // estimation\n        // therefore the overall method may have runaway effects if more values are\n        // dropped.\n\n        int keyCounter = 0;\n        for (double[] point : dataWithKeys.data) {\n\n            if (noisePRG.nextDouble() < 0.2 && !((keyCounter < dataWithKeys.changeIndices.length\n                    && count == dataWithKeys.changeIndices[keyCounter]))) {\n                dropped++;\n                if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                    System.out.println(\" dropped sequence \" + (count) + \" INPUT \" + Arrays.toString(point) + \" CHANGE \"\n                            + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                
}\n            } else {\n                long newStamp = 100 * count + 2 * noisePRG.nextInt(10) - 5;\n                AnomalyDescriptor result = forest.process(point, newStamp);\n\n                if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                    System.out.println(\"sequence \" + (count) + \" INPUT \" + Arrays.toString(point) + \" CHANGE \"\n                            + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                    ++keyCounter;\n                }\n\n                if (result.getAnomalyGrade() != 0) {\n                    System.out.print(\"sequence \" + (count) + \" RESULT value \");\n                    for (int i = 0; i < baseDimensions; i++) {\n                        System.out.print(result.getCurrentInput()[i] + \", \");\n                    }\n                    System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n\n                    if (result.isExpectedValuesPresent()) {\n                        if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                            System.out.print(-result.getRelativeIndex() + \" steps ago, instead of \");\n                            for (int i = 0; i < baseDimensions; i++) {\n                                System.out.print(result.getPastValues()[i] + \", \");\n                            }\n                            System.out.print(\"expected \");\n                            for (int i = 0; i < baseDimensions; i++) {\n                                System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                                if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                                    System.out.print(\n                                            \"( \" + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i])\n                                            
        + \" ) \");\n                                }\n                            }\n                        } else {\n                            System.out.print(\"expected \");\n                            for (int i = 0; i < baseDimensions; i++) {\n                                System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                                if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {\n                                    System.out.print(\n                                            \"( \" + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i])\n                                                    + \" ) \");\n                                }\n                            }\n                        }\n                    }\n                    System.out.println();\n                }\n            }\n            ++count;\n        }\n        System.out.println(\"Dropped \" + dropped + \" out of \" + count);\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedInternalShinglingExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class ThresholdedInternalShinglingExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ThresholdedInternalShinglingExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Multi_Dim_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Thresholded Multi Dimensional Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = 
Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 1;\n\n        long count = 0;\n        int dimensions = baseDimensions * shingleSize;\n        TransformMethod transformMethod = TransformMethod.NORMALIZE_DIFFERENCE;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                .weightTime(0).transformMethod(transformMethod).normalizeTime(true).outputAfter(32)\n                .initialAcceptFraction(0.125).build();\n        ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                .forestMode(ForestMode.TIME_AUGMENTED).weightTime(0).transformMethod(transformMethod)\n                .normalizeTime(true).outputAfter(32).initialAcceptFraction(0.125).build();\n\n        // ensuring that the parameters are the same; otherwise the grades/scores cannot\n        // be the same\n        // weighTime has to be 0\n        forest.setLowerThreshold(1.1);\n        second.setLowerThreshold(1.1);\n        forest.setHorizon(0.75);\n        second.setHorizon(0.75);\n\n        long seed = new Random().nextLong();\n        Random noise = new Random(0);\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = 
ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                100, 5, seed, baseDimensions);\n\n        int keyCounter = 0;\n        for (double[] point : dataWithKeys.data) {\n\n            // idea is that we expect the arrival order to be roughly 100 apart (say\n            // seconds)\n            // then the noise corresponds to a jitter; one can try TIME_AUGMENTED and\n            // .normalizeTime(true)\n\n            long timestamp = 100 * count + noise.nextInt(10) - 5;\n            AnomalyDescriptor result = forest.process(point, timestamp);\n            AnomalyDescriptor test = second.process(point, timestamp);\n            checkArgument(Math.abs(result.getRCFScore() - test.getRCFScore()) < 1e-10, \" error\");\n            checkArgument(Math.abs(result.getAnomalyGrade() - test.getAnomalyGrade()) < 1e-10, \" error\");\n\n            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                System.out\n                        .println(\"timestamp \" + count + \" CHANGE \" + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                ++keyCounter;\n            }\n\n            if (result.getAnomalyGrade() != 0) {\n                System.out.print(\"timestamp \" + count + \" RESULT value \" + result.getInternalTimeStamp() + \" \");\n                for (int i = 0; i < baseDimensions; i++) {\n                    System.out.print(result.getCurrentInput()[i] + \", \");\n                }\n                System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n                if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                    System.out.print(-result.getRelativeIndex() + \" steps ago, \");\n                }\n                if (result.isExpectedValuesPresent()) {\n                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                        
System.out.print(\"instead of \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getPastValues()[i] + \", \");\n                        }\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    } else {\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getCurrentInput()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getCurrentInput()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    }\n                } else {\n                    System.out.print(\"insufficient data to provide expected values\");\n                }\n                System.out.println();\n            }\n\n            ++count;\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedMultiDimensionalExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.config.CorrectionMode;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class ThresholdedMultiDimensionalExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ThresholdedMultiDimensionalExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Multi_Dim_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Thresholded Multi Dimensional Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 8;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be 
larger than 5 for this example\n        int baseDimensions = 3;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder()\n                // dimensions is shingleSize x the number of base dimensions in input (in this\n                // case 3)\n                .dimensions(dimensions)\n                // shingle size is the context (sliding) window of last contiguous observations\n                .shingleSize(shingleSize)\n                // fixed random seed would produce deterministic/reproducible results\n                .randomSeed(0)\n                // use about 50; more than 100 may not be useful\n                .numberOfTrees(numberOfTrees)\n                // samplesize should be large enough to cover the desired phenomenon; for a\n                // 5-minute\n                // interval reading if one is interested investigating anomalies over a weekly\n                // pattern\n                // there are 12 * 24 * 7 different\n                // 5-minute intervals in a week. 
That being said, larger samplesize is a larger\n                // model.\n                .sampleSize(sampleSize)\n                // shingling is now performed internally by default -- best not to change it\n                // .internalShinglingEnabled(true)\n                // change to different streaming transformations that are performed on the fly\n                // note the transformation affects the characteristics of the anomaly that can\n                // be\n                // detected\n                .transformMethod(TransformMethod.NORMALIZE)\n                // the following would increase precision at the cost of recall\n                // for the reverse, try ScoringStrategy.MULTI_MODE_RECALL\n                // the default strategy is an attempted goldilocks version and may not work\n                // for all data\n                // .scoringStrategy(ScoringStrategy.MULTI_MODE)\n                // the following will learn data (concept) drifts (also referered to as level\n                // shifts) automatically and\n                // stop repeated alarms. 
The reverse is also true -- to detect level shifts, set\n                // the following to false\n                // and test for continuous alarms\n                .autoAdjust(true)\n                // the following is a much coarser tool to eliminate repeated alarms\n                // the descriptor below 'result' will contain information about different\n                // correction/suppression modes\n                // .alertOnce(true)\n                .build();\n\n        long seed = new Random().nextLong();\n        System.out.println(\"seed = \" + seed);\n\n        // basic amplitude of the waves -- the parameter will be randomly scaled up\n        // between 0-20 percent\n        double amplitude = 100.0;\n\n        // the amplitude of random noise it will be +ve/-ve uniformly at random\n        double noise = 5.0;\n\n        // the following controls the ratio of anomaly magnitude to noise\n        // notice amplitude/noise would determine signal-to-noise ratio\n        double anomalyFactor = 5;\n\n        // the following determines if a random linear trend should be added\n        boolean useSlope = false;\n\n        // provide explanations and alternatives considered for non-anomalies\n        boolean verboseSupression = true;\n\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 24,\n                amplitude, noise, seed, baseDimensions, anomalyFactor, useSlope);\n        int keyCounter = 0;\n        int count = 0;\n        for (double[] point : dataWithKeys.data) {\n\n            AnomalyDescriptor result = forest.process(point, 0L);\n\n            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                System.out.println(\n                        \"timestamp \" + (count) + \" CHANGE \" + Arrays.toString(dataWithKeys.changes[keyCounter]));\n                
++keyCounter;\n            }\n\n            if (result.getAnomalyGrade() != 0) {\n                System.out.print(\"timestamp \" + (count) + \" RESULT value \");\n                for (int i = 0; i < baseDimensions; i++) {\n                    System.out.print(point[i] + \", \");\n                }\n                System.out.print(\"score \" + result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n\n                if (result.isExpectedValuesPresent()) {\n                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                        System.out.print(-result.getRelativeIndex() + \" steps ago, instead of \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getPastValues()[i] + \", \");\n                        }\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (result.getPastValues()[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( \"\n                                        + (result.getPastValues()[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    } else {\n                        System.out.print(\"expected \");\n                        for (int i = 0; i < baseDimensions; i++) {\n                            System.out.print(result.getExpectedValuesList()[0][i] + \", \");\n                            if (point[i] != result.getExpectedValuesList()[0][i]) {\n                                System.out.print(\"( inferred change = \"\n                                        + (point[i] - result.getExpectedValuesList()[0][i]) + \" ) \");\n                            }\n                        }\n                    
}\n                }\n                System.out.println();\n            } else if (verboseSupression && result.getCorrectionMode() != CorrectionMode.NONE) {\n                System.out.println(count + \" corrected via \" + result.getCorrectionMode().name());\n            }\n            ++count;\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedPredictive.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class ThresholdedPredictive implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new com.amazon.randomcutforest.examples.parkservices.ThresholdedPredictive().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Predictive_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Example of predictive forecast across multiple time series using ThresholdedRCF\";\n    }\n\n    @Override\n    public void run() throws Exception {\n\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int length = 4 * sampleSize;\n        int outputAfter = 128;\n\n        long seed = 2022L;\n        Random random = new Random(seed);\n        int numberOfModels = 10;\n        MultiDimDataWithKey[] dataWithKeys 
= new MultiDimDataWithKey[numberOfModels];\n        ThresholdedRandomCutForest[] forests = new ThresholdedRandomCutForest[numberOfModels];\n        int[] period = new int[numberOfModels];\n\n        double alertThreshold = 300;\n        double lastActualSum = 0;\n\n        int anomalies = 0;\n        for (int k = 0; k < numberOfModels; k++) {\n            period[k] = (int) Math.round(40 + 30 * random.nextDouble());\n            dataWithKeys[k] = ShingledMultiDimDataWithKeys.getMultiDimData(length, period[k], 100, 10, seed,\n                    baseDimensions, false);\n            anomalies += dataWithKeys[k].changes.length;\n        }\n\n        System.out.println(anomalies + \" anomalies injected \");\n\n        int shingleSize = 10;\n        int horizon = 20;\n        for (int k = 0; k < numberOfModels; k++) {\n            forests[k] = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(baseDimensions * shingleSize).precision(Precision.FLOAT_32).randomSeed(seed + k)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).outputAfter(outputAfter)\n                    .transformMethod(TransformMethod.NORMALIZE).build();\n\n        }\n\n        boolean predictNextCrossing = true;\n        boolean actualCrossingAlerted = false;\n\n        boolean printPredictions = false;\n        boolean printEvents = true;\n\n        for (int i = 0; i < length; i++) {\n            double[] prediction = new double[horizon];\n\n            // any prediction needs sufficient data\n            // it's best to suggest 0 until then\n            if (i > sampleSize) {\n                for (int k = 0; k < numberOfModels; k++) {\n                    RangeVector forecast = forests[k].extrapolate(horizon).rangeVector;\n                    for (int t = 0; t < horizon; t++) {\n                        prediction[t] += forecast.values[t];\n                    }\n                }\n                if (prediction[horizon - 1] > alertThreshold && predictNextCrossing) {\n                    if (printEvents) {\n                        System.out.println(\"Currently at \" + i + \", should cross \" + alertThreshold + \" at sequence \"\n                                + (i + horizon - 1));\n                    }\n                    predictNextCrossing = false;\n                } else if (prediction[horizon - 1] < alertThreshold && !predictNextCrossing) {\n                    predictNextCrossing = true;\n                }\n                if (printPredictions) {\n                    for (int t = 0; t < horizon; t++) {\n                        System.out.println((i + t) + \" \" + prediction[t]);\n                    }\n                    System.out.println();\n                    System.out.println();\n                }\n            }\n\n            // now look at actuals\n            double sumValue = 0;\n            for (int k = 0; k < numberOfModels; k++) {\n                sumValue += dataWithKeys[k].data[i][0];\n            }\n            if (lastActualSum > alertThreshold && sumValue > alertThreshold) {\n                if (!actualCrossingAlerted) {\n                    if (printEvents) {\n                        System.out.println(\" Crossing \" + alertThreshold + \" at consecutive sequence indices \" + (i - 1)\n                                + \" \" + i);\n                    }\n                    actualCrossingAlerted = true;\n                }\n            } else if (sumValue < alertThreshold) {\n                actualCrossingAlerted = false;\n            }\n            lastActualSum = sumValue;\n\n            // update model\n            for (int k = 0; k < numberOfModels; k++) {\n                forests[k].process(dataWithKeys[k].data[i], 0L);\n            }\n        }\n\n    }\n\n}"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedRCFJsonExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper;\nimport com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestState;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\n/**\n * Serialize a Random Cut Forest to JSON using\n * <a href=\"https://github.com/FasterXML/jackson\">Jackson</a>.\n */\npublic class ThresholdedRCFJsonExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ThresholdedRCFJsonExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"json\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Thresholded Random Cut Forest as a JSON string\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int baseDimension = 2;\n        int shingleSize = 8;\n        int 
numberOfTrees = 50;\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\"seed :\" + seed);\n        Random rng = new Random(seed);\n\n        int dimensions = baseDimension * shingleSize;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions)\n                .shingleSize(shingleSize).transformMethod(TransformMethod.NORMALIZE).numberOfTrees(numberOfTrees)\n                .sampleSize(sampleSize).build();\n\n        int dataSize = 4 * sampleSize;\n        int testSize = sampleSize;\n        double[][] data = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50, 100, 5,\n                rng.nextLong(), baseDimension, 5.0, false).data;\n\n        for (int i = 0; i < data.length - testSize; i++) {\n            forest.process(data[i], 0L);\n        }\n\n        // Convert to JSON and print the number of bytes\n\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ObjectMapper jsonMapper = new ObjectMapper();\n\n        String json = jsonMapper.writeValueAsString(mapper.toState(forest));\n\n        System.out.printf(\"JSON size = %d bytes%n\", json.getBytes().length);\n\n        // Restore from JSON and compare anomaly scores produced by the two forests\n\n        ThresholdedRandomCutForest forest2 = mapper\n                .toModel(jsonMapper.readValue(json, ThresholdedRandomCutForestState.class));\n\n        for (int i = data.length - testSize; i < data.length; i++) {\n            AnomalyDescriptor result = forest.process(data[i], 0L);\n            AnomalyDescriptor shadow = forest2.process(data[i], 0L);\n            assert (Math.abs(result.getRCFScore() - shadow.getRCFScore()) < 1e-6);\n        }\n\n        System.out.println(\"Looks good!\");\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/parkservices/ThresholdedTime.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.parkservices;\n\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class ThresholdedTime implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ThresholdedTime().run();\n    }\n\n    @Override\n    public String command() {\n        return \"Thresholded_Time_example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Thresholded Time Example\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 
1;\n\n        int count = 0;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).build();\n\n        long seed = new Random().nextLong();\n\n        double[] data = new double[] { 1.0 };\n\n        System.out.println(\"seed = \" + seed);\n        NormalMixtureTestData normalMixtureTestData = new NormalMixtureTestData(10, 50);\n        MultiDimDataWithKey dataWithKeys = normalMixtureTestData.generateTestDataWithKey(dataSize, 1, 0);\n\n        /**\n         * the anomalies will move from normal -> anomalous -> normal starts from normal\n         */\n        boolean anomalyState = false;\n\n        int keyCounter = 0;\n        for (double[] point : dataWithKeys.data) {\n\n            long time = (long) (1000L * count + Math.floor(10 * point[0]));\n            AnomalyDescriptor result = forest.process(data, time);\n\n            if (keyCounter < dataWithKeys.changeIndices.length && count == dataWithKeys.changeIndices[keyCounter]) {\n                System.out.print(\"Sequence \" + count + \" stamp \" + (result.getInternalTimeStamp()) + \" CHANGE \");\n                if (!anomalyState) {\n                    System.out.println(\" to Distribution 1 \");\n                } else {\n                    System.out.println(\" to Distribution 0 \");\n                }\n                anomalyState = !anomalyState;\n                ++keyCounter;\n            }\n\n            if (result.getAnomalyGrade() != 0) {\n                System.out.print(\"Sequence \" + count + \" stamp \" + (result.getInternalTimeStamp()) + \" RESULT \");\n                System.out.print(\"score \" + 
result.getRCFScore() + \", grade \" + result.getAnomalyGrade() + \", \");\n\n                if (result.isExpectedValuesPresent()) {\n                    if (result.getRelativeIndex() != 0 && result.isStartOfAnomaly()) {\n                        System.out.print(-result.getRelativeIndex() + \" steps ago, instead of stamp \"\n                                + result.getPastTimeStamp());\n                        System.out.print(\", expected timestamp \" + result.getExpectedTimeStamp() + \" ( \"\n                                + (result.getPastTimeStamp() - result.getExpectedTimeStamp() + \")\"));\n                    } else {\n                        System.out.print(\"expected \" + result.getExpectedTimeStamp() + \" ( \"\n                                + (result.getInternalTimeStamp() - result.getExpectedTimeStamp() + \")\"));\n                    }\n                }\n                System.out.println();\n            }\n            ++count;\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/JsonExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.serialization;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\n/**\n * Serialize a Random Cut Forest to JSON using\n * <a href=\"https://github.com/FasterXML/jackson\">Jackson</a>.\n */\npublic class JsonExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new JsonExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"json\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Random Cut Forest as a JSON string\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int dimensions = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_64;\n\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                
.numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n\n        int dataSize = 4 * sampleSize;\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n            forest.update(point);\n        }\n\n        // Convert to JSON and print the number of bytes\n\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n        ObjectMapper jsonMapper = new ObjectMapper();\n\n        String json = jsonMapper.writeValueAsString(mapper.toState(forest));\n\n        System.out.printf(\"dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n\", dimensions,\n                numberOfTrees, sampleSize, precision);\n        System.out.printf(\"JSON size = %d bytes%n\", json.getBytes().length);\n\n        // Restore from JSON and compare anomaly scores produced by the two forests\n\n        RandomCutForest forest2 = mapper.toModel(jsonMapper.readValue(json, RandomCutForestState.class));\n\n        int testSize = 100;\n        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;\n\n        int differences = 0;\n        int anomalies = 0;\n\n        for (double[] point : testData.generateTestData(testSize, dimensions)) {\n            double score = forest.getAnomalyScore(point);\n            double score2 = forest2.getAnomalyScore(point);\n\n            // we mostly care that points that are scored as an anomaly by one forest are\n            // also scored as an anomaly by the other forest\n            if (score > 1 || score2 > 1) {\n                anomalies++;\n                if (Math.abs(score - score2) > delta) {\n                    differences++;\n                }\n            }\n\n            forest.update(point);\n            forest2.update(point);\n        }\n\n        // first validate that this was a nontrivial test\n        if (anomalies == 0) {\n            
throw new IllegalStateException(\"test data did not produce any anomalies\");\n        }\n\n        // validate that the two forests agree on anomaly scores\n        if (differences >= 0.01 * testSize) {\n            throw new IllegalStateException(\"restored forest does not agree with original forest\");\n        }\n\n        System.out.println(\"Looks good!\");\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ObjectStreamExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.serialization;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\npublic class ObjectStreamExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new ObjectStreamExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"object_stream\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Random Cut Forest with object stream\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int dimensions = 10;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                
.numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n\n        int dataSize = 1000 * sampleSize;\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n            forest.update(point);\n        }\n\n        // Convert to an array of bytes and print the size\n\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n        System.out.printf(\"dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n\", dimensions,\n                numberOfTrees, sampleSize, precision);\n        byte[] bytes = serialize(mapper.toState(forest));\n        System.out.printf(\"Object output stream size = %d bytes%n\", bytes.length);\n\n        // Restore from object stream and compare anomaly scores produced by the two\n        // forests\n\n        RandomCutForestState state2 = (RandomCutForestState) deserialize(bytes);\n        RandomCutForest forest2 = mapper.toModel(state2);\n\n        int testSize = 100;\n        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;\n\n        int differences = 0;\n        int anomalies = 0;\n\n        for (double[] point : testData.generateTestData(testSize, dimensions)) {\n            double score = forest.getAnomalyScore(point);\n            double score2 = forest2.getAnomalyScore(point);\n\n            // we mostly care that points that are scored as an anomaly by one forest are\n            // also scored as an anomaly by the other forest\n            if (score > 1 || score2 > 1) {\n                anomalies++;\n                if (Math.abs(score - score2) > delta) {\n                    differences++;\n                }\n            }\n\n            forest.update(point);\n            forest2.update(point);\n        }\n\n        // first validate that this was a nontrivial test\n        if (anomalies == 0) {\n            throw new 
IllegalStateException(\"test data did not produce any anomalies\");\n        }\n\n        // validate that the two forests agree on anomaly scores\n        if (differences >= 0.01 * testSize) {\n            throw new IllegalStateException(\"restored forest does not agree with original forest\");\n        }\n\n        System.out.println(\"Looks good!\");\n    }\n\n    private byte[] serialize(Object model) {\n        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();\n                ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) {\n            objectOutputStream.writeObject(model);\n            objectOutputStream.flush();\n            return byteArrayOutputStream.toByteArray();\n        } catch (IOException e) {\n            throw new RuntimeException(\"Failed to serialize model.\", e.getCause());\n        }\n    }\n\n    private Object deserialize(byte[] modelBin) {\n        try (ObjectInputStream objectInputStream = new ObjectInputStream(new ByteArrayInputStream(modelBin))) {\n            return objectInputStream.readObject();\n        } catch (IOException | ClassNotFoundException e) {\n            throw new RuntimeException(\"Failed to deserialize model.\", e.getCause());\n        }\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ProtostuffExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.serialization;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\nimport io.protostuff.LinkedBuffer;\nimport io.protostuff.ProtostuffIOUtil;\nimport io.protostuff.Schema;\nimport io.protostuff.runtime.RuntimeSchema;\n\n/**\n * Serialize a Random Cut Forest using the\n * <a href=\"https://github.com/protostuff/protostuff\">protostuff</a> library.\n */\npublic class ProtostuffExample implements Example {\n    public static void main(String[] args) throws Exception {\n        new ProtostuffExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"protostuff\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Random Cut Forest with the protostuff library\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int dimensions = 10;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n\n        
RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n\n        int dataSize = 1000 * sampleSize;\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n            forest.update(point);\n        }\n\n        // Convert to an array of bytes and print the size\n\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n\n        Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n        LinkedBuffer buffer = LinkedBuffer.allocate(512);\n        byte[] bytes;\n        try {\n            RandomCutForestState state = mapper.toState(forest);\n            bytes = ProtostuffIOUtil.toByteArray(state, schema, buffer);\n        } finally {\n            buffer.clear();\n        }\n\n        System.out.printf(\"dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n\", dimensions,\n                numberOfTrees, sampleSize, precision);\n        System.out.printf(\"protostuff size = %d bytes%n\", bytes.length);\n\n        // Restore from protostuff and compare anomaly scores produced by the two\n        // forests\n\n        RandomCutForestState state2 = schema.newMessage();\n        ProtostuffIOUtil.mergeFrom(bytes, state2, schema);\n        RandomCutForest forest2 = mapper.toModel(state2);\n\n        int testSize = 100;\n        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;\n\n        int differences = 0;\n        int anomalies = 0;\n\n        for (double[] point : testData.generateTestData(testSize, dimensions)) {\n            double score = forest.getAnomalyScore(point);\n            double score2 = forest2.getAnomalyScore(point);\n\n            // we mostly care that points that are scored as an anomaly by one 
forest are\n            // also scored as an anomaly by the other forest\n            if (score > 1 || score2 > 1) {\n                anomalies++;\n                if (Math.abs(score - score2) > delta) {\n                    differences++;\n                }\n            }\n\n            forest.update(point);\n            forest2.update(point);\n        }\n\n        // first validate that this was a nontrivial test\n        if (anomalies == 0) {\n            throw new IllegalStateException(\"test data did not produce any anomalies\");\n        }\n\n        // validate that the two forests agree on anomaly scores\n        if (differences >= 0.01 * testSize) {\n            throw new IllegalStateException(\"restored forest does not agree with original forest\");\n        }\n\n        System.out.println(\"Looks good!\");\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ProtostuffExampleWithDynamicLambda.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.serialization;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.executor.SamplerPlusTree;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\nimport io.protostuff.LinkedBuffer;\nimport io.protostuff.ProtostuffIOUtil;\nimport io.protostuff.Schema;\nimport io.protostuff.runtime.RuntimeSchema;\n\n/**\n * Serialize a Random Cut Forest using the\n * <a href=\"https://github.com/protostuff/protostuff\">protostuff</a> library.\n */\npublic class ProtostuffExampleWithDynamicLambda implements Example {\n    public static void main(String[] args) throws Exception {\n        new ProtostuffExampleWithDynamicLambda().run();\n    }\n\n    @Override\n    public String command() {\n        return \"protostuff_dynamic\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Random Cut Forest with the protostuff library\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut 
forest\n\n        int dimensions = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_64;\n\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).build();\n\n        int dataSize = 4 * sampleSize;\n        NormalMixtureTestData testData = new NormalMixtureTestData();\n        for (double[] point : testData.generateTestData(dataSize, dimensions)) {\n            forest.update(point);\n        }\n\n        // Convert to an array of bytes and print the size\n\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n\n        Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n        LinkedBuffer buffer = LinkedBuffer.allocate(512);\n        byte[] bytes;\n        try {\n            RandomCutForestState state = mapper.toState(forest);\n            bytes = ProtostuffIOUtil.toByteArray(state, schema, buffer);\n        } finally {\n            buffer.clear();\n        }\n\n        System.out.printf(\"dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n\", dimensions,\n                numberOfTrees, sampleSize, precision);\n        System.out.printf(\"protostuff size = %d bytes%n\", bytes.length);\n\n        // Restore from protostuff and compare anomaly scores produced by the two\n        // forests\n\n        RandomCutForestState state2 = schema.newMessage();\n        ProtostuffIOUtil.mergeFrom(bytes, state2, schema);\n        RandomCutForest forest2 = mapper.toModel(state2);\n\n        double saveLambda = forest.getTimeDecay();\n        forest.setTimeDecay(10 * forest.getTimeDecay());\n        forest2.setTimeDecay(10 * forest2.getTimeDecay());\n\n        for (int i = 0; i < numberOfTrees; i++) {\n            CompactSampler sampler = (CompactSampler) 
((SamplerPlusTree) forest.getComponents().get(i)).getSampler();\n            CompactSampler sampler2 = (CompactSampler) ((SamplerPlusTree) forest2.getComponents().get(i)).getSampler();\n\n            if (sampler.getMaxSequenceIndex() != sampler2.getMaxSequenceIndex()) {\n                throw new IllegalStateException(\"Incorrect sampler state\");\n            }\n            if (sampler.getMostRecentTimeDecayUpdate() != sampler2.getMostRecentTimeDecayUpdate()) {\n                throw new IllegalStateException(\"Incorrect sampler state\");\n            }\n            if (sampler2.getMostRecentTimeDecayUpdate() != dataSize - 1) {\n                throw new IllegalStateException(\"Incorrect sampler state\");\n            }\n        }\n\n        int testSize = 100;\n        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;\n\n        int differences = 0;\n        int anomalies = 0;\n\n        for (double[] point : testData.generateTestData(testSize, dimensions)) {\n            double score = forest.getAnomalyScore(point);\n            double score2 = forest2.getAnomalyScore(point);\n\n            // we mostly care that points that are scored as an anomaly by one forest are\n            // also scored as an anomaly by the other forest\n            if (score > 1 || score2 > 1) {\n                anomalies++;\n                if (Math.abs(score - score2) > delta) {\n                    differences++;\n                }\n            }\n\n            forest.update(point);\n            forest2.update(point);\n        }\n\n        // first validate that this was a nontrivial test\n        if (anomalies == 0) {\n            throw new IllegalStateException(\"test data did not produce any anomalies\");\n        }\n\n        // validate that the two forests agree on anomaly scores\n        if (differences >= 0.01 * testSize) {\n            throw new IllegalStateException(\"restored forest does not agree with original forest\");\n        }\n\n        
System.out.println(\"Looks good!\");\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/serialization/ProtostuffExampleWithShingles.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.serialization;\n\nimport static java.lang.Math.PI;\n\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\n\nimport io.protostuff.LinkedBuffer;\nimport io.protostuff.ProtostuffIOUtil;\nimport io.protostuff.Schema;\nimport io.protostuff.runtime.RuntimeSchema;\n\n/**\n * Serialize a Random Cut Forest using the\n * <a href=\"https://github.com/protostuff/protostuff\">protostuff</a> library.\n */\npublic class ProtostuffExampleWithShingles implements Example {\n    public static void main(String[] args) throws Exception {\n        new ProtostuffExampleWithShingles().run();\n    }\n\n    @Override\n    public String command() {\n        return \"protostuffWithShingles\";\n    }\n\n    @Override\n    public String description() {\n        return \"serialize a Random Cut Forest with the protostuff library for shingled points\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        // Create and populate a random cut forest\n\n        int dimensions = 10;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        
Precision precision = Precision.FLOAT_64;\n        RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).sampleSize(sampleSize).precision(precision).shingleSize(dimensions)\n                .build();\n        int count = 1;\n        int dataSize = 1000 * sampleSize;\n        for (double[] point : generateShingledData(dataSize, dimensions, 0)) {\n            forest.update(point);\n        }\n\n        // Convert to an array of bytes and print the size\n\n        RandomCutForestMapper mapper = new RandomCutForestMapper();\n        mapper.setSaveExecutorContextEnabled(true);\n        mapper.setSaveTreeStateEnabled(false);\n\n        Schema<RandomCutForestState> schema = RuntimeSchema.getSchema(RandomCutForestState.class);\n        LinkedBuffer buffer = LinkedBuffer.allocate(512);\n        byte[] bytes;\n        try {\n            RandomCutForestState state = mapper.toState(forest);\n            bytes = ProtostuffIOUtil.toByteArray(state, schema, buffer);\n        } finally {\n            buffer.clear();\n        }\n\n        System.out.printf(\"dimensions = %d, numberOfTrees = %d, sampleSize = %d, precision = %s%n\", dimensions,\n                numberOfTrees, sampleSize, precision);\n        System.out.printf(\"protostuff size = %d bytes%n\", bytes.length);\n\n        // Restore from protostuff and compare anomaly scores produced by the two\n        // forests\n\n        RandomCutForestState state2 = schema.newMessage();\n        ProtostuffIOUtil.mergeFrom(bytes, state2, schema);\n        RandomCutForest forest2 = mapper.toModel(state2);\n\n        int testSize = 10000;\n        double delta = Math.log(sampleSize) / Math.log(2) * 0.05;\n\n        int differences = 0;\n        int anomalies = 0;\n\n        for (double[] point : generateShingledData(testSize, dimensions, 2)) {\n            double score = forest.getAnomalyScore(point);\n            double score2 = 
forest2.getAnomalyScore(point);\n\n            // we mostly care that points that are scored as an anomaly by one forest are\n            // also scored as an anomaly by the other forest\n            if (score > 1 || score2 > 1) {\n                anomalies++;\n                if (Math.abs(score - score2) > delta) {\n                    differences++;\n                }\n            }\n\n            forest.update(point);\n            forest2.update(point);\n        }\n\n        // validate that the two forests agree on anomaly scores\n        if (differences >= 0.01 * testSize) {\n            throw new IllegalStateException(\"restored forest does not agree with original forest\");\n        }\n\n        System.out.println(\"Looks good!\");\n    }\n\n    private double[][] generateShingledData(int size, int dimensions, long seed) {\n        double[][] answer = new double[size][];\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[dimensions];\n        int count = 0;\n        double[] data = getDataD(size + dimensions - 1, 100, 5, seed);\n        for (int j = 0; j < size + dimensions - 1; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % dimensions;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n                // System.out.println(\"Adding \" + j);\n                answer[count++] = getShinglePoint(history, entryIndex, dimensions);\n            }\n        }\n        return answer;\n    }\n\n    private static double[] getShinglePoint(double[] recentPointsSeen, int indexOfOldestPoint, int shingleLength) {\n        double[] shingledPoint = new double[shingleLength];\n        int i = 0;\n        for (int j = 0; j < shingleLength; ++j) {\n            double point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];\n            
shingledPoint[i++] = point;\n\n        }\n        return shingledPoint;\n    }\n\n    double[] getDataD(int num, double amplitude, double noise, long seed) {\n\n        double[] data = new double[num];\n        Random noiseprg = new Random(seed);\n        for (int i = 0; i < num; i++) {\n            data[i] = amplitude * Math.cos(2 * PI * (i + 50) / 1000) + noise * noiseprg.nextDouble();\n        }\n\n        return data;\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/DynamicSummarization.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.summarization;\n\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;\nimport static java.lang.Math.PI;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * Summarized representation of the stored points provide a convenient view into\n * the \"current state\" of the stream seen/sampled by an RCF. 
However since RCFs\n * provide a generic sketch for multiple different scenarios\n * https://opensearch.org/blog/odfe-updates/2019/11/random-cut-forests/ the\n * summarization can be used repeatedly to provide a dynamic clustering of a\n * numeric data stream as shown in the example below.\n *\n * The summarization is based on a well-scattered multi-centroid representation\n * as in CURE https://en.wikipedia.org/wiki/CURE_algorithm and distance based\n * clustering as in https://en.wikipedia.org/wiki/Data_stream_clustering\n *\n * The example corresponds to a wheel like arrangement -- where numberOfBlades\n * determine the number of spokes. For many settings of the parameter the spokes\n * are closer to each other near the center than the extremity at the rim. Thus\n * a centroidal representation cannot conceptually capture each spoke as a\n * cluster, and multi-centroid approach is necessary. Note that the input to the\n * summarization is not the same as the numberOfBlades; the maxAllowed number\n * corresponds to the maximum number of clusters which can be much larger. In a\n * clustering application, the number of clusters are typically not known\n * apriori.\n *\n * The pointset is generated once and are input to RCF with rotations. As the\n * \"blades are running\", the output clusters can be colored and we can visualize\n * the clusters produced. For the parameters below, simplistic plotting\n * functions such as gnuplot using do for [i = 0:359] { plot [-15:15][-15:15]\n * \"sum\" index i u 1:2:3:4 w circles fill solid noborder fc palette z t \"\" }\n * would show the rotating clusters where the representatives corresponding to\n * the same cluster has the same color. 
We note that the visualizations is\n * neither polished nor complete, since the goal is to highlight the\n * functionality of summarization in RCFs.\n */\npublic class DynamicSummarization implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new DynamicSummarization().run();\n    }\n\n    @Override\n    public String command() {\n        return \"dynamic_summarization\";\n    }\n\n    @Override\n    public String description() {\n        return \"shows a potential use of dynamic clustering/summarization\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        int newDimensions = 2;\n        long randomSeed = 123;\n        int dataSize = 1350;\n        int numberOfBlades = 9;\n\n        RandomCutForest newForest = RandomCutForest.builder().numberOfTrees(100).sampleSize(256)\n                .dimensions(newDimensions).randomSeed(randomSeed).timeDecay(1.0 / 800).centerOfMassEnabled(true)\n                .build();\n        String name = \"dynamic_summarization_example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n        double[][] data = getData(dataSize, 0, numberOfBlades);\n\n        boolean printData = false;\n        boolean printClusters = true;\n\n        List<ICluster<float[]>> oldSummary = null;\n        int[] oldColors = null;\n\n        int count = 0;\n        int sum = 0;\n        for (int degree = 0; degree < 360; degree += 1) {\n            for (double[] datum : data) {\n                double[] vec = rotateClockWise(datum, -2 * PI * degree / 360);\n                if (printData) {\n                    file.append(vec[0] + \" \" + vec[1] + \"\\n\");\n                }\n                newForest.update(vec);\n            }\n            if (printData) {\n                file.append(\"\\n\");\n                file.append(\"\\n\");\n            }\n\n            List<ICluster<float[]>> summary = newForest.summarize(2 * numberOfBlades + 2, 0.05, 5, 0.8,\n              
      Summarizer::L2distance, oldSummary);\n            sum += summary.size();\n            System.out.println(degree + \" \" + summary.size());\n            if (summary.size() == numberOfBlades) {\n                ++count;\n            }\n            int[] colors = align(summary, oldSummary, oldColors);\n\n            for (int i = 0; i < summary.size(); i++) {\n                double weight = summary.get(i).getWeight();\n                for (Weighted<float[]> representative : summary.get(i).getRepresentatives()) {\n                    double t = representative.weight / weight;\n                    if (t > 0.05 && printClusters) {\n                        file.append(representative.index[0] + \" \" + representative.index[1] + \" \" + t + \" \" + colors[i]\n                                + \"\\n\");\n                    }\n                }\n            }\n            if (summary.size() == numberOfBlades) {\n                oldSummary = summary;\n                oldColors = colors;\n            }\n            if (printClusters) {\n                file.append(\"\\n\");\n                file.append(\"\\n\");\n            }\n\n        }\n        System.out.println(\"Exact detection :\" + ((float) Math.round(count / 3.6) * 0.01)\n                + \" fraction, average number of clusters \" + ((float) Math.round(sum / 3.6) * 0.01));\n        file.close();\n    }\n\n    public double[][] getData(int dataSize, int seed, int fans) {\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);\n        int newDimensions = 2;\n        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);\n\n        for (int i = 0; i < dataSize; i++) {\n            int nextFan = prg.nextInt(fans);\n            // scale, make an ellipse\n            data[i][1] *= 1.0 / fans;\n            data[i][0] *= 2.0;\n            // shift\n            data[i][0] += 5.0 + fans / 2;\n            data[i] = 
rotateClockWise(data[i], 2 * PI * nextFan / fans);\n        }\n\n        return data;\n    }\n\n    int[] align(List<ICluster<float[]>> current, List<ICluster<float[]>> previous, int[] oldColors) {\n        int[] nearest = new int[current.size()];\n\n        if (previous == null || previous.size() == 0) {\n            for (int i = 0; i < current.size(); i++) {\n                nearest[i] = i;\n            }\n        } else {\n            Arrays.fill(nearest, previous.size() + 1);\n            for (int i = 0; i < current.size(); i++) {\n                double dist = previous.get(0).distance(current.get(i), Summarizer::L1distance);\n                nearest[i] = oldColors[0];\n                for (int j = 1; j < previous.size(); j++) {\n                    double t = previous.get(j).distance(current.get(i), Summarizer::L1distance);\n                    if (t < dist) {\n                        dist = t;\n                        nearest[i] = oldColors[j];\n                    }\n                }\n            }\n        }\n        return nearest;\n    }\n}\n"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/RCFMultiSummarizeExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.summarization;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static java.lang.Math.abs;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * centroidal clustering fails in many scenarios; primarily because a single\n * point in combination with a distance metric can only represent a sphere. A\n * reasonable solution is to use multiple well scattered centroids to represent\n * a cluster and has been long in use, see CURE\n * https://en.wikipedia.org/wiki/CURE_algorithm\n *\n * The following example demonstrates the use of a multicentroid clustering; the\n * data corresponds to 2*d clusters in d dimensions (d chosen randomly) such\n * that the clusters almost touch, but remain separable. 
Note that the knowledge\n * of the true number of clusters is not required -- the clustering is invoked\n * with a maximum of 5*d potential clusters, and yet the example often finds the\n * true 2*d clusters.\n */\npublic class RCFMultiSummarizeExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new com.amazon.randomcutforest.examples.summarization.RCFMultiSummarizeExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"RCF_Multi_Summarize_Example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Example of using RCF Multi Summarization\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        int dataSize = 200000;\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n\n        double epsilon = 0.01;\n        List<ICluster<float[]>> summary = Summarizer.multiSummarize(points, 5 * newDimensions, 0.1, true, 5,\n                random.nextLong());\n        System.out.println(summary.size() + \" clusters for \" + newDimensions + \" dimensions, seed : \" + seed);\n        double weight = summary.stream().map(e -> e.getWeight()).reduce(Double::sum).get();\n        System.out.println(\n                \"Total weight \" + ((float) Math.round(weight * 1000) * 0.001) + \" rounding to multiples of \" + epsilon);\n        System.out.println();\n\n        for (int i = 0; i < summary.size(); i++) {\n            double clusterWeight = summary.get(i).getWeight();\n            System.out.println(\n                    \"Cluster \" + i + \" representatives, weight \" + ((float) Math.round(1000 * clusterWeight) * 0.001));\n            List<Weighted<float[]>> representatives = summary.get(i).getRepresentatives();\n            for (int j = 0; j < 
representatives.size(); j++) {\n                double t = representatives.get(j).weight;\n                t = Math.round(1000.0 * t / clusterWeight) * 0.001;\n                System.out.print(\"relative weight \" + (float) t + \" center (approx)  \");\n                printArray(representatives.get(j).index, epsilon);\n                System.out.println();\n            }\n            System.out.println();\n        }\n\n    }\n\n    void printArray(float[] values, double epsilon) {\n        System.out.print(\" [\");\n        if (abs(values[0]) < epsilon) {\n            System.out.print(\"0\");\n        } else {\n            if (epsilon <= 0) {\n                System.out.print(values[0]);\n            } else {\n                long t = (int) Math.round(values[0] / epsilon);\n                System.out.print(t * epsilon);\n            }\n        }\n        for (int i = 1; i < values.length; i++) {\n            if (abs(values[i]) < epsilon) {\n                System.out.print(\", 0\");\n            } else {\n                if (epsilon <= 0) {\n                    System.out.print(\", \" + values[i]);\n                } else {\n                    long t = Math.round(values[i] / epsilon);\n                    System.out.print(\", \" + t * epsilon);\n                }\n            }\n        }\n        System.out.print(\"]\");\n    }\n\n    public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {\n        double baseMu = 0.0;\n        double baseSigma = 1.0;\n        double anomalyMu = 0.0;\n        double anomalySigma = 1.0;\n        double transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        double transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        
double[][] data = generator.generateTestData(dataSize, newDimensions, seed);\n        float[][] floatData = new float[dataSize][];\n\n        float[] allZero = new float[newDimensions];\n        float[] sigma = new float[newDimensions];\n        Arrays.fill(sigma, 1f);\n        double scale = distance.apply(allZero, sigma);\n\n        for (int i = 0; i < dataSize; i++) {\n            // shrink, shift at random\n            int nextD = prg.nextInt(newDimensions);\n            for (int j = 0; j < newDimensions; j++) {\n                data[i][j] *= 1.0 / (3.0);\n                // standard deviation adds up across dimension; taking square root\n                // and using s 3 sigma ball\n                if (j == nextD) {\n                    if (prg.nextDouble() < 0.5)\n                        data[i][j] += 2.0 * scale;\n                    else\n                        data[i][j] -= 2.0 * scale;\n                }\n            }\n            floatData[i] = toFloatArray(data[i]);\n        }\n\n        return floatData;\n    }\n\n}"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/RCFStringSummarizeExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.summarization;\n\nimport static java.lang.Math.min;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * the following example showcases the use of RCF multi-summarization on generic\n * types R, when provided with a distance function from (R,R) into double. In\n * this example R correpsonds to Strings and the distance is EditDistance The\n * srings are genrated from two clusters one where character A (or '-' for viz)\n * occurs with probability 2/3 and anothewr where it occurs with probability 1/3\n * (and the character B or '_' occurs with probability 2/3)\n *\n * Clearly, and the following example makes it visual, multicentroid approach is\n * necessary.\n *\n * All the strings do not have the same length. 
Note that the summarization is\n * asked with a maximum of 10 clusters but the algorithm self-adjusts to 2\n * clusters.\n */\npublic class RCFStringSummarizeExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new com.amazon.randomcutforest.examples.summarization.RCFStringSummarizeExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"RCF_String_Summarize_Example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Example of using RCF String Summarization, uses multi-centroid approach\";\n    }\n\n    @Override\n    public void run() throws Exception {\n\n        long seed = -8436172895711381300L;\n        new Random().nextLong();\n        System.out.println(\"String summarization seed : \" + seed);\n        Random random = new Random(seed);\n        int size = 100;\n        int numberOfStrings = 20000;\n\n        String[] points = new String[numberOfStrings];\n        for (int i = 0; i < numberOfStrings; i++) {\n            if (random.nextDouble() < 0.5) {\n                points[i] = getABString(size, 0.8, random);\n            } else {\n                points[i] = getABString(size, 0.2, random);\n            }\n        }\n\n        int nextSeed = random.nextInt();\n        List<ICluster<String>> summary = Summarizer.multiSummarize(points, 5, 10, 1, false, 0.8,\n                RCFStringSummarizeExample::toyDistance, nextSeed, true, 0.1, 5);\n        System.out.println();\n        for (int i = 0; i < summary.size(); i++) {\n            double weight = summary.get(i).getWeight();\n            System.out.println(\n                    \"Cluster \" + i + \" representatives, weight \" + ((float) Math.round(1000 * weight) * 0.001));\n            List<Weighted<String>> representatives = summary.get(i).getRepresentatives();\n            for (int j = 0; j < representatives.size(); j++) {\n                double t = representatives.get(j).weight;\n              
  t = Math.round(1000.0 * t / weight) * 0.001;\n                System.out.print(\n                        \"relative weight \" + (float) t + \" length \" + representatives.get(j).index.length() + \" \");\n                printString(representatives.get(j).index);\n                System.out.println();\n            }\n            System.out.println();\n        }\n\n    }\n\n    public static double toyDistance(String a, String b) {\n        if (a.length() > b.length()) {\n            return toyDistance(b, a);\n        }\n        double[][] dist = new double[2][b.length() + 1];\n        for (int j = 0; j < b.length() + 1; j++) {\n            dist[0][j] = j;\n        }\n\n        for (int i = 1; i < a.length() + 1; i++) {\n            dist[1][0] = i;\n            for (int j = 1; j < b.length() + 1; j++) {\n                double t = dist[0][j - 1] + ((a.charAt(i - 1) == b.charAt(j - 1)) ? 0 : 1);\n                dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);\n            }\n            for (int j = 0; j < b.length() + 1; j++) {\n                dist[0][j] = dist[1][j];\n            }\n        }\n        return dist[1][b.length()];\n    }\n\n    // colors\n    public static final String ANSI_RESET = \"\\u001B[0m\";\n    public static final String ANSI_RED = \"\\u001B[31m\";\n    public static final String ANSI_BLUE = \"\\u001B[34m\";\n\n    public static void printString(String a) {\n        for (int i = 0; i < a.length(); i++) {\n            if (a.charAt(i) == '-') {\n                System.out.print(ANSI_RED + a.charAt(i) + ANSI_RESET);\n            } else {\n                System.out.print(ANSI_BLUE + a.charAt(i) + ANSI_RESET);\n            }\n        }\n\n    }\n\n    public String getABString(int size, double probabilityOfA, Random random) {\n        StringBuilder stringBuilder = new StringBuilder();\n        int newSize = size + random.nextInt(size / 5);\n        for (int i = 0; i < newSize; i++) {\n            if (random.nextDouble() < 
probabilityOfA) {\n                stringBuilder.append(\"-\");\n            } else {\n                stringBuilder.append(\"_\");\n            }\n        }\n        return stringBuilder.toString();\n    }\n\n}"
  },
  {
    "path": "Java/examples/src/main/java/com/amazon/randomcutforest/examples/summarization/RCFSummarizeExample.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.examples.summarization;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static java.lang.Math.abs;\n\nimport java.util.Arrays;\nimport java.util.Random;\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.examples.Example;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\n\n/**\n * The following example is based off a test of summarization and provides an\n * example use of summarization based on centroidal representation. The\n * clustering takes a distance function from (float[],float []) into double as\n * input, along with a maximum number of allowed clusters and provides a summary\n * which contains the list of cluster centers as \"typical points\" along with\n * relative likelihood.\n *\n * The specific example below corresponds to 2*d clusters (one each in +ve and\n * -ve axis for each of the d dimensions) where d is chosen at random between 3\n * and 13. The clusters are designed to almost touch -- but are separable (with\n * high probability) and should be discoverable separately. 
Note that the\n * algorithm does not require the knowledge of the true number of clusters (2*d)\n * but is run with a maximum allowed number 5*d.\n */\npublic class RCFSummarizeExample implements Example {\n\n    public static void main(String[] args) throws Exception {\n        new com.amazon.randomcutforest.examples.summarization.RCFSummarizeExample().run();\n    }\n\n    @Override\n    public String command() {\n        return \"RCF_Summarize_Example\";\n    }\n\n    @Override\n    public String description() {\n        return \"Example of using RCF Summarization\";\n    }\n\n    @Override\n    public void run() throws Exception {\n        long seed = new Random().nextLong();\n        Random random = new Random(seed);\n        int newDimensions = random.nextInt(10) + 3;\n        int dataSize = 200000;\n\n        float[][] points = getData(dataSize, newDimensions, random.nextInt(), Summarizer::L2distance);\n\n        SampleSummary summary = Summarizer.l2summarize(points, 5 * newDimensions, 42);\n        System.out.println(\n                summary.summaryPoints.length + \" clusters for \" + newDimensions + \" dimensions, seed : \" + seed);\n        double epsilon = 0.01;\n        System.out.println(\"Total weight \" + summary.weightOfSamples + \" rounding to multiples of \" + epsilon);\n        System.out.println();\n        for (int i = 0; i < summary.summaryPoints.length; i++) {\n            long t = Math.round(summary.relativeWeight[i] / epsilon);\n            System.out.print(\"Cluster \" + i + \" relative weight \" + ((float) t * epsilon) + \" center (approx): \");\n            printArray(summary.summaryPoints[i], epsilon);\n            System.out.println();\n        }\n\n    }\n\n    void printArray(float[] values, double epsilon) {\n        System.out.print(\" [\");\n        if (abs(values[0]) < epsilon) {\n            System.out.print(\"0\");\n        } else {\n            if (epsilon <= 0) {\n                System.out.print(values[0]);\n            } 
else {\n                long t = (int) Math.round(values[0] / epsilon);\n                System.out.print((float) t * epsilon);\n            }\n        }\n        for (int i = 1; i < values.length; i++) {\n            if (abs(values[i]) < epsilon) {\n                System.out.print(\", 0\");\n            } else {\n                if (epsilon <= 0) {\n                    System.out.print(\", \" + values[i]);\n                } else {\n                    long t = Math.round(values[i] / epsilon);\n                    System.out.print(\", \" + ((float) t * epsilon));\n                }\n            }\n        }\n        System.out.print(\"]\");\n    }\n\n    public float[][] getData(int dataSize, int newDimensions, int seed, BiFunction<float[], float[], Double> distance) {\n        double baseMu = 0.0;\n        double baseSigma = 1.0;\n        double anomalyMu = 0.0;\n        double anomalySigma = 1.0;\n        double transitionToAnomalyProbability = 0.0;\n        // ignoring anomaly cluster for now\n        double transitionToBaseProbability = 1.0;\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(baseMu, baseSigma, anomalyMu, anomalySigma,\n                transitionToAnomalyProbability, transitionToBaseProbability);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, seed);\n        float[][] floatData = new float[dataSize][];\n\n        float[] allZero = new float[newDimensions];\n        float[] sigma = new float[newDimensions];\n        Arrays.fill(sigma, 1f);\n        double scale = distance.apply(allZero, sigma);\n\n        for (int i = 0; i < dataSize; i++) {\n            // shrink, shift at random\n            int nextD = prg.nextInt(newDimensions);\n            for (int j = 0; j < newDimensions; j++) {\n                data[i][j] *= 1.0 / (3.0);\n                // standard deviation adds up across dimension; taking square root\n                // and using s 3 sigma ball\n  
              if (j == nextD) {\n                    if (prg.nextDouble() < 0.5)\n                        data[i][j] += 2.0 * scale;\n                    else\n                        data[i][j] -= 2.0 * scale;\n                }\n            }\n            floatData[i] = toFloatArray(data[i]);\n        }\n\n        return floatData;\n    }\n\n}"
  },
  {
    "path": "Java/findbugs-filters.xml",
    "content": "<FindBugsFilter>\n    <!--\n         EI_EXPOSE_REP: May expose internal representation by returning reference to mutable object\n\n             Returning a reference to a mutable object value stored in one of the object's fields exposes the internal representation of the object.  If instances are accessed by untrusted code, and unchecked changes to the mutable object would compromise security or other important properties, you will need to do something different. Returning a new copy of the object is better approach in many situations.\n\n         EI_EXPOSE_REP2: May expose internal representation by incorporating reference to mutable object\n\n             This code stores a reference to an externally mutable object into the internal representation of the object.  If instances are accessed by untrusted code, and unchecked changes to the mutable object would compromise security or other important properties, you will need to do something different. Storing a copy of the object is better approach in many situations.\n    -->\n    <Match>\n        <Bug code=\"EI,EI2\"/>\n    </Match>\n</FindBugsFilter>\n"
  },
  {
    "path": "Java/license-header",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\n\n"
  },
  {
    "path": "Java/lombok.config",
    "content": "lombok.addLombokGeneratedAnnotation = true\n"
  },
  {
    "path": "Java/parkservices/pom.xml",
    "content": "<?xml version=\"1.0\"?>\n<project xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\" xmlns=\"http://maven.apache.org/POM/4.0.0\"\n    xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n  <modelVersion>4.0.0</modelVersion>\n\n  <parent>\n    <groupId>software.amazon.randomcutforest</groupId>\n    <artifactId>randomcutforest-parent</artifactId>\n    <version>4.4.0</version>\n  </parent>\n\n  <artifactId>randomcutforest-parkservices</artifactId>\n  <packaging>jar</packaging>\n\n  <dependencies>\n    <dependency>\n      <groupId>software.amazon.randomcutforest</groupId>\n      <artifactId>randomcutforest-core</artifactId>\n      <version>${project.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>software.amazon.randomcutforest</groupId>\n      <artifactId>randomcutforest-testutils</artifactId>\n      <version>${project.version}</version>\n    </dependency>\n    <dependency>\n      <groupId>org.projectlombok</groupId>\n      <artifactId>lombok</artifactId>\n      <version>1.18.30</version>\n      <scope>provided</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.junit.jupiter</groupId>\n      <artifactId>junit-jupiter-engine</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.junit.jupiter</groupId>\n      <artifactId>junit-jupiter-params</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.mockito</groupId>\n      <artifactId>mockito-core</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>org.mockito</groupId>\n      <artifactId>mockito-junit-jupiter</artifactId>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>com.fasterxml.jackson.core</groupId>\n      <artifactId>jackson-databind</artifactId>\n      <version>2.16.0</version>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      
<groupId>io.protostuff</groupId>\n      <artifactId>protostuff-core</artifactId>\n      <version>1.8.0</version>\n      <scope>test</scope>\n    </dependency>\n    <dependency>\n      <groupId>io.protostuff</groupId>\n      <artifactId>protostuff-runtime</artifactId>\n      <version>1.8.0</version>\n      <scope>test</scope>\n    </dependency>\n  </dependencies>\n</project>\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/AnomalyDescriptor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\n\n@Getter\n@Setter\npublic class AnomalyDescriptor extends RCFComputeDescriptor {\n\n    public static int NUMBER_OF_EXPECTED_VALUES = 1;\n\n    // confidence, for both anomalies/non-anomalies\n    double dataConfidence;\n\n    // flag indicating if the anomaly is the start of an anomaly or part of a run of\n    // anomalies\n    boolean startOfAnomaly;\n\n    // flag indicating if the time stamp is in elevated score region to be\n    // considered as anomaly\n    boolean inHighScoreRegion;\n\n    // a flattened version denoting the basic contribution of each input variable\n    // (not shingled) for the\n    // time slice indicated by relativeIndex\n    double[] relevantAttribution;\n\n    // when time is appended for the anomalous time slice\n    double timeAttribution;\n\n    // the values being replaced; may correspond to past\n    double[] pastValues;\n\n    // older timestamp if that is replaced\n    long pastTimeStamp;\n\n    // expected values, currently set to maximum 1\n    double[][] expectedValuesList;\n\n    // likelihood values for the list\n    double[] 
likelihoodOfValues;\n\n    public AnomalyDescriptor(double[] input, long inputTimeStamp) {\n        super(input, inputTimeStamp);\n    }\n\n    public void setPastValues(double[] values) {\n        pastValues = copyIfNotnull(values);\n    }\n\n    public boolean isExpectedValuesPresent() {\n        return expectedValuesList != null;\n    }\n\n    public void setRelevantAttribution(double[] values) {\n        this.relevantAttribution = copyIfNotnull(values);\n    }\n\n    public void setExpectedValues(int position, double[] values, double likelihood) {\n        checkArgument(position < NUMBER_OF_EXPECTED_VALUES, \"Increase size of expected array\");\n        if (expectedValuesList == null) {\n            expectedValuesList = new double[NUMBER_OF_EXPECTED_VALUES][];\n        }\n        if (likelihoodOfValues == null) {\n            likelihoodOfValues = new double[NUMBER_OF_EXPECTED_VALUES];\n        }\n        expectedValuesList[position] = Arrays.copyOf(values, values.length);\n        likelihoodOfValues[position] = likelihood;\n    }\n\n    public void setDataConfidence(double timeDecay, long valuesSeen, long outputAfter, double dataQuality) {\n        long total = valuesSeen;\n        double lambda = timeDecay;\n        double totalExponent = total * lambda;\n        if (totalExponent == 0) {\n            dataConfidence = 0.0;\n        } else if (totalExponent >= 20) {\n            dataConfidence = Math.min(1.0, dataQuality);\n        } else {\n            double eTotal = Math.exp(totalExponent);\n            double confidence = dataQuality * (eTotal - Math.exp(lambda * Math.min(total, outputAfter))) / (eTotal - 1);\n            dataConfidence = Math.max(0, confidence);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/ForecastDescriptor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\n\n@Getter\n@Setter\npublic class ForecastDescriptor extends AnomalyDescriptor {\n\n    // all the following objects will be of length (forecast horizon x the number of\n    // input variables)\n\n    /**\n     * basic forecast field, with the time information to be used for TIME_AUGMENTED\n     * mode in the future\n     */\n    TimedRangeVector timedForecast;\n\n    /**\n     * the distribution of errors -- for an algorithm that self-calibrates, this\n     * information has to be computed exposing the error can be of use for the user\n     * to audit the results. 
The distributions will use interpolations and will not\n     * adhere to specific quantile values -- thereby allowing for better\n     * generalization.\n     */\n    RangeVector observedErrorDistribution;\n\n    /**\n     * typically RMSE is a single vector -- however unlike standard literature, we\n     * would not be limited to zero mean time series; in fact converting a time\n     * series to a zero mean series in an online manner is already challenging.\n     * Moreover, it is often the case that errors have a typical distribution skew;\n     * in the current library we have partitioned many of the explainabilty aspects\n     * (e.g., attribution in anomaly detection, directionality in density\n     * estimation, etc.) based on high/low; when the actual value being observed is\n     * correspondingly higher/lower than some (possibly implicit) baseline. We split\n     * the same for error.\n     */\n    DiVector errorRMSE;\n\n    /**\n     * mean error corresponding to the forecast horizon x the number of input\n     * variables This is not used in the current intervalPrecision -- we use the\n     * median value from the error distribution.\n     */\n    float[] errorMean;\n\n    /**\n     * in the forecast horizon x the number of input variables this corresponds to\n     * the fraction of variables \\predicted correctly over the error horizon. 
A\n     * value of 1.0 is terrific.\n     */\n    float[] intervalPrecision;\n\n    public ForecastDescriptor(double[] input, long inputTimeStamp, int horizon) {\n        super(input, inputTimeStamp);\n        int forecastLength = input.length * horizon;\n        this.timedForecast = new TimedRangeVector(forecastLength, horizon);\n        this.observedErrorDistribution = new RangeVector(forecastLength);\n        Arrays.fill(this.observedErrorDistribution.lower, -Float.MAX_VALUE);\n        Arrays.fill(this.observedErrorDistribution.upper, Float.MAX_VALUE);\n        this.errorMean = new float[forecastLength];\n        this.errorRMSE = new DiVector(forecastLength);\n        this.intervalPrecision = new float[forecastLength];\n    }\n\n    public void setObservedErrorDistribution(RangeVector base) {\n        checkArgument(base.values.length == this.observedErrorDistribution.values.length, \" incorrect length\");\n        System.arraycopy(base.values, 0, this.observedErrorDistribution.values, 0, base.values.length);\n        System.arraycopy(base.upper, 0, this.observedErrorDistribution.upper, 0, base.upper.length);\n        System.arraycopy(base.lower, 0, this.observedErrorDistribution.lower, 0, base.lower.length);\n    }\n\n    public void setIntervalPrecision(float[] calibration) {\n        System.arraycopy(calibration, 0, this.intervalPrecision, 0, calibration.length);\n    }\n\n    public float[] getIntervalPrecision() {\n        return Arrays.copyOf(intervalPrecision, intervalPrecision.length);\n    }\n\n    public void setErrorMean(float[] errorMean) {\n        System.arraycopy(errorMean, 0, this.errorMean, 0, errorMean.length);\n    }\n\n    public void setErrorRMSE(DiVector errorRMSE) {\n        checkArgument(this.errorRMSE.getDimensions() == errorRMSE.getDimensions(), \" incorrect input\");\n        System.arraycopy(errorRMSE.high, 0, this.errorRMSE.high, 0, errorRMSE.high.length);\n        System.arraycopy(errorRMSE.low, 0, this.errorRMSE.low, 0, 
errorRMSE.low.length);\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/GlobalLocalAnomalyDetector.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.summarization.GenericMultiCenter.DEFAULT_NUMBER_OF_REPRESENTATIVES;\nimport static com.amazon.randomcutforest.summarization.GenericMultiCenter.DEFAULT_SHRINKAGE;\nimport static java.lang.Math.abs;\nimport static java.lang.Math.exp;\nimport static java.lang.Math.min;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Optional;\nimport java.util.function.BiFunction;\n\nimport com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.store.StreamSampler;\nimport com.amazon.randomcutforest.summarization.GenericMultiCenter;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class GlobalLocalAnomalyDetector<P> extends StreamSampler<P> {\n\n    // default maximum number of clusters to consider\n    public static int DEFAULT_MAX = 10;\n\n    // an upper bound on the score\n    public static float FLOAT_MAX = 10;\n\n    // the relative weight of small clusters which should not be used in anomaly\n    // detection\n    
// this controls masking effects\n    public static double DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE = 0.005;\n\n    public static double DEFAULT_GLAD_THRESHOLD = 1.2;\n\n    // the number of steps we have to wait before reclustering; in principle this\n    // can be 1, but that would\n    // neither be meaningful nor efficient; it is set to a default of the capacity/2\n    protected int doNotreclusterWithin;\n\n    // a thresholder for flagging anomalies (same thresholder as in TRCF)\n    protected final BasicThresholder thresholder;\n\n    // remembering when the last clustering was performed\n    protected long lastCluster = 0L;\n\n    // remembers when the mean of the scores was just above a certain threshold\n    // acts as a calibration mechanism\n    protected double lastMean = 1;\n\n    // actual list of clusters\n    List<ICluster<P>> clusters;\n\n    // the number of maximum clusters to be considered; this is configurable and can\n    // be changed dynamically\n    protected int maxAllowed;\n\n    // the shrinkage parameter in multi-centroid clustering such as CURE. 
Shrinkage\n    // of 0 provides\n    // non-spherical shapes, whereas shrinkage of 1 corresponds to choosing single\n    // centroid (not recommended)\n    protected double shrinkage;\n\n    // number of representatives used in multi-centroidal clustering\n    protected int numberOfRepresentatives;\n\n    // threshold of weight for small clusters so that masking can be averted, can be\n    // changed dynamically\n    protected double ignoreBelow;\n\n    // the global function used in clustering, can be changed dynamically (but\n    // clustering would be controlled\n    // automatically due to efficiency reasons)\n    protected BiFunction<P, P, Double> globalDistance;\n\n    public static Builder<?> builder() {\n        return new Builder<>();\n    }\n\n    protected GlobalLocalAnomalyDetector(Builder<?> builder) {\n        super(builder);\n        thresholder = new BasicThresholder(builder.getTimeDecay());\n        thresholder.setAbsoluteThreshold(DEFAULT_GLAD_THRESHOLD);\n        doNotreclusterWithin = builder.doNotReclusterWithin.orElse(builder.getCapacity() / 2);\n        shrinkage = builder.shrinkage;\n        maxAllowed = builder.maxAllowed;\n        numberOfRepresentatives = builder.numberOfRepresentatives;\n        ignoreBelow = builder.ignoreBelow;\n    }\n\n    protected GlobalLocalAnomalyDetector(Builder<?> builder, BiFunction<P, P, Double> distance) {\n        this(builder);\n        globalDistance = distance;\n    }\n\n    public void setGlobalDistance(BiFunction<P, P, Double> dist) {\n        globalDistance = dist;\n    }\n\n    // sets the zFactor; increasing this number should increase precision (and will\n    // likely lower recall)\n    // this is the same as in BasicThresholder class\n    public void setZfactor(double factor) {\n\n        checkArgument(factor > 1, \"must be more than 1\");\n        thresholder.setZfactor(factor);\n    }\n\n    public double getZfactor() {\n        return thresholder.getZFactor();\n    }\n\n    // as in 
BasicThresholder class, useful in tuning\n    public void setLowerThreshold(double lowerThreshold) {\n        checkArgument(lowerThreshold > 0, \"cannot be negative\");\n\n        thresholder.setAbsoluteThreshold(lowerThreshold);\n    }\n\n    public double getLowerThreshold() {\n        return thresholder.getAbsoluteThreshold();\n    }\n\n    public int getDoNotreclusterWithin() {\n        return doNotreclusterWithin;\n    }\n\n    public void setDoNotreclusterWithin(int value) {\n        checkArgument(value > 0, \" has to be positive, recommended as 1/2 the capacity\");\n        doNotreclusterWithin = value;\n    }\n\n    public int getNumberOfRepresentatives() {\n        return numberOfRepresentatives;\n    }\n\n    public void setNumberOfRepresentatives(int reps) {\n\n        checkArgument(reps > 0, \" has to be positive\");\n        checkArgument(reps < 25, \"too large a number\");\n        numberOfRepresentatives = reps;\n    }\n\n    public double getShrinkage() {\n        return shrinkage;\n    }\n\n    public void setShrinkage(double value) {\n        checkArgument(value >= 0 && value <= 1, \" has to be in [0,1]\");\n        shrinkage = value;\n    }\n\n    public double getIgnoreBelow() {\n        return ignoreBelow;\n    }\n\n    public void setIgnoreBelow(double value) {\n        checkArgument(value >= 0 && value < 0.1, \" relative weight has to be in range [0,0.1] \");\n        ignoreBelow = value;\n    }\n\n    public int getMaxAllowed() {\n        return maxAllowed;\n    }\n\n    public void setMaxAllowed(int value) {\n        checkArgument(value >= 5 && value < 100,\n                \" too few or too many clusters are not \" + \"meaningful to this algorithm\");\n        maxAllowed = value;\n    }\n\n    /**\n     * The following provides a single invocation for scoring and updating.\n     * Semantics of the recency biased sampling (sequentiality in decision making)\n     * and efficient automatic reclustering demand that scoring and updating be\n    
 * simultaneous. While scoring is provided as a separate function to let future\n     * predictor-corrector methods reuse this code, it is strongly recommended that\n     * only the process() function be invoked.\n     * \n     * @param object            current object being considered\n     * @param weight            weight of the object (for clustering purposes as\n     *                          well as recency biased sampling)\n     * @param localDistance     a local distance metric that determines the order in\n     *                          which different clusters are considered; can be\n     *                          null, in which case the global distance would be\n     *                          used\n     * @param considerOcclusion consider occlusion by smaller dense clusters, when\n     *                          adjacent to larger and more spread out clusters\n     * @return a generic descriptor with score, threshold, anomaly grade (anomaly\n     *         grade greater than zero is likely anomalous; anomaly grade can be -ve\n     *         to allow down stream correction using semi-supervision or other\n     *         means) and a list of cluster representatives (sorted by distance)\n     *         with corresponding scores (lowest score may not correspond to lowest\n     *         distance) which can be used to investigate anomalous points further\n     */\n    public GenericAnomalyDescriptor<P> process(P object, float weight, BiFunction<P, P, Double> localDistance,\n            boolean considerOcclusion) {\n        checkArgument(weight >= 0, \"weight cannot be negative\");\n        // recompute clusters first; this enables easier merges and deserialization\n        if (sequenceNumber > lastCluster + doNotreclusterWithin) {\n            checkArgument(globalDistance != null, \"set global distance function\");\n            double currentMean = thresholder.getPrimaryDeviation().getMean();\n            if (abs(currentMean - lastMean) > 0.1 || 
currentMean > 1.7\n                    || sequenceNumber > lastCluster + 20 * doNotreclusterWithin) {\n                lastCluster = sequenceNumber;\n                lastMean = currentMean;\n                clusters = getClusters(maxAllowed, 4 * maxAllowed, 1, numberOfRepresentatives, shrinkage,\n                        globalDistance, null);\n            }\n        }\n        List<Weighted<P>> result = score(object, localDistance, considerOcclusion);\n        double threshold = thresholder.threshold();\n        double grade = 0;\n        float score = 0;\n        if (result != null) {\n            score = result.stream().map(a -> a.weight).reduce(FLOAT_MAX, Float::min);\n            if (score < FLOAT_MAX) {\n                // an exponential attribution\n                double sum = result.stream()\n                        .map(a -> (double) ((a.weight == FLOAT_MAX) ? 0 : exp(-a.weight * a.weight)))\n                        .reduce(0.0, Double::sum);\n                for (Weighted<P> item : result) {\n                    item.weight = (item.weight == FLOAT_MAX) ? 
0.0f\n                            : (float) min(1.0f, (float) exp(-item.weight * item.weight) / sum);\n                }\n            } else {\n                // uniform attribution\n                for (Weighted<P> item : result) {\n                    item.weight = (float) 1.0 / (result.size());\n                }\n            }\n            grade = thresholder.getAnomalyGrade(score, false);\n\n        }\n        // note average score would be 1\n        thresholder.update(score, min(score, thresholder.getZFactor()));\n        sample(object, weight);\n\n        return new GenericAnomalyDescriptor<>(result, score, threshold, grade);\n    }\n\n    /**\n     * The following function scores a point -- it considers an ordering of the\n     * representatives based on the local distance; and considers occlusion --\n     * namely, should an asteroid between moon and the earth be considered to be a\n     * part of a cluster around the moon or the earth? The below provides some\n     * initial geometric take on the three objects. We deliberately avoid explicit\n     * density computation since it would be difficult to define uniform definition\n     * of density.\n     * \n     * @param current           the object being scored\n     * @param localDistance     a distance function that we wish to use for this\n     *                          specific score. This can be null, and in that case\n     *                          the global distance would be used\n     * @param considerOcclusion a boolean that determines if closeby dense clusters\n     *                          can occlude membership in further away \"less dense\n     *                          cluster\"\n     * @return A list of weighted type where the index is a representative (based on\n     *         local distance) and the weight is the score corresponding to that\n     *         representative. 
The scores are sorted from least anomalous to most\n     *         anomalous.\n     */\n    public List<Weighted<P>> score(P current, BiFunction<P, P, Double> localDistance, boolean considerOcclusion) {\n        if (clusters == null) {\n            return null;\n        } else {\n            BiFunction<P, P, Double> local = (localDistance != null) ? localDistance : globalDistance;\n            double totalWeight = clusters.stream().map(e -> e.getWeight()).reduce(0.0, Double::sum);\n            ArrayList<Candidate> candidateList = new ArrayList<>();\n            for (ICluster<P> cluster : clusters) {\n                double wt = cluster.averageRadius();\n                double tempMinimum = Double.MAX_VALUE;\n                P closestInCluster = null;\n                for (Weighted<P> rep : cluster.getRepresentatives()) {\n                    if (rep.weight > ignoreBelow * totalWeight) {\n                        double tempDist = local.apply(current, rep.index);\n                        if (tempDist < 0) {\n                            throw new IllegalArgumentException(\" distance cannot be negative \");\n                        }\n                        if (tempMinimum > tempDist) {\n                            tempMinimum = tempDist;\n                            closestInCluster = rep.index;\n                        }\n                    }\n                }\n                if (closestInCluster != null) {\n                    candidateList.add(new Candidate(closestInCluster, wt, tempMinimum));\n                }\n            }\n            candidateList.sort((o1, o2) -> Double.compare(o1.distance, o2.distance));\n            checkArgument(candidateList.size() > 0, \"empty candidate list, should not happen\");\n            ArrayList<Weighted<P>> answer = new ArrayList<>();\n            if (candidateList.get(0).distance == 0.0) {\n                answer.add(new Weighted<P>(candidateList.get(0).representative, 0.0f));\n                return answer;\n            
}\n            int index = 0;\n            while (index < candidateList.size()) {\n                Candidate head = candidateList.get(index);\n                double dist = (localDistance == null) ? head.distance\n                        : globalDistance.apply(current, head.representative);\n                float tempMeasure = (head.averageRadiusOfCluster > 0.0)\n                        ? min(FLOAT_MAX, (float) (dist / head.averageRadiusOfCluster))\n                        : FLOAT_MAX;\n                answer.add(new Weighted<P>(head.representative, tempMeasure));\n                if (considerOcclusion) {\n                    int consider = index + 1;\n                    while (consider < candidateList.size()) {\n                        double occludeDistance = local.apply(head.representative,\n                                candidateList.get(consider).representative);\n                        double candidateDistance = candidateList.get(consider).distance;\n                        if (occludeDistance < candidateDistance && candidateDistance > Math\n                                .sqrt(head.distance * head.distance + occludeDistance * occludeDistance)) {\n                            // delete element\n                            candidateList.remove(consider);\n                        }\n                        consider++;\n                    }\n                }\n                ++index;\n            }\n            // we will not resort answer; the scores will be in order of distance\n            // we note that score() should be invoked with care and likely postprocessing\n            return answer;\n        }\n    }\n\n    /**\n     * a merging routine for the models which would be used in the future for\n     * distributed analysis. 
Note that there is no point of storing sequence indices\n     * explicitly in case of a merge.\n     * \n     * @param first     the first model\n     * @param second    the second model\n     * @param builder   the parameters of the new clustering\n     * @param recluster a boolean that determines immediate reclustering\n     * @param distance  the distance function of the new clustering\n     */\n    public GlobalLocalAnomalyDetector(GlobalLocalAnomalyDetector first, GlobalLocalAnomalyDetector second,\n            Builder<?> builder, boolean recluster, BiFunction<P, P, Double> distance) {\n        super(first, second, builder.getCapacity(), builder.getTimeDecay(), builder.getRandomSeed());\n        thresholder = new BasicThresholder(builder.getTimeDecay(), builder.anomalyRate, false);\n        thresholder.setAbsoluteThreshold(1.2);\n        doNotreclusterWithin = builder.doNotReclusterWithin.orElse(builder.getCapacity() / 2);\n        shrinkage = builder.shrinkage;\n        maxAllowed = builder.maxAllowed;\n        numberOfRepresentatives = builder.numberOfRepresentatives;\n        globalDistance = distance;\n        if (recluster) {\n            lastCluster = sequenceNumber;\n            clusters = getClusters(maxAllowed, 4 * maxAllowed, 1, numberOfRepresentatives, shrinkage, globalDistance,\n                    null);\n        }\n    }\n\n    /**\n     * an inner class that is useful for the scoring procedure to avoid\n     * recomputation of fields.\n     */\n    class Candidate {\n        P representative;\n        double averageRadiusOfCluster;\n        double distance;\n\n        Candidate(P representative, double averageRadiusOfCluster, double distance) {\n            this.representative = representative;\n            this.averageRadiusOfCluster = averageRadiusOfCluster;\n            this.distance = distance;\n        }\n    }\n\n    public List<ICluster<P>> getClusters() {\n        return clusters;\n    }\n\n    public List<ICluster<P>> getClusters(int 
maxAllowed, int initial, int stopAt, int representatives, double shrink,\n            BiFunction<P, P, Double> distance, List<ICluster<P>> previousClusters) {\n        BiFunction<P, Float, ICluster<P>> clusterInitializer = (a, b) -> GenericMultiCenter.initialize(a, b, shrink,\n                representatives);\n        return Summarizer.summarize(objectList, maxAllowed, initial, stopAt, false, 0.8, distance, clusterInitializer,\n                0L, false, previousClusters);\n    }\n\n    /**\n     * a builder\n     */\n    public static class Builder<T extends Builder<T>> extends StreamSampler.Builder<T> {\n        protected double shrinkage = DEFAULT_SHRINKAGE;\n        protected double ignoreBelow = DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE;\n        protected int numberOfRepresentatives = DEFAULT_NUMBER_OF_REPRESENTATIVES;\n        protected Optional<Integer> doNotReclusterWithin = Optional.empty();\n        protected int maxAllowed = DEFAULT_MAX;\n        protected double anomalyRate = 0.01;\n\n        // ignores small clusters with population weight below this threshold\n        public T ignoreBelow(double ignoreBelow) {\n            this.ignoreBelow = ignoreBelow;\n            return (T) this;\n        }\n\n        // parameters of the multi-representative CURE algorithm\n        public T shrinkage(double shrinkage) {\n            this.shrinkage = shrinkage;\n            return (T) this;\n        }\n\n        // a parameter that ensures that clustering is not recomputed too frequently,\n        // which can be both inefficient as well as jittery\n        public T doNotReclusterWithin(int refresh) {\n            this.doNotReclusterWithin = Optional.of(refresh);\n            return (T) this;\n        }\n\n        // maximum number of clusters to consider\n        public T maxAllowed(int maxAllowed) {\n            this.maxAllowed = maxAllowed;\n            return (T) this;\n        }\n\n        // parameters of the multi-representative CURE algorithm\n        
public T numberOfRepresentatives(int number) {\n            this.numberOfRepresentatives = number;\n            return (T) this;\n        }\n\n        // a flag that can adjust to the burstiness of anomalies\n        public T anomalyRate(double anomalyRate) {\n            this.anomalyRate = anomalyRate;\n            return (T) this;\n        }\n\n        @Override\n        public GlobalLocalAnomalyDetector build() {\n            return new GlobalLocalAnomalyDetector<>(this);\n        }\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/PredictorCorrector.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.parkservices.config.CorrectionMode.ALERT_ONCE;\nimport static com.amazon.randomcutforest.parkservices.config.CorrectionMode.CONDITIONAL_FORECAST;\nimport static com.amazon.randomcutforest.parkservices.config.CorrectionMode.DATA_DRIFT;\nimport static com.amazon.randomcutforest.parkservices.config.CorrectionMode.NONE;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.DEFAULT_NORMALIZATION_PRECISION;\nimport static java.lang.Math.exp;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.config.CorrectionMode;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport 
com.amazon.randomcutforest.returntypes.Neighbor;\nimport com.amazon.randomcutforest.statistics.Deviation;\nimport com.amazon.randomcutforest.util.Weighted;\n\n/**\n * This class provides a combined RCF and thresholder, both of which operate in\n * a streaming manner and respect the arrow of time.\n */\npublic class PredictorCorrector {\n    private static double DEFAULT_DIFFERENTIAL_FACTOR = 0.3;\n\n    public static int DEFAULT_NUMBER_OF_MAX_ATTRIBUTORS = 5;\n\n    public static double DEFAULT_NOISE_SUPPRESSION_FACTOR = 1.0;\n\n    public static double DEFAULT_MULTI_MODE_SAMPLING_RATE = 0.1;\n\n    public static double DEFAULT_SAMPLING_SUPPORT = 0.1;\n\n    public static int DEFAULT_RUN_ALLOWED = 2;\n    // the above will trigger on the 4th occurrence, because the first is not\n    // counted in the run\n\n    protected static int NUMBER_OF_MODES = 2;\n\n    protected final static int EXPECTED_INVERSE_DEPTH_INDEX = 0;\n\n    protected final static int DISTANCE_INDEX = 1;\n\n    // the following vectors enable suppression of anomalies\n    // the first pair correspond to additive differences\n    // the second pair correspond to multiplicative differences\n    // which are not meaningful for differenced operations\n\n    double[] ignoreNearExpectedFromBelow;\n\n    double[] ignoreNearExpectedFromAbove;\n\n    double[] ignoreNearExpectedFromBelowByRatio;\n\n    double[] ignoreNearExpectedFromAboveByRatio;\n\n    // for anomaly description we would only look at these many top attributors\n    // AExpected value is not well-defined when this number is greater than 1\n    // that being said there is no formal restriction other than the fact that the\n    // answers would be error prone as this parameter is raised.\n    protected int numberOfAttributors = DEFAULT_NUMBER_OF_MAX_ATTRIBUTORS;\n\n    protected double[] lastScore = new double[NUMBER_OF_MODES];\n\n    protected ScoringStrategy lastStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;\n\n    protected 
BasicThresholder[] thresholders;\n\n    protected int baseDimension;\n\n    protected long randomSeed;\n\n    protected double[] modeInformation;\n\n    protected Deviation[] deviationsActual;\n\n    protected Deviation[] deviationsExpected;\n\n    protected double samplingRate = DEFAULT_MULTI_MODE_SAMPLING_RATE;\n\n    protected double noiseFactor = DEFAULT_NOISE_SUPPRESSION_FACTOR;\n\n    protected boolean autoAdjust = false;\n\n    protected RCFComputeDescriptor lastDescriptor;\n\n    protected int runLength;\n\n    protected boolean ignoreDrift = false;\n\n    protected double samplingSupport = DEFAULT_SAMPLING_SUPPORT;\n\n    public PredictorCorrector(double timeDecay, double anomalyRate, boolean adjust, int baseDimension,\n            long randomSeed) {\n        this.thresholders = new BasicThresholder[NUMBER_OF_MODES];\n        thresholders[0] = new BasicThresholder(timeDecay, anomalyRate, adjust);\n        thresholders[1] = new BasicThresholder(timeDecay);\n        this.baseDimension = baseDimension;\n        this.randomSeed = randomSeed;\n        this.autoAdjust = adjust;\n        if (adjust) {\n            this.deviationsActual = new Deviation[baseDimension];\n            this.deviationsExpected = new Deviation[baseDimension];\n            for (int i = 0; i < baseDimension; i++) {\n                this.deviationsActual[i] = new Deviation(timeDecay);\n                this.deviationsExpected[i] = new Deviation(timeDecay);\n            }\n        }\n        ignoreNearExpectedFromAbove = new double[baseDimension];\n        ignoreNearExpectedFromBelow = new double[baseDimension];\n        ignoreNearExpectedFromAboveByRatio = new double[baseDimension];\n        ignoreNearExpectedFromBelowByRatio = new double[baseDimension];\n    }\n\n    // for mappers\n    public PredictorCorrector(BasicThresholder[] thresholders, Deviation[] deviations, int baseDimension,\n            long randomSeed) {\n        checkArgument(thresholders.length > 0, \" cannot be empty\");\n  
      this.thresholders = new BasicThresholder[NUMBER_OF_MODES];\n        int size = min(thresholders.length, NUMBER_OF_MODES);\n        for (int i = 0; i < size; i++) {\n            this.thresholders[i] = thresholders[i];\n        }\n        Deviation deviation = thresholders[0].getPrimaryDeviation();\n        for (int i = size; i < NUMBER_OF_MODES; i++) {\n            this.thresholders[i] = new BasicThresholder(thresholders[0].getPrimaryDeviation().getDiscount());\n        }\n        this.deviationsActual = new Deviation[baseDimension];\n        this.deviationsExpected = new Deviation[baseDimension];\n        if (deviations != null) {\n            checkArgument(deviations.length == 2 * baseDimension, \"incorrect state\");\n            for (int i = 0; i < baseDimension; i++) {\n                deviationsActual[i] = deviations[i];\n            }\n            for (int i = 0; i < baseDimension; i++) {\n                deviationsExpected[i] = deviations[i + baseDimension];\n            }\n        }\n        this.baseDimension = baseDimension;\n        this.randomSeed = randomSeed;\n        ignoreNearExpectedFromAbove = new double[baseDimension];\n        ignoreNearExpectedFromBelow = new double[baseDimension];\n        ignoreNearExpectedFromAboveByRatio = new double[baseDimension];\n        ignoreNearExpectedFromBelowByRatio = new double[baseDimension];\n    }\n\n    public PredictorCorrector(BasicThresholder thresholder, int baseDimension) {\n        this(new BasicThresholder[] { thresholder }, null, baseDimension, 0L);\n    }\n\n    protected double nextDouble() {\n        Random random = new Random(randomSeed);\n        randomSeed = random.nextLong();\n        return random.nextDouble();\n    }\n\n    /**\n     * uses the attribution information to find the time slice which contributed\n     * most to the anomaly note that the basic length of the vectors is shingleSize\n     * * basDimension; the startIndex corresponds to the shingle entry beyond which\n     * the 
search is performed. if two anomalies are in a shingle it would focus on\n     * later one, the previous one would have been (hopefully) reported earlier.\n     *\n     * @param diVector      attribution of current shingle\n     * @param baseDimension number of attributes/variables in original data\n     * @param startIndex    time slice of the farthest in the past we are looking\n     * @return the index (in this shingle) which has the largest contributions\n     */\n    protected int maxContribution(DiVector diVector, int baseDimension, int startIndex) {\n        double val = 0;\n        int index = startIndex;\n        int position = diVector.getDimensions() + startIndex * baseDimension;\n        for (int i = 0; i < baseDimension; i++) {\n            val += diVector.getHighLowSum(i + position);\n        }\n        for (int i = position + baseDimension; i < diVector.getDimensions(); i += baseDimension) {\n            double sum = 0;\n            for (int j = 0; j < baseDimension; j++) {\n                sum += diVector.getHighLowSum(i + j);\n            }\n            if (sum > val) {\n                val = sum;\n                index = (i - diVector.getDimensions()) / baseDimension;\n            }\n        }\n        return index;\n    }\n\n    /**\n     * the following creates the expected point based on RCF forecasting\n     * \n     * @param diVector      the attribution vector that is used to choose which\n     *                      elements are to be predicted\n     * @param position      the block of (multivariate) elements we are focusing on\n     * @param baseDimension the base dimension of the block\n     * @param point         the point near which we wish to predict\n     * @param forest        the resident RCF\n     * @return a vector that is most likely, conditioned on changing a few elements\n     *         in the block at position\n     */\n    protected float[] getExpectedPoint(DiVector diVector, int position, int baseDimension, float[] point,\n  
          RandomCutForest forest) {\n        int[] likelyMissingIndices;\n        if (baseDimension == 1) {\n            likelyMissingIndices = new int[] { position };\n        } else {\n            double sum = 0;\n            double[] values = new double[baseDimension];\n            for (int i = 0; i < baseDimension; i++) {\n                sum += values[i] = diVector.getHighLowSum(i + position);\n            }\n            Arrays.sort(values);\n            int pick = 1;\n            if (values[baseDimension - pick] < 0.1 * sum) {\n                // largest contributor is only 10 percent; there are too many to predict\n                return null;\n            }\n\n            double threshold = min(0.1 * sum, 0.1);\n            while (pick < baseDimension && values[baseDimension - pick - 1] >= threshold) {\n                ++pick;\n            }\n\n            if (pick > numberOfAttributors) {\n                // we chose everything; not usable\n                return null;\n            }\n\n            double cutoff = values[baseDimension - pick];\n            likelyMissingIndices = new int[pick];\n            int count = 0;\n            for (int i = 0; i < baseDimension && count < pick; i++) {\n                if (diVector.getHighLowSum(i + position) >= cutoff\n                        && (count == 0 || diVector.getHighLowSum(i + position) > sum * 0.1)) {\n                    likelyMissingIndices[count++] = position + i;\n                }\n            }\n        }\n        if (likelyMissingIndices.length > 0.5 * forest.getDimensions()) {\n            return null;\n        } else {\n            return forest.imputeMissingValues(point, likelyMissingIndices.length, likelyMissingIndices);\n        }\n    }\n\n    /**\n     * a subroutine that helps eliminates flagging anomalies too close to a\n     * previously flagged anomaly -- this avoids the repetition due to shingling;\n     * but still can detect some anomalies if the deviations are usual\n     * \n     * 
@param candidate             the candidate attribution of the point\n     * @param difference            the gap (in RCF space) from the last anomaly\n     * @param baseDimension         the size of a block\n     * @param ideal                 an idealized version of the candidate (can be\n     *                              null) where the most offending elements are\n     *                              imputed out\n     * @param lastAnomalyDescriptor the description of the last anomaly\n     * @param workingThreshold      the threshold to exceed\n     * @return true if the candidate is sufficiently different and false otherwise\n     */\n\n    protected boolean trigger(DiVector candidate, int difference, int baseDimension, DiVector ideal,\n            RCFComputeDescriptor lastAnomalyDescriptor, double workingThreshold) {\n        int dimensions = candidate.getDimensions();\n        if (difference >= dimensions || ideal == null) {\n            return true;\n        }\n        double lastAnomalyScore = lastAnomalyDescriptor.getRCFScore();\n        double differentialRemainder = 0;\n        for (int i = dimensions - difference; i < dimensions; i++) {\n            differentialRemainder += Math.abs(candidate.low[i] - ideal.low[i])\n                    + Math.abs(candidate.high[i] - ideal.high[i]);\n        }\n        return (differentialRemainder > DEFAULT_DIFFERENTIAL_FACTOR * lastAnomalyScore)\n                && differentialRemainder * dimensions / difference > 1.2 * workingThreshold;\n\n    }\n\n    /**\n     * corrects the effect of a last anomaly -- note that an anomaly by definition\n     * will alter the shift and scale of transformations. 
This computation fixes one\n     * single large anomaly.\n     * \n     * @param transformMethod       the transformation method used\n     * @param gap                   the number of steps the anomaly occurred in the\n     *                              past\n     * @param lastAnomalyDescriptor the descriptor of the last anomaly\n     * @param currentScale          the current scale\n     * @return a correction vector\n     */\n    public double[] getCorrectionOfLastAnomaly(TransformMethod transformMethod, int gap,\n            RCFComputeDescriptor lastAnomalyDescriptor, double[] currentScale) {\n        double[] deltaShift = lastAnomalyDescriptor.getDeltaShift();\n        double[] answer = new double[currentScale.length];\n        // correct the effect of shifts in last observed anomaly because the anomaly may\n        // have skewed the shift and scale -- but the gap cannot last forever\n        // otherwise this will always change the point and force a costlier path\n        if (deltaShift != null && gap < 2 * lastAnomalyDescriptor.getShingleSize()\n                && (transformMethod == TransformMethod.NORMALIZE || transformMethod == TransformMethod.SUBTRACT_MA)) {\n            double factor = exp(-gap * lastAnomalyDescriptor.getTransformDecay());\n            for (int y = 0; y < answer.length; y++) {\n                answer[y] = (currentScale[y] == 0) ? 
0 : deltaShift[y] * factor / currentScale[y];\n            }\n        }\n        return answer;\n    }\n\n    /**\n     * a first stage corrector that attempts to fix the after effects of a previous\n     * anomaly which may be in the shingle, or just preceding the shingle\n     *\n     * @param point                 the current (transformed) point under evaluation\n     * @param gap                   the relative position of the previous anomaly\n     *                              being corrected\n     * @param shingleSize           size of the shingle\n     * @param baseDimensions        number of dimensions in each shingle\n     * @param currentScale          scale for current point\n     * @param transformMethod       transformation Method\n     * @param lastAnomalyDescriptor description of the last anomaly\n     * @return the corrected point\n     */\n    protected <P extends AnomalyDescriptor> float[] applyPastCorrector(float[] point, int gap, int shingleSize,\n            int baseDimensions, double[] currentScale, TransformMethod transformMethod,\n            RCFComputeDescriptor lastAnomalyDescriptor) {\n        float[] correctedPoint = Arrays.copyOf(point, point.length);\n\n        // following will fail for first 100ish points and if dimension < 3\n        if (lastAnomalyDescriptor.getExpectedRCFPoint() != null) {\n            float[] lastExpectedPoint = lastAnomalyDescriptor.getExpectedRCFPoint();\n            float[] lastAnomalyPoint = lastAnomalyDescriptor.getRCFPoint();\n            int lastRelativeIndex = lastAnomalyDescriptor.getRelativeIndex();\n\n            // the following will fail for shingleSize 1\n            if (gap < shingleSize) {\n                System.arraycopy(lastExpectedPoint, gap * baseDimensions, correctedPoint, 0,\n                        point.length - gap * baseDimensions);\n            }\n            if (gap <= shingleSize && lastRelativeIndex == 0) {\n                if (transformMethod == TransformMethod.DIFFERENCE\n        
                || transformMethod == TransformMethod.NORMALIZE_DIFFERENCE) {\n                    for (int y = 0; y < baseDimensions; y++) {\n                        correctedPoint[point.length - gap * baseDimensions\n                                + y] += lastAnomalyPoint[point.length - baseDimensions + y]\n                                        - lastExpectedPoint[point.length - baseDimensions + y];\n                    }\n                }\n                if (lastAnomalyDescriptor.getForestMode() == ForestMode.TIME_AUGMENTED) {\n                    // definitely correct the time dimension which is always differenced\n                    // this applies to the non-differenced cases\n                    correctedPoint[point.length - (gap - 1) * baseDimensions - 1] += lastAnomalyPoint[point.length - 1]\n                            - lastExpectedPoint[point.length - 1];\n                }\n            }\n        }\n        double[] correctionVector = getCorrectionOfLastAnomaly(transformMethod, gap, lastAnomalyDescriptor,\n                currentScale);\n        int number = min(gap, shingleSize);\n        for (int y = 0; y < baseDimensions; y++) {\n            for (int j = 0; j < number; j++) {\n                correctedPoint[point.length - (number - j) * baseDimensions + y] += correctionVector[y];\n            }\n        }\n        return correctedPoint;\n    }\n\n    /**\n     * The following verifies that the overall shingled point is not explainable by\n     * floating point precision. It then verifies that the point is not within\n     * noiseFactor of the standard deviation of the successive differences (in the\n     * multivariate setting). 
Finally, it caps the maximum grade possible for this\n     * point\n     * \n     * @param result the transcript of the current point\n     * @param point  the current point\n     * @param <P>    Either AnomalyDescriptor of ForecastDescriptor\n     * @return a cap on the grade (can be 0 for filtering out)\n     */\n    protected <P extends AnomalyDescriptor> double centeredTransformPass(P result, float[] point) {\n        double maxFactor = 0;\n        // check entire point or some large value\n        double[] scale = result.getScale();\n        double[] shift = result.getShift();\n        double[] deviations = result.getDeviations();\n        for (int i = 0; i < point.length && maxFactor == 0; i++) {\n            double scaleFactor = (scale == null) ? 1.0 : scale[i % baseDimension];\n            double shiftBase = (shift == null) ? 0 : shift[i % baseDimension];\n            if (Math.abs(point[i]) * scaleFactor > DEFAULT_NORMALIZATION_PRECISION * (1 + Math.abs(shiftBase))) {\n                maxFactor = 1;\n            }\n        }\n        // check most recent input\n        if (maxFactor > 0) {\n            for (int i = 0; i < baseDimension; i++) {\n                double scaleFactor = (scale == null) ? 1.0 : Math.abs(scale[i]);\n                double z = Math.abs(point[point.length - baseDimension + i]) * scaleFactor;\n                double deviation = (deviations == null) ? 0 : Math.abs(deviations[i + baseDimension]);\n                if (z > noiseFactor * deviation) {\n                    maxFactor = (deviation == 0) ? 
1 : min(1.0, max(maxFactor, z / (3 * deviation)));\n                }\n            }\n        }\n        return maxFactor;\n    }\n\n    /**\n     * The following is useful for managing late detection of anomalies -- this\n     * calculates the zig-zag over the values in the late detection\n     * \n     * @param point         the point being scored\n     * @param startPosition the position of the block where we think the anomaly\n     *                      started\n     * @param index         the specific index in the block being tracked\n     * @param baseDimension the size of the block\n     * @param differenced   has differencing been performed already\n     * @return the average L1 deviation\n     */\n    double calculatePathDeviation(float[] point, int startPosition, int index, int baseDimension, boolean differenced) {\n        int position = startPosition;\n        double variation = 0;\n        int observation = 0;\n        while (position + index + baseDimension < point.length) {\n            variation += (differenced) ? Math.abs(point[position + index])\n                    : Math.abs(point[position + index] - point[position + baseDimension + index]);\n            position += baseDimension;\n            ++observation;\n        }\n        return (observation == 0) ? 
0 : variation / observation;\n    }\n\n    protected <P extends AnomalyDescriptor> DiVector constructUncertaintyBox(float[] point, int startPosition,\n            P result) {\n        TransformMethod method = result.getTransformMethod();\n        boolean differenced = (method == TransformMethod.DIFFERENCE)\n                || (method == TransformMethod.NORMALIZE_DIFFERENCE);\n        double[] scale = result.getScale();\n        double[] shift = result.getShift();\n        int baseDimensions = result.getDimension() / result.getShingleSize();\n        double[] gapLow = new double[baseDimensions];\n        double[] gapHigh = new double[baseDimensions];\n        for (int y = 0; y < baseDimensions; y++) {\n            // 'a' represents the scaled value of the current point for dimension 'y'.\n            // Given that 'point[startPosition + y]' is the normalized value of the actual\n            // data point (x - mean) / std,\n            // and 'scale[y]' is the standard deviation (std), we have:\n            // a = std * ((x - mean) / std) = x - mean\n            double a = scale[y] * point[startPosition + y];\n\n            // 'shiftBase' is the shift value for dimension 'y', which is the mean (mean)\n            double shiftBase = shift[y];\n\n            // Initialize 'shiftAmount' to zero. 
This will account for numerical precision\n            // adjustments later\n            double shiftAmount = 0;\n\n            // If the mean ('shiftBase') is not zero, adjust 'shiftAmount' to account for\n            // numerical precision\n            if (shiftBase != 0) {\n                // 'shiftAmount' accounts for potential numerical errors due to shifting and\n                // scaling\n                shiftAmount += DEFAULT_NORMALIZATION_PRECISION * (scale[y] + Math.abs(shiftBase));\n            }\n\n            // Calculate the average L1 deviation along the path for dimension 'y'.\n            // This function computes the average absolute difference between successive\n            // values in the shingle,\n            // helping to capture recent fluctuations or trends in the data.\n            double pathGap = calculatePathDeviation(point, startPosition, y, baseDimension, differenced);\n\n            // 'noiseGap' is calculated based on the noise factor and the deviation for\n            // dimension 'y'.\n            // It represents the expected variation due to noise, scaled appropriately.\n            double noiseGap = noiseFactor * result.getDeviations()[baseDimension + y];\n\n            // 'gap' is the maximum of the scaled 'pathGap' and 'noiseGap', adjusted by\n            // 'shiftAmount'\n            // and a small constant to ensure it's not zero. 
This gap accounts for recent\n            // deviations and noise,\n            // and serves as a baseline threshold for detecting anomalies.\n            double gap = max(scale[y] * pathGap, noiseGap) + shiftAmount + DEFAULT_NORMALIZATION_PRECISION;\n\n            // Compute 'gapLow[y]' and 'gapHigh[y]', which are thresholds to determine if\n            // the deviation is significant\n            // Since 'a = x - mean' and 'shiftBase = mean', then 'a + shiftBase = x - mean +\n            // mean = x'\n            // Therefore, 'Math.abs(a + shiftBase)' simplifies to the absolute value of the\n            // actual data point |x|\n            // For 'gapLow[y]', calculate the maximum of:\n            // - 'ignoreNearExpectedFromBelow[y]', an absolute threshold for ignoring small\n            // deviations below expected\n            // - 'ignoreNearExpectedFromBelowByRatio[y] * |x|', a relative threshold based\n            // on the actual value x\n            // - 'gap', the calculated deviation adjusted for noise and precision\n            // This ensures that minor deviations within the specified ratio or fixed\n            // threshold are ignored,\n            // reducing false positives.\n            gapLow[y] = max(max(ignoreNearExpectedFromBelow[y],\n                    ignoreNearExpectedFromBelowByRatio[y] * (Math.abs(a + shiftBase))), gap);\n\n            // Similarly, for 'gapHigh[y]':\n            // - 'ignoreNearExpectedFromAbove[y]', an absolute threshold for ignoring small\n            // deviations above expected\n            // - 'ignoreNearExpectedFromAboveByRatio[y] * |x|', a relative threshold based\n            // on the actual value x\n            // - 'gap', the calculated deviation adjusted for noise and precision\n            // This threshold helps in ignoring anomalies that are within an acceptable\n            // deviation ratio from the expected value.\n            gapHigh[y] = max(max(ignoreNearExpectedFromAbove[y],\n                
    ignoreNearExpectedFromAboveByRatio[y] * (Math.abs(a + shiftBase))), gap);\n        }\n        return new DiVector(gapHigh, gapLow);\n    }\n\n    protected boolean withinGap(DiVector gap, int startPosition, double[] scale, float[] point, float[] otherPoint,\n            int baseDimension) {\n        boolean answer = false;\n        // only for input dimensions, for which scale is defined currently\n        for (int y = 0; y < baseDimension && !answer; y++) {\n            double a = scale[y] * point[startPosition + y];\n            double b = scale[y] * otherPoint[startPosition + y];\n            boolean lower = (a < b - gap.low[y]);\n            boolean upper = (a > b + gap.high[y]);\n            answer = lower || upper;\n        }\n        return !answer;\n    }\n\n    /**\n     * uses the native approximate near neighbor in RCF to determine what fraction\n     * of samples from different trees are in the uncertainty box around the queried\n     * point\n     * \n     * @param uncertaintyBox the potentially asymmetric box around a point (original\n     *                       space)\n     * @param point          the point in question\n     * @param correctedPoint any correction applied to the point based on prior\n     *                       anomalies\n     * @param startPosition  the potential location of the anomaly\n     * @param result         the transcript of the current estimation\n     * @param forest         the resident RCF\n     * @param <P>            an extension of AnomalyDescriptor (to support forecast)\n     * @return true if there is enough mass within the box\n     */\n    protected <P extends AnomalyDescriptor> boolean explainedByConditionalField(DiVector uncertaintyBox, float[] point,\n            float[] correctedPoint, int startPosition, P result, RandomCutForest forest) {\n        List<Neighbor> list = forest.getNearNeighborsInSample(correctedPoint);\n        double averageDistance = list.stream().mapToDouble(e -> 
e.distance).average().getAsDouble();\n        double weight = 0;\n        for (Neighbor e : list) {\n            if (e.distance < 1.1 * averageDistance && withinGap(uncertaintyBox, startPosition, result.getScale(), point,\n                    e.point, point.length / result.getShingleSize())) {\n                weight += e.count;\n            }\n        }\n        return (weight >= samplingSupport * forest.getNumberOfTrees());\n    }\n\n    /**\n     * populates the scores and sets the score and attribution vectors; note some\n     * attributions can remain null (for efficiency reasons)\n     *\n     * @param strategy          the scoring strategy\n     * @param point             the current point being evaluated\n     * @param forest            the resident RCF\n     * @param scoreVector       the vector of scores\n     * @param attributionVector the vector of attributions\n     * @return the index of the score/attribution that is relevant\n     */\n\n    protected int populateScores(ScoringStrategy strategy, float[] point, RandomCutForest forest, double[] scoreVector,\n            DiVector[] attributionVector) {\n        if (strategy != ScoringStrategy.DISTANCE) {\n            scoreVector[EXPECTED_INVERSE_DEPTH_INDEX] = forest.getAnomalyScore(point);\n            if (strategy == ScoringStrategy.MULTI_MODE || strategy == ScoringStrategy.MULTI_MODE_RECALL) {\n                attributionVector[DISTANCE_INDEX] = forest.getSimpleDensity(point).distances;\n                scoreVector[DISTANCE_INDEX] = attributionVector[DISTANCE_INDEX].getHighLowSum();\n            }\n            return 0;\n        } else {\n            attributionVector[DISTANCE_INDEX] = forest.getSimpleDensity(point).distances;\n            scoreVector[DISTANCE_INDEX] = attributionVector[DISTANCE_INDEX].getHighLowSum();\n            return 1;\n        }\n    }\n\n    /**\n     * returned the attribution vector; it tries to reuse cached version to save\n     * computation\n     * \n     * @param choice  
          the mode of the attribution in question\n     * @param point             the point being considered\n     * @param attributionVector the vector (cached) of attributions\n     * @param forest            the resident RCF\n     * @return the attribution correspond to the mode of attribution\n     */\n    DiVector getCachedAttribution(int choice, float[] point, DiVector[] attributionVector, RandomCutForest forest) {\n        if (attributionVector[choice] == null) {\n            checkArgument(choice == EXPECTED_INVERSE_DEPTH_INDEX, \"incorrect cached state of scores\");\n            attributionVector[EXPECTED_INVERSE_DEPTH_INDEX] = forest.getAnomalyAttribution(point);\n        }\n        return attributionVector[choice];\n    }\n\n    /**\n     * computes the attribution of a (candidate) point based on mode, when the\n     * results are not expected to be cached\n     * \n     * @param choice the mode\n     * @param point  the point in question\n     * @param forest the resident RCF\n     * @return the attribution of that mode\n     */\n    DiVector getNewAttribution(int choice, float[] point, RandomCutForest forest) {\n        if (choice == EXPECTED_INVERSE_DEPTH_INDEX) {\n            return forest.getAnomalyAttribution(point);\n        } else {\n            return forest.getSimpleDensity(point).distances;\n        }\n    }\n\n    /**\n     * same as getNewAttribution, except when just the score suffices\n     * \n     * @param choice the mode in question\n     * @param point  the point in question\n     * @param forest the resident RCF\n     * @return the score corresponding to the mode\n     */\n    double getNewScore(int choice, float[] point, RandomCutForest forest) {\n        if (choice == EXPECTED_INVERSE_DEPTH_INDEX) {\n            return forest.getAnomalyScore(point);\n        } else {\n            return forest.getSimpleDensity(point).distances.getHighLowSum();\n        }\n    }\n\n    /**\n     * returns the threshold and grade corresponding to a 
mode choice (based on\n     * scoring strategy) currently the scoring strategy is unused, but would likely\n     * be used in future\n     * \n     * @param strategy    the scoring strategy\n     * @param choice      the chosen mode\n     * @param scoreVector the vector of scores\n     * @param method      the transformation method used\n     * @param dimension   the number of dimensions in RCF (used in auto adjustment\n     *                    of thresholds)\n     * @param shingleSize the shingle size (used in auto adjustment of thresholds)\n     * @return a weighted object where the index is the threshold and the weight is\n     *         the grade\n     */\n    protected Weighted<Double> getThresholdAndGrade(ScoringStrategy strategy, int choice, double[] scoreVector,\n            TransformMethod method, int dimension, int shingleSize) {\n        if (choice == EXPECTED_INVERSE_DEPTH_INDEX) {\n            return thresholders[EXPECTED_INVERSE_DEPTH_INDEX]\n                    .getThresholdAndGrade(scoreVector[EXPECTED_INVERSE_DEPTH_INDEX], method, dimension, shingleSize);\n        } else {\n            return thresholders[DISTANCE_INDEX].getPrimaryThresholdAndGrade(scoreVector[DISTANCE_INDEX]);\n        }\n    }\n\n    /**\n     * the strategy to save scores based on the scoring strategy\n     * \n     * @param strategy       the strategy\n     * @param choice         the mode for which corrected score applies\n     * @param scoreVector    the vector of scores\n     * @param correctedScore the estimated score with corrections (can be the same\n     *                       as score)\n     * @param method         the transformation method used\n     * @param shingleSize    the shingle size\n     */\n    protected void saveScores(ScoringStrategy strategy, int choice, double[] scoreVector, double correctedScore,\n            TransformMethod method, int shingleSize) {\n\n        if (scoreVector[EXPECTED_INVERSE_DEPTH_INDEX] > 0) {\n            double temp = (choice == 
EXPECTED_INVERSE_DEPTH_INDEX) ? correctedScore\n                    : scoreVector[EXPECTED_INVERSE_DEPTH_INDEX];\n            double last = (strategy == lastStrategy) ? lastScore[EXPECTED_INVERSE_DEPTH_INDEX] : 0;\n            thresholders[EXPECTED_INVERSE_DEPTH_INDEX].update(scoreVector[EXPECTED_INVERSE_DEPTH_INDEX], temp, last,\n                    method);\n        }\n        if (scoreVector[DISTANCE_INDEX] > 0) {\n            thresholders[DISTANCE_INDEX].update(scoreVector[DISTANCE_INDEX], lastScore[DISTANCE_INDEX]);\n        }\n        if (shingleSize > 1) {\n            for (int i = 0; i < NUMBER_OF_MODES; i++) {\n                lastScore[i] = scoreVector[i];\n            }\n        }\n    }\n\n    /**\n     * the core of the predictor-corrector thresholding for shingled data points. It\n     * uses a simple threshold provided by the basic thresholder. It first checks if\n     * obvious effects of the present; and absent such, for repeated breaches, how\n     * critical is the new current information\n     *\n     * @param result                    returns the augmented description\n     * @param lastSignificantDescriptor state of the computation for the last\n     *                                  candidate anomaly\n     * @param forest                    the resident RCF\n     * @return the anomaly descriptor result (which has plausibly mutated)\n     */\n    protected <P extends AnomalyDescriptor> P detect(P result, RCFComputeDescriptor lastSignificantDescriptor,\n            RandomCutForest forest) {\n        if (result.getRCFPoint() == null) {\n            lastDescriptor = result.copyOf();\n            return result;\n        }\n        float[] point = result.getRCFPoint();\n        ScoringStrategy strategy = result.getScoringStrategy();\n        double[] scoreVector = new double[NUMBER_OF_MODES];\n        DiVector[] attributionVector = new DiVector[NUMBER_OF_MODES];\n\n        final int originalChoice = populateScores(strategy, point, forest, 
scoreVector, attributionVector);\n\n        DiVector attribution = null;\n        final double score = scoreVector[originalChoice];\n\n        // we will not alter the basic score from RCF under any circumstance\n        result.setRCFScore(score);\n\n        // we will not have zero scores affect any thresholding\n        if (score == 0) {\n            lastDescriptor = result.copyOf();\n            return result;\n        }\n\n        long internalTimeStamp = result.getInternalTimeStamp();\n\n        int shingleSize = result.getShingleSize();\n\n        Weighted<Double> thresholdAndGrade = getThresholdAndGrade(strategy, originalChoice, scoreVector,\n                result.getTransformMethod(), point.length, shingleSize);\n        final double originalThreshold = thresholdAndGrade.index;\n        double workingThreshold = originalThreshold;\n        double workingGrade = thresholdAndGrade.weight;\n        // we will not alter this\n        result.setThreshold(originalThreshold);\n\n        boolean candidate = false;\n\n        if (workingGrade > 0 && lastDescriptor != null) {\n            if (score > lastDescriptor.getRCFScore()) {\n                candidate = true;\n            } else {\n                double runDiscount = max(workingThreshold, lastDescriptor.getThreshold())\n                        * (1 + max(0.2, runLength / (2.0 * max(10, shingleSize))));\n                if (lastDescriptor.getRCFScore() - lastDescriptor.getThreshold() > score - runDiscount) {\n                    // the 'run' or the sequence of observations that create large scores\n                    // because of data (concept?) 
drift is defined to increase permissively\n                    // so that it is clear when the threshold is above the scores\n                    // a consequence of this can be masking -- anomalies just after a run/drift\n                    // would be difficult to determine -- but those should be difficult to determine\n                    candidate = true;\n                }\n            }\n        }\n\n        if (workingGrade > 0 && strategy == ScoringStrategy.MULTI_MODE) {\n            Weighted<Double> temp = thresholders[DISTANCE_INDEX]\n                    .getPrimaryThresholdAndGrade(scoreVector[DISTANCE_INDEX]);\n            if (temp.index > 0 && temp.weight == 0) {\n                // there is a valid threshold and the grade is 0\n                workingGrade = 0;\n                result.setCorrectionMode(CorrectionMode.MULTI_MODE);\n            }\n        }\n\n        if (lastDescriptor != null && lastDescriptor.getExpectedRCFPoint() != null) {\n            lastSignificantDescriptor = lastDescriptor;\n        }\n\n        int gap = (int) (internalTimeStamp - lastSignificantDescriptor.getInternalTimeStamp());\n        int difference = gap * baseDimension;\n\n        float[] correctedPoint = null;\n        double correctedScore = score;\n        float[] expectedPoint = null;\n        boolean inHighScoreRegion = false;\n        int index = 0;\n        int relative = (gap >= shingleSize) ? 
-shingleSize : -gap;\n\n        int choice = originalChoice;\n        if (strategy == ScoringStrategy.MULTI_MODE_RECALL && workingGrade == 0 && gap >= shingleSize) {\n            // if overlapping shingles are being ruled out, then reconsidering those may not\n            // be useful\n            Weighted<Double> temp = thresholders[DISTANCE_INDEX]\n                    .getPrimaryThresholdAndGrade(scoreVector[DISTANCE_INDEX]);\n            choice = DISTANCE_INDEX;\n            correctedScore = scoreVector[DISTANCE_INDEX];\n            workingGrade = temp.weight;\n            workingThreshold = temp.index;\n        }\n\n        // we perform basic correction\n        correctedPoint = applyPastCorrector(point, gap, shingleSize, point.length / shingleSize, result.getScale(),\n                result.getTransformMethod(), lastSignificantDescriptor);\n\n        /**\n         * we check if the point is too close to 0 for centered transforms as well as\n         * explainable by the default distribution of differences this acts as a filter\n         * and an upper bound for the grade\n         */\n        if (workingGrade > 0) {\n            workingGrade *= centeredTransformPass(result, correctedPoint);\n            if (workingGrade == 0) {\n                result.setCorrectionMode(CorrectionMode.NOISE);\n            }\n        }\n\n        if (workingGrade > 0) {\n            inHighScoreRegion = true;\n\n            if (!Arrays.equals(correctedPoint, point)) {\n                attribution = getNewAttribution(choice, correctedPoint, forest);\n                correctedScore = attribution.getHighLowSum();\n                if (correctedScore > workingThreshold) {\n                    int tempIndex = maxContribution(attribution, point.length / shingleSize, relative) + 1;\n                    // use the additional new data for explanation\n                    int tempStartPosition = point.length + (tempIndex - 1) * point.length / shingleSize;\n                    float[] 
tempPoint = getExpectedPoint(attribution, tempStartPosition, point.length / shingleSize,\n                            correctedPoint, forest);\n                    if (tempPoint != null) {\n                        DiVector tempAttribution = getNewAttribution(choice, tempPoint, forest);\n                        correctedScore = tempAttribution.getHighLowSum();\n                        if (!trigger(attribution, difference, point.length / shingleSize, tempAttribution,\n                                lastSignificantDescriptor, workingThreshold)) {\n                            workingGrade = 0;\n                            result.setCorrectionMode(CorrectionMode.ANOMALY_IN_SHINGLE);\n                        }\n                    }\n                }\n            } else {\n                attribution = getCachedAttribution(choice, point, attributionVector, forest);\n            }\n\n            if (workingGrade > 0 && result.getScale() != null) {\n                index = (shingleSize == 1) ? 
0 : maxContribution(attribution, point.length / shingleSize, relative) + 1;\n\n                int startPosition = point.length + (index - 1) * point.length / shingleSize;\n                DiVector uncertaintyBox = constructUncertaintyBox(point, startPosition, result);\n\n                if (autoAdjust && explainedByConditionalField(uncertaintyBox, point, correctedPoint, startPosition,\n                        result, forest)) {\n                    workingGrade = 0;\n                    result.setCorrectionMode(CONDITIONAL_FORECAST);\n                } else {\n\n                    expectedPoint = getExpectedPoint(attribution, startPosition, point.length / shingleSize,\n                            correctedPoint, forest);\n                    if (expectedPoint != null) {\n                        if (difference < point.length) {\n                            DiVector newAttribution = getNewAttribution(choice, expectedPoint, forest);\n                            correctedScore = newAttribution.getHighLowSum();\n                            if (!trigger(attribution, difference, point.length / shingleSize, newAttribution,\n                                    lastSignificantDescriptor, workingThreshold)) {\n                                workingGrade = 0;\n                                result.setCorrectionMode(CorrectionMode.ANOMALY_IN_SHINGLE);\n                            }\n                        } else {\n                            // attribution will not be used\n                            correctedScore = getNewScore(choice, point, forest);\n                        }\n\n                        if (workingGrade > 0 && withinGap(uncertaintyBox, startPosition, result.getScale(), point,\n                                expectedPoint, point.length / shingleSize)) {\n                            workingGrade = 0;\n                            result.setCorrectionMode(CorrectionMode.FORECAST);\n                        }\n                    }\n                }\n      
      }\n            if (workingGrade == 0) {\n                // note score is the original score\n                correctedScore = score;\n            }\n        }\n\n        if (candidate) {\n            if (autoAdjust) {\n                for (int y = 0; y < baseDimension; y++) {\n                    deviationsActual[y].update(point[point.length - baseDimension + y]);\n                    if (expectedPoint != null) {\n                        deviationsExpected[y].update(expectedPoint[point.length - baseDimension + y]);\n                    }\n                }\n                if (runLength > DEFAULT_RUN_ALLOWED && workingGrade > 0) {\n                    boolean within = true;\n                    for (int y = 0; y < baseDimension && within; y++) {\n                        within = Math\n                                .abs(deviationsActual[y].getMean() - point[point.length - baseDimension + y]) < max(\n                                        2 * deviationsActual[y].getDeviation(),\n                                        // results are in original space -- need to scale back to RCF space\n                                        noiseFactor * result.getDeviations()[baseDimension + y] / result.getScale()[y]);\n                        // estimation of noise from within the run as well as a long term estimation\n                        if (expectedPoint != null) {\n                            double u = Math.abs(\n                                    deviationsExpected[y].getMean() - expectedPoint[point.length - baseDimension + y]);\n                            within = within && Math.abs(deviationsExpected[y].getMean()\n                                    - expectedPoint[point.length - baseDimension + y]) < 2\n                                            * max(deviationsExpected[y].getDeviation(),\n                                                    deviationsActual[y].getDeviation())\n                                            + 0.1 * Math.abs(\n                  
                                  deviationsActual[y].getMean() - deviationsExpected[y].getMean());\n                            // forecasts cannot be more accurate than actuals; and forecasting would\n                            // not be exact\n                        }\n                    }\n                    if (within) {\n                        result.setCorrectionMode(DATA_DRIFT);\n                        workingGrade = 0;\n                    }\n                }\n            }\n            if (ignoreDrift && workingGrade > 0) {\n                if (runLength > 0 && gap < shingleSize) {\n                    result.setCorrectionMode(ALERT_ONCE);\n                    workingGrade = 0;\n                }\n            }\n        }\n\n        result.setAnomalyGrade(workingGrade);\n        result.setInHighScoreRegion(inHighScoreRegion);\n\n        if (workingGrade > 0) {\n            if (expectedPoint != null) {\n                result.setExpectedRCFPoint(expectedPoint);\n            }\n            attribution.renormalize(result.getRCFScore());\n            result.setStartOfAnomaly(true);\n            result.setAttribution(attribution);\n            result.setRelativeIndex(index);\n            ++runLength;\n        } else if (result.getCorrectionMode() == NONE) {\n            runLength = 0;\n            if (autoAdjust) {\n                for (int y = 0; y < baseDimension; y++) {\n                    deviationsActual[y].reset();\n                    deviationsExpected[y].reset();\n                }\n            }\n        } else if (runLength > 0) {\n            // cannot start a run; but the run can be sustained\n            ++runLength;\n        }\n\n        lastDescriptor = result.copyOf();\n        saveScores(strategy, choice, scoreVector, correctedScore, result.getTransformMethod(), shingleSize);\n        return result;\n    }\n\n    public void setZfactor(double factor) {\n        for (int i = 0; i < thresholders.length; i++) {\n            
thresholders[i].setZfactor(factor);\n        }\n    }\n\n    public void setAbsoluteThreshold(double lower) {\n        // only applies to thresholder 0\n        thresholders[EXPECTED_INVERSE_DEPTH_INDEX].setAbsoluteThreshold(lower);\n    }\n\n    public void setScoreDifferencing(double persistence) {\n        // only applies to thresholder 0\n        thresholders[EXPECTED_INVERSE_DEPTH_INDEX].setScoreDifferencing(persistence);\n    }\n\n    public void setInitialThreshold(double initial) {\n        // only applies to thresholder 0\n        thresholders[EXPECTED_INVERSE_DEPTH_INDEX].setInitialThreshold(initial);\n    }\n\n    public void setNumberOfAttributors(int numberOfAttributors) {\n        checkArgument(numberOfAttributors > 0, \"cannot be negative\");\n        this.numberOfAttributors = numberOfAttributors;\n    }\n\n    public int getNumberOfAttributors() {\n        return numberOfAttributors;\n    }\n\n    public double[] getLastScore() {\n        return lastScore;\n    }\n\n    public void setLastScore(double[] score) {\n        if (score != null) {\n            System.arraycopy(score, 0, lastScore, 0, min(NUMBER_OF_MODES, score.length));\n        }\n    }\n\n    void validateIgnore(double[] shift, int length) {\n        checkArgument(shift.length == length,\n                () -> String.format(Locale.ROOT, \"has to be of length %d but is %d\", length, shift.length));\n        for (double element : shift) {\n            checkArgument(element >= 0, \"has to be non-negative\");\n        }\n    }\n\n    public void setIgnoreNearExpectedFromAbove(double[] ignoreSimilarShift) {\n        if (ignoreSimilarShift != null) {\n            validateIgnore(ignoreSimilarShift, baseDimension);\n            System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromAbove, 0, baseDimension);\n        }\n    }\n\n    public void setIgnoreNearExpectedFromBelow(double[] ignoreSimilarShift) {\n        if (ignoreSimilarShift != null) {\n            
validateIgnore(ignoreSimilarShift, baseDimension);\n            System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromBelow, 0, baseDimension);\n        }\n    }\n\n    public void setIgnoreNearExpectedFromAboveByRatio(double[] ignoreSimilarShift) {\n        if (ignoreSimilarShift != null) {\n            validateIgnore(ignoreSimilarShift, baseDimension);\n            System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromAboveByRatio, 0, baseDimension);\n        }\n    }\n\n    public void setIgnoreNearExpectedFromBelowByRatio(double[] ignoreSimilarShift) {\n        if (ignoreSimilarShift != null) {\n            validateIgnore(ignoreSimilarShift, baseDimension);\n            System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromBelowByRatio, 0, baseDimension);\n        }\n    }\n\n    // to be used for the state classes only\n    public void setIgnoreNearExpected(double[] ignoreSimilarShift) {\n        if (ignoreSimilarShift != null) {\n            validateIgnore(ignoreSimilarShift, 4 * baseDimension);\n            System.arraycopy(ignoreSimilarShift, 0, ignoreNearExpectedFromAbove, 0, baseDimension);\n            System.arraycopy(ignoreSimilarShift, baseDimension, ignoreNearExpectedFromBelow, 0, baseDimension);\n            System.arraycopy(ignoreSimilarShift, 2 * baseDimension, ignoreNearExpectedFromAboveByRatio, 0,\n                    baseDimension);\n            System.arraycopy(ignoreSimilarShift, 3 * baseDimension, ignoreNearExpectedFromBelowByRatio, 0,\n                    baseDimension);\n        }\n    }\n\n    public double[] getIgnoreNearExpected() {\n        double[] answer = new double[4 * baseDimension];\n        System.arraycopy(ignoreNearExpectedFromAbove, 0, answer, 0, baseDimension);\n        System.arraycopy(ignoreNearExpectedFromBelow, 0, answer, baseDimension, baseDimension);\n        System.arraycopy(ignoreNearExpectedFromAboveByRatio, 0, answer, 2 * baseDimension, baseDimension);\n        
System.arraycopy(ignoreNearExpectedFromBelowByRatio, 0, answer, 3 * baseDimension, baseDimension);\n        return answer;\n    }\n\n    public long getRandomSeed() {\n        return randomSeed;\n    }\n\n    public BasicThresholder[] getThresholders() {\n        return thresholders;\n    }\n\n    public int getBaseDimension() {\n        return baseDimension;\n    }\n\n    public ScoringStrategy getLastStrategy() {\n        return lastStrategy;\n    }\n\n    public void setLastStrategy(ScoringStrategy strategy) {\n        this.lastStrategy = strategy;\n    }\n\n    public Deviation[] getDeviations() {\n        if (!autoAdjust) {\n            return null;\n        }\n        checkArgument(deviationsActual.length == deviationsExpected.length, \"incorrect state\");\n        checkArgument(deviationsActual.length == baseDimension, \"length should be base dimension\");\n\n        Deviation[] answer = new Deviation[2 * deviationsActual.length];\n        for (int i = 0; i < deviationsActual.length; i++) {\n            answer[i] = deviationsActual[i];\n        }\n        for (int i = 0; i < deviationsExpected.length; i++) {\n            answer[i + deviationsActual.length] = deviationsExpected[i];\n        }\n        return answer;\n    }\n\n    public double getSamplingRate() {\n        return samplingRate;\n    }\n\n    public void setSamplingRate(double samplingRate) {\n        checkArgument(samplingRate > 0, \" cannot be negative\");\n        checkArgument(samplingRate < 1.0, \" has to be in [0,1)\");\n        this.samplingRate = samplingRate;\n    }\n\n    public double[] getModeInformation() {\n        return modeInformation;\n    }\n\n    // to be used in future\n    public void setModeInformation(double[] modeInformation) {\n    }\n\n    public boolean isAutoAdjust() {\n        return autoAdjust;\n    }\n\n    public void setAutoAdjust(boolean autoAdjust) {\n        this.autoAdjust = autoAdjust;\n    }\n\n    public double getNoiseFactor() {\n        return 
noiseFactor;\n    }\n\n    public void setNoiseFactor(double noiseFactor) {\n        this.noiseFactor = noiseFactor;\n    }\n\n    public void setIgnoreDrift(boolean ignoreDrift) {\n        this.ignoreDrift = ignoreDrift;\n    }\n\n    public boolean isIgnoreDrift() {\n        return ignoreDrift;\n    }\n\n    public void setLastDescriptor(RCFComputeDescriptor lastDescriptor) {\n        this.lastDescriptor = lastDescriptor.copyOf();\n    }\n\n    public RCFComputeDescriptor getLastDescriptor() {\n        return lastDescriptor;\n    }\n\n    public int getRunLength() {\n        return runLength;\n    }\n\n    public void setRunLength(int runLength) {\n        this.runLength = runLength;\n    }\n\n    public double getSamplingSupport() {\n        return samplingSupport;\n    }\n\n    public void setSamplingSupport(double sampling) {\n        checkArgument(sampling >= 0, \" cannot be negative \");\n        checkArgument(sampling < 2 * DEFAULT_SAMPLING_SUPPORT,\n                \" cannot be more than \" + (2 * DEFAULT_SAMPLING_SUPPORT));\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/RCFCaster.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static java.lang.Math.max;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Optional;\nimport java.util.function.Function;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.calibration.ErrorHandler;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\n\n@Getter\n@Setter\npublic class RCFCaster extends ThresholdedRandomCutForest {\n\n    public static double DEFAULT_ERROR_PERCENTILE = 0.1;\n\n    public static Calibration DEFAULT_CALIBRATION = Calibration.SIMPLE;\n\n    protected int forecastHorizon;\n    protected ErrorHandler errorHandler;\n    protected int errorHorizon;\n    protected Calibration 
calibrationMethod;\n\n    public static class Builder extends ThresholdedRandomCutForest.Builder<Builder> {\n        int forecastHorizon;\n        int errorHorizon;\n        double percentile = DEFAULT_ERROR_PERCENTILE;\n        protected Calibration calibrationMethod = DEFAULT_CALIBRATION;\n\n        // default is to use less space\n        protected boolean useRCF = false;\n\n        Optional<float[]> upperLimit = Optional.empty();\n\n        Optional<float[]> lowerLimit = Optional.empty();\n\n        Builder() {\n            super();\n            // changing the default;\n            transformMethod = TransformMethod.NORMALIZE;\n        }\n\n        public Builder forecastHorizon(int horizon) {\n            this.forecastHorizon = horizon;\n            return this;\n        }\n\n        public Builder errorHorizon(int errorHorizon) {\n            this.errorHorizon = errorHorizon;\n            return this;\n        }\n\n        public Builder percentile(double percentile) {\n            this.percentile = percentile;\n            return this;\n        }\n\n        public Builder calibration(Calibration calibrationMethod) {\n            this.calibrationMethod = calibrationMethod;\n            return this;\n        }\n\n        public Builder lowerLimit(float[] lowerLimit) {\n            this.lowerLimit = Optional.of(lowerLimit);\n            return this;\n        }\n\n        public Builder upperLimit(float[] upperLimit) {\n            this.upperLimit = Optional.of(upperLimit);\n            return this;\n        }\n\n        public Builder useRCFCallibration(boolean use) {\n            useRCF = use;\n            return this;\n        }\n\n        @Override\n        public RCFCaster build() {\n            checkArgument(forecastHorizon > 0, \"need non-negative horizon\");\n            checkArgument(shingleSize > 0, \"need shingle size > 1\");\n            checkArgument(forestMode != ForestMode.STREAMING_IMPUTE,\n                    \"error estimation with on the fly 
imputation should not be abstracted, \"\n                            + \"either estimate errors outside of this object \"\n                            + \"or perform on the fly imputation outside this code\");\n            checkArgument(forestMode != ForestMode.TIME_AUGMENTED,\n                    \"error estimation when time is used as a field in the forest should not be abstracted\"\n                            + \"perform estimation outside this code\");\n            checkArgument(!internalShinglingEnabled.isPresent() || internalShinglingEnabled.get(),\n                    \"internal shingling only\");\n            int inputLength = dimensions / shingleSize;\n            if (errorHorizon == 0) {\n                errorHorizon = max(sampleSize, 2 * forecastHorizon);\n            }\n            validate();\n            return new RCFCaster(this);\n        }\n    }\n\n    public static Builder builder() {\n        return new Builder();\n    }\n\n    public RCFCaster(Builder builder) {\n        super(builder);\n        forecastHorizon = builder.forecastHorizon;\n        errorHorizon = builder.errorHorizon;\n        ErrorHandler.Builder errorBuilder = ErrorHandler.builder().dimensions(builder.dimensions)\n                .shingleSize(builder.shingleSize).forecastHorizon(builder.forecastHorizon)\n                .percentile(builder.percentile).errorHorizon(builder.errorHorizon).useRCF(builder.useRCF);\n        builder.lowerLimit.ifPresent(errorBuilder::lowerLimit);\n        builder.upperLimit.ifPresent(errorBuilder::upperLimit);\n\n        errorHandler = new ErrorHandler(errorBuilder);\n        calibrationMethod = builder.calibrationMethod;\n    }\n\n    // for mappers\n    public RCFCaster(ForestMode forestMode, TransformMethod transformMethod, ScoringStrategy scoringStrategy,\n            RandomCutForest forest, PredictorCorrector predictorCorrector, Preprocessor preprocessor,\n            RCFComputeDescriptor descriptor, int forecastHorizon, ErrorHandler 
errorHandler, int errorHorizon,\n            Calibration calibrationMethod) {\n        super(forestMode, transformMethod, scoringStrategy, forest, predictorCorrector, preprocessor, descriptor);\n        this.forecastHorizon = forecastHorizon;\n        this.errorHandler = errorHandler;\n        this.errorHorizon = errorHorizon;\n        this.calibrationMethod = calibrationMethod;\n    }\n\n    /**\n     * a single call that preprocesses data, compute score/grade, generates forecast\n     * and updates state\n     *\n     * @param inputPoint current input point\n     * @param timestamp  time stamp of input\n     * @return forecast descriptor for the current input point\n     */\n\n    @Override\n    public ForecastDescriptor process(double[] inputPoint, long timestamp) {\n        return process(inputPoint, timestamp, null);\n    }\n\n    void augment(ForecastDescriptor answer) {\n        super.augment(answer);\n        TimedRangeVector timedForecast = new TimedRangeVector(\n                forest.getDimensions() * forecastHorizon / preprocessor.getShingleSize(), forecastHorizon);\n\n        // forest is ready mens that we can forecast -- but there is an implicit\n        // assumption that preprocessor is ready\n        if (forest.isOutputReady() && preprocessor.isOutputReady()) {\n            if (errorHandler.getSequenceIndex() > 0) {\n                // if not then there is no forecast stored\n                // forecast has to be there first\n                errorHandler.updateActuals(answer.getCurrentInput(), answer.getPostDeviations());\n                errorHandler.augmentDescriptor(answer);\n            }\n\n            timedForecast = extrapolate(forecastHorizon, true, 1.0);\n\n            // note that internal timestamp of answer is 1 step in the past\n            // outputReady corresponds to first (and subsequent) forecast\n            errorHandler.updateForecasts(timedForecast.rangeVector);\n        }\n        answer.setTimedForecast(timedForecast);\n    
}\n\n    /**\n     * a single call that preprocesses data, compute score/grade and updates state\n     * when the current input has potentially missing values\n     *\n     * @param inputPoint    current input point\n     * @param timestamp     time stamp of input\n     * @param missingValues this is not meaningful for forecast; but kept as a\n     *                      parameter since it conforms to (sometimes used)\n     *                      ThresholdedRCF\n     * @return forecast descriptor for the current input point\n     */\n\n    @Override\n    public ForecastDescriptor process(double[] inputPoint, long timestamp, int[] missingValues) {\n        checkArgument(missingValues == null, \"on the fly imputation and error estimation should not mix\");\n        ForecastDescriptor answer = new ForecastDescriptor(inputPoint, timestamp, forecastHorizon);\n        answer.setScoringStrategy(scoringStrategy);\n        boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);\n        try {\n            if (cacheDisabled) {\n                // turn caching on temporarily\n                forest.setBoundingBoxCacheFraction(1.0);\n            }\n            augment(answer);\n        } finally {\n            if (cacheDisabled) {\n                // turn caching off\n                forest.setBoundingBoxCacheFraction(0);\n            }\n        }\n\n        return answer;\n    }\n\n    public void calibrate(double[] actuals, Calibration calibration, RangeVector ranges) {\n        errorHandler.calibrate(actuals, calibration, ranges);\n    }\n\n    @Override\n    public TimedRangeVector extrapolate(int horizon, boolean correct, double centrality) {\n        return this.extrapolate(calibrationMethod, horizon, correct, centrality);\n    }\n\n    public TimedRangeVector extrapolate(Calibration calibration, int horizon, boolean correct, double centrality) {\n        TimedRangeVector answer = super.extrapolate(horizon, correct, centrality);\n        double[] last = 
getPreprocessor().getShingledInput(getPreprocessor().getShingleSize() - 1);\n        calibrate(last, calibration, answer.rangeVector);\n        return answer;\n    }\n\n    @Override\n    public List<AnomalyDescriptor> processSequentially(double[][] data, Function<AnomalyDescriptor, Boolean> filter) {\n        if (data == null || data.length == 0) {\n            return new ArrayList<>();\n        }\n\n        long timestamp = preprocessor.getInternalTimeStamp();\n        long[] timestamps = new long[data.length];\n        for (int i = 0; i < data.length; i++) {\n            timestamps[i] = ++timestamp;\n        }\n\n        return processSequentially(data, timestamps, filter);\n    }\n\n    public List<AnomalyDescriptor> processSequentially(double[][] data, long[] timestamps,\n            Function<AnomalyDescriptor, Boolean> filter) {\n        // Precondition checks\n        checkArgument(filter != null, \"filter must not be null\");\n        if (data != null && data.length > 0) {\n            checkArgument(timestamps != null, \"timestamps must not be null when data is non-empty\");\n            checkArgument(timestamps.length == data.length, String.format(Locale.ROOT,\n                    \"timestamps length (%s) must equal data length (%s)\", timestamps.length, data.length));\n            for (int i = 1; i < timestamps.length; i++) {\n                checkArgument(timestamps[i] > timestamps[i - 1],\n                        String.format(Locale.ROOT,\n                                \"timestamps must be strictly ascending: \"\n                                        + \"timestamps[%s]=%s is not > timestamps[%s]=%s\",\n                                i, timestamps[i], i - 1, timestamps[i - 1]));\n            }\n        }\n\n        ArrayList<AnomalyDescriptor> answer = new ArrayList<>();\n        if (data != null) {\n            if (data.length > 0) {\n                boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);\n                try {\n      
              if (cacheDisabled) { // turn caching on temporarily\n                        forest.setBoundingBoxCacheFraction(1.0);\n                    }\n                    int length = preprocessor.getInputLength();\n                    for (int i = 0; i < data.length; i++) {\n                        double[] point = data[i];\n                        checkArgument(point != null, \" data should not be null \");\n                        checkArgument(point.length == length, \" nonuniform lengths \");\n                        ForecastDescriptor description = new ForecastDescriptor(point, timestamps[i], forecastHorizon);\n                        augment(description);\n                        if (filter.apply(description)) {\n                            answer.add(description);\n                        }\n                    }\n                } finally {\n                    if (cacheDisabled) { // turn caching off\n                        forest.setBoundingBoxCacheFraction(0);\n                    }\n                }\n            }\n        }\n        return answer;\n    }\n\n    public void setUpperLimit(float[] upperLimit) {\n        errorHandler.setUpperLimit(upperLimit);\n    }\n\n    public void setLowerLimit(float[] lowerLimit) {\n        errorHandler.setLowerLimit(lowerLimit);\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/SequentialAnalysis.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_NUMBER_OF_TREES;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.parkservices.returntypes.AnalysisDescriptor;\n\npublic class SequentialAnalysis {\n\n    /**\n     * provides a list of anomalies given a block of data. 
While this is a fairly\n     * simple function, it is provided as a reference such that users do not have\n     * depend on interpretations of sequentian analysis\n     * \n     * @param data            the array containing the values\n     * @param shingleSize     shinglesize of RCF\n     * @param sampleSize      sampleSize of RCF\n     * @param numberOfTrees   the numberOfTres used by RCF\n     * @param timeDecay       the time decay parameter of RCF; think of half life of\n     *                        data\n     * @param outputAfter     the value after which we\n     * @param transformMethod the transformation used in preprocessing\n     * @param transformDecay  the half life of data in preprocessing (if in doubt,\n     *                        use the same as timeDecay)\n     * @param seed            a random seed\n     * @return a list of anomalies\n     */\n    public static List<AnomalyDescriptor> detectAnomalies(double[][] data, int shingleSize, int sampleSize,\n            int numberOfTrees, double timeDecay, int outputAfter, TransformMethod transformMethod,\n            double transformDecay, long seed) {\n        checkArgument(data != null, \"cannot be a null array\");\n        int inputDimension = data[0].length;\n        int dimensions = inputDimension * shingleSize;\n        double fraction = 1.0 * outputAfter / sampleSize;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)\n                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD).timeDecay(timeDecay)\n                .transformMethod(transformMethod).outputAfter(outputAfter).transformDecay(transformDecay)\n                .initialAcceptFraction(fraction).build();\n        return forest.processSequentially(data);\n    }\n\n    public static List<AnomalyDescriptor> detectAnomalies(double[][] data, 
int shingleSize, int sampleSize,\n            double timeDecay, TransformMethod transformMethod, long seed) {\n        return detectAnomalies(data, shingleSize, sampleSize, DEFAULT_NUMBER_OF_TREES, timeDecay, sampleSize / 4,\n                transformMethod, timeDecay, seed);\n    }\n\n    public static List<AnomalyDescriptor> detectAnomalies(double[][] data, int shingleSize, double timeDecay,\n            TransformMethod transformMethod, double transformDecay, long seed) {\n        return detectAnomalies(data, shingleSize, DEFAULT_SAMPLE_SIZE, DEFAULT_NUMBER_OF_TREES, timeDecay,\n                DEFAULT_SAMPLE_SIZE / 4, transformMethod, transformDecay, seed);\n    }\n\n    /**\n     * Same as the anomaly detector but provides a list of anomalies as well as a\n     * calibrated (with testing) interval and forecasts.\n     * \n     * @param inputArray      the input\n     * @param shingleSize     shingle size of RCF\n     * @param sampleSize      samplesize of RCF\n     * @param timeDecay       timedecay of RCF\n     * @param outputAfter     the input after which we perform score evaluation\n     * @param transformMethod transformation method of preprocessing\n     * @param transformDecay  the time decay of preprocessing\n     * @param forecastHorizon the number of steps to forecast (during and at the\n     *                        end)\n     * @param errorHorizon    the number of steps to perform calibration (during the\n     *                        sequence)\n     * @param percentile      the percentile of error one is interested in\n     *                        calibrating (we recommend 0.1)\n     * @param seed            random seed\n     * @return a list of anomalies and the final forecast wilh callibration\n     */\n    public static AnalysisDescriptor forecastWithAnomalies(double[][] inputArray, int shingleSize, int sampleSize,\n            double timeDecay, int outputAfter, TransformMethod transformMethod, double transformDecay,\n            int 
forecastHorizon, int errorHorizon, double percentile, Calibration calibration, long seed) {\n        checkArgument(inputArray != null, \" input cannot be null\");\n        int inputDimension = inputArray[0].length;\n        int dimensions = shingleSize * inputDimension;\n        int numberOfTrees = 50;\n        double fraction = 1.0 * outputAfter / sampleSize;\n        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed).numberOfTrees(numberOfTrees)\n                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).anomalyRate(0.01)\n                .forestMode(ForestMode.STANDARD).timeDecay(timeDecay).transformMethod(transformMethod)\n                .outputAfter(outputAfter).calibration(calibration).initialAcceptFraction(fraction)\n                .forecastHorizon(forecastHorizon).transformDecay(transformDecay).errorHorizon(errorHorizon)\n                .percentile(percentile).build();\n\n        ArrayList<AnomalyDescriptor> descriptors = new ArrayList<>();\n        ForecastDescriptor last = null;\n        for (double[] input : inputArray) {\n            ForecastDescriptor descriptor = caster.process(input, 0L);\n            if (descriptor.getAnomalyGrade() > 0) {\n                descriptors.add(descriptor);\n            }\n            last = descriptor;\n        }\n        return new AnalysisDescriptor(descriptors, last);\n    }\n\n    public static AnalysisDescriptor forecastWithAnomalies(double[][] inputArray, int shingleSize, int sampleSize,\n            double timeDecay, TransformMethod transformMethod, int forecastHorizon, int errorHorizon, long seed) {\n        return forecastWithAnomalies(inputArray, shingleSize, sampleSize, timeDecay, sampleSize / 4, transformMethod,\n                timeDecay, forecastHorizon, errorHorizon, 0.1, Calibration.SIMPLE, seed);\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/ThresholdedRandomCutForest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_BOUNDING_BOX_CACHE_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_CENTER_OF_MASS_ENABLED;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INITIAL_ACCEPT_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_INTERNAL_SHINGLING_ENABLED;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_NUMBER_OF_TREES;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_OUTPUT_AFTER_FRACTION;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_PARALLEL_EXECUTION_ENABLED;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SHINGLE_SIZE;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_ABSOLUTE_THRESHOLD;\nimport static 
com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_SCORE_DIFFERENCING;\nimport static com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_Z_FACTOR;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.DEFAULT_START_NORMALIZATION;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.DEFAULT_STOP_NORMALIZATION;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Optional;\nimport java.util.Random;\nimport java.util.function.Function;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.preprocessor.IPreprocessor;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\n\n/**\n * This class provides a combined RCF and thresholder, both of which operate in\n * a streaming manner and respect the arrow of time.\n */\n@Getter\n@Setter\npublic class ThresholdedRandomCutForest {\n\n    // saved description of the last seen anomaly\n    RCFComputeDescriptor lastAnomalyDescriptor;\n\n    // forestMode of operation\n    protected ForestMode forestMode = ForestMode.STANDARD;\n\n    protected TransformMethod transformMethod = 
TransformMethod.NONE;\n\n    protected ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;\n\n    protected RandomCutForest forest;\n\n    protected PredictorCorrector predictorCorrector;\n\n    protected IPreprocessor preprocessor;\n\n    public ThresholdedRandomCutForest(Builder<?> builder) {\n\n        forestMode = builder.forestMode;\n        transformMethod = builder.transformMethod;\n        scoringStrategy = builder.scoringStrategy;\n        Preprocessor.Builder<?> preprocessorBuilder = Preprocessor.builder().shingleSize(builder.shingleSize)\n                .transformMethod(builder.transformMethod).forestMode(builder.forestMode);\n\n        int inputLength;\n        if (builder.forestMode == ForestMode.TIME_AUGMENTED) {\n            inputLength = builder.dimensions / builder.shingleSize;\n            preprocessorBuilder.inputLength(inputLength);\n            builder.dimensions += builder.shingleSize;\n            preprocessorBuilder.normalizeTime(builder.normalizeTime);\n            // force internal shingling for this option\n            builder.internalShinglingEnabled = Optional.of(true);\n        } else if (builder.forestMode == ForestMode.STREAMING_IMPUTE) {\n            // already validated\n            inputLength = builder.dimensions / builder.shingleSize;\n            preprocessorBuilder.inputLength(inputLength);\n\n            preprocessorBuilder.imputationMethod(builder.imputationMethod);\n            preprocessorBuilder.normalizeTime(true);\n            if (builder.fillValues != null) {\n                preprocessorBuilder.fillValues(builder.fillValues);\n            }\n            // forcing external for the forest to control admittance\n            builder.internalShinglingEnabled = Optional.of(true);\n            preprocessorBuilder.useImputedFraction(builder.useImputedFraction.orElse(0.5));\n        } else {\n            // STANDARD\n            boolean smallInput = 
builder.internalShinglingEnabled.orElse(DEFAULT_INTERNAL_SHINGLING_ENABLED);\n            inputLength = (smallInput) ? builder.dimensions / builder.shingleSize : builder.dimensions;\n            preprocessorBuilder.inputLength(inputLength);\n        }\n\n        forest = builder.buildForest();\n        validateNonNegativeArray(builder.weights);\n\n        preprocessorBuilder.weights(builder.weights);\n        preprocessorBuilder.weightTime(builder.weightTime.orElse(1.0));\n        preprocessorBuilder.transformDecay(builder.transformDecay.orElse(1.0 / builder.sampleSize));\n        // to be used later\n        preprocessorBuilder.randomSeed(builder.randomSeed.orElse(0L) + 1);\n        preprocessorBuilder.dimensions(builder.dimensions);\n        preprocessorBuilder.stopNormalization(builder.stopNormalization.orElse(DEFAULT_STOP_NORMALIZATION));\n        preprocessorBuilder.startNormalization(builder.startNormalization.orElse(DEFAULT_START_NORMALIZATION));\n\n        preprocessor = preprocessorBuilder.build();\n        predictorCorrector = new PredictorCorrector(forest.getTimeDecay(), builder.anomalyRate, builder.autoAdjust,\n                builder.dimensions / builder.shingleSize, builder.randomSeed.orElse(0L));\n        lastAnomalyDescriptor = new RCFComputeDescriptor(null, 0, builder.forestMode, builder.transformMethod,\n                builder.imputationMethod);\n\n        // when autoAdjust is true, the lowerThreshold is dynamically calculated\n        if (!builder.autoAdjust) {\n            predictorCorrector.setAbsoluteThreshold(builder.lowerThreshold.orElse(DEFAULT_ABSOLUTE_THRESHOLD));\n        }\n\n        predictorCorrector.setZfactor(builder.zFactor);\n\n        predictorCorrector.setScoreDifferencing(builder.scoreDifferencing.orElse(DEFAULT_SCORE_DIFFERENCING));\n        builder.ignoreNearExpectedFromAbove.ifPresent(predictorCorrector::setIgnoreNearExpectedFromAbove);\n        
builder.ignoreNearExpectedFromBelow.ifPresent(predictorCorrector::setIgnoreNearExpectedFromBelow);\n        builder.ignoreNearExpectedFromAboveByRatio.ifPresent(predictorCorrector::setIgnoreNearExpectedFromAboveByRatio);\n        builder.ignoreNearExpectedFromBelowByRatio.ifPresent(predictorCorrector::setIgnoreNearExpectedFromBelowByRatio);\n        predictorCorrector.setLastStrategy(builder.scoringStrategy);\n        predictorCorrector.setIgnoreDrift(builder.alertOnceInDrift);\n    }\n\n    void validateNonNegativeArray(double[] array) {\n        if (array != null) {\n            for (double element : array) {\n                checkArgument(element >= 0, \" has to be non-negative\");\n            }\n        }\n    }\n\n    // for mappers\n    public ThresholdedRandomCutForest(ForestMode forestMode, TransformMethod transformMethod,\n            ScoringStrategy scoringStrategy, RandomCutForest forest, PredictorCorrector predictorCorrector,\n            Preprocessor preprocessor, RCFComputeDescriptor descriptor) {\n        this.forestMode = forestMode;\n        this.transformMethod = transformMethod;\n        this.scoringStrategy = scoringStrategy;\n        this.forest = forest;\n        this.predictorCorrector = predictorCorrector;\n        this.preprocessor = preprocessor;\n        this.lastAnomalyDescriptor = descriptor;\n    }\n\n    // this constructor produces an internally shingled ThresholdedRCF model from an\n    // externally shingled RCF model -- possibly as a part of an externally shingled\n    // ThresholdedRCF, absent any transformations and augmentations.\n    // (these externally shingled models may or may not be in use in current version\n    // of OpenSearch)\n    // A benefit of this conversion would be that imputations would be accessible\n    // to ThresholdedRCF -- that is, even if not every value of the input tuple is\n    // known\n    // the function process() would be able to provide an anomaly score (which is\n    // likely near\n    // 
minimum, since RCF is used to fill in the missing values). As a result, high\n    // values of the\n    // anomaly score will continue to be likely anomalies.\n    // Note that the basic RandomCutForest cannot be changed easily\n    // but the process() function would only require a fraction of the input\n    // see ThresholdedRandomCutForestMapperTest\n    public ThresholdedRandomCutForest(RandomCutForest forest, double futureAnomalyRate, List<Double> values,\n            double[] lastShingledInput) {\n        this.forest = forest;\n        int dimensions = forest.getDimensions();\n\n        int inputLength = dimensions / forest.getShingleSize();\n        Preprocessor preprocessor = new Preprocessor.Builder<>().transformMethod(TransformMethod.NONE)\n                .dimensions(dimensions).shingleSize(forest.getShingleSize()).inputLength(inputLength)\n                .initialShingledInput(lastShingledInput).initialPoint(toFloatArray(lastShingledInput))\n                .imputationMethod(RCF).startNormalization(0).build();\n        this.predictorCorrector = new PredictorCorrector(new BasicThresholder(values, futureAnomalyRate), inputLength);\n        preprocessor.setValuesSeen((int) forest.getTotalUpdates());\n        preprocessor.getDataQuality()[0].update(1.0);\n        this.preprocessor = preprocessor;\n        this.lastAnomalyDescriptor = new RCFComputeDescriptor(null, forest.getTotalUpdates());\n    }\n\n    protected <T extends AnomalyDescriptor> boolean saveDescriptor(T lastDescriptor) {\n        return (lastDescriptor.getAnomalyGrade() > 0);\n    }\n\n    protected <P extends AnomalyDescriptor> void augment(P description) {\n        description.setScoringStrategy(scoringStrategy);\n        initialSetup(description, lastAnomalyDescriptor, forest);\n        predictorCorrector.detect(description, lastAnomalyDescriptor, forest);\n        postProcess(description);\n        if (saveDescriptor(description)) {\n            lastAnomalyDescriptor = 
description.copyOf();\n        }\n    }\n\n    /**\n     * a single call that prepreprocesses data, compute score/grade and updates\n     * state\n     * \n     * @param inputPoint current input point\n     * @param timestamp  time stamp of input\n     * @return anomaly descriptor for the current input point\n     */\n    public AnomalyDescriptor process(double[] inputPoint, long timestamp) {\n        return process(inputPoint, timestamp, null);\n    }\n\n    /**\n     * a single call that prepreprocesses data, compute score/grade and updates\n     * state when the current input has potentially missing values\n     *\n     * @param inputPoint    current input point\n     * @param timestamp     time stamp of input\n     * @param missingValues indices of the input which are missing/questionable\n     *                      values\n     * @return anomaly descriptor for the current input point\n     */\n    public AnomalyDescriptor process(double[] inputPoint, long timestamp, int[] missingValues) {\n\n        AnomalyDescriptor description = new AnomalyDescriptor(inputPoint, timestamp);\n        description.setScoringStrategy(scoringStrategy);\n        boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);\n        try {\n            if (cacheDisabled) { // turn caching on temporarily\n                forest.setBoundingBoxCacheFraction(1.0);\n            }\n            if (missingValues != null) {\n                checkArgument(missingValues.length <= inputPoint.length, \" incorrect data\");\n                for (int i = 0; i < missingValues.length; i++) {\n                    checkArgument(missingValues[i] >= 0, \" missing values cannot be at negative position\");\n                    checkArgument(missingValues[i] < inputPoint.length,\n                            \"missing values cannot be at position larger than input length\");\n                }\n                description.setMissingValues(missingValues);\n            }\n            
augment(description);\n        } finally {\n            if (cacheDisabled) { // turn caching off\n                forest.setBoundingBoxCacheFraction(0);\n            }\n        }\n        if (saveDescriptor(description)) {\n            lastAnomalyDescriptor = description.copyOf();\n        }\n        return description;\n    }\n\n    /**\n     * the following function processes a list of vectors sequentially; the main\n     * benefit of this invocation is the caching is persisted from one data point to\n     * another and thus the execution is efficient. Moreover in many scenarios where\n     * serialization deserialization is expensive then it may be of benefit of\n     * invoking sequential process on a contiguous chunk of input (we avoid the use\n     * of the word batch -- the entire goal of this procedure is to provide\n     * sequential processing and not standard batch processing). The procedure\n     * avoids transfer of ephemeral transient objects for non-anomalies and thereby\n     * can have additional benefits.\n     *\n     * @param data   a vectors of vectors (each of which has to have the same\n     *               inputLength)\n     * @param filter a condition to drop desriptor (recommended filter: anomalyGrade\n     *               positive)\n     * @return collection of descriptors of the anomalies filtered by the condition\n     */\n    public List<AnomalyDescriptor> processSequentially(double[][] data, Function<AnomalyDescriptor, Boolean> filter) {\n        if (data == null || data.length == 0) {\n            return new ArrayList<>();\n        }\n\n        long timestamp = preprocessor.getInternalTimeStamp();\n        long[] timestamps = new long[data.length];\n        for (int i = 0; i < data.length; i++) {\n            timestamps[i] = ++timestamp;\n        }\n\n        return processSequentially(data, timestamps, filter);\n    }\n\n    /**\n     * the following function processes a list of vectors sequentially; the main\n     * benefit of this 
invocation is the caching is persisted from one data point to\n     * another and thus the execution is efficient. Moreover in many scenarios where\n     * serialization deserialization is expensive then it may be of benefit of\n     * invoking sequential process on a contiguous chunk of input (we avoid the use\n     * of the word batch -- the entire goal of this procedure is to provide\n     * sequential processing and not standard batch processing). The procedure\n     * avoids transfer of ephemeral transient objects for non-anomalies and thereby\n     * can have additional benefits. At the moment the operation does not support\n     * external timestamps.\n     *\n     *\n     *\n     * @param data       a vectors of vectors (each of which has to have the same\n     *                   inputLength). Mising values are represented by Double.NaN\n     *                   in a vector.\n     * @param timestamps a vector of timestamps (in the same order as the data, has\n     *                   to be same length as data, and ascending)\n     * @param filter     a condition to drop desriptor (recommended filter:\n     *                   anomalyGrade positive)\n     * @return collection of descriptors of the anomalies filtered by the condition\n     * @throws IllegalArgumentException if\n     *                                  <ul>\n     *                                  <li>data is non-null but timestamps is\n     *                                  null</li>\n     *                                  <li>timestamps.length != data.length</li>\n     *                                  <li>timestamps is not strictly\n     *                                  ascending</li>\n     *                                  <li>any data[i].length !=\n     *                                  preprocessor.getInputLength()</li>\n     *                                  </ul>\n     */\n    public List<AnomalyDescriptor> processSequentially(double[][] data, long[] timestamps,\n            
Function<AnomalyDescriptor, Boolean> filter) {\n        // Precondition checks\n        checkArgument(filter != null, \"filter must not be null\");\n        if (data != null && data.length > 0) {\n            checkArgument(timestamps != null, \"timestamps must not be null when data is non-empty\");\n            checkArgument(timestamps.length == data.length, String.format(Locale.ROOT,\n                    \"timestamps length (%s) must equal data length (%s)\", timestamps.length, data.length));\n            for (int i = 1; i < timestamps.length; i++) {\n                checkArgument(timestamps[i] > timestamps[i - 1],\n                        String.format(Locale.ROOT,\n                                \"timestamps must be strictly ascending: \"\n                                        + \"timestamps[%s]=%s is not > timestamps[%s]=%s\",\n                                i, timestamps[i], i - 1, timestamps[i - 1]));\n            }\n        }\n\n        ArrayList<AnomalyDescriptor> answer = new ArrayList<>();\n\n        if (data != null && data.length > 0) {\n            boolean cacheDisabled = (forest.getBoundingBoxCacheFraction() == 0);\n            try {\n                if (cacheDisabled) { // turn caching on temporarily\n                    forest.setBoundingBoxCacheFraction(1.0);\n                }\n                int length = preprocessor.getInputLength();\n                for (int i = 0; i < data.length; i++) {\n                    double[] point = data[i];\n                    long timestamp = timestamps[i];\n                    checkArgument(point != null, \" data should not be null \");\n                    checkArgument(point.length == length, \" nonuniform lengths \");\n                    AnomalyDescriptor description = new AnomalyDescriptor(point, timestamp);\n                    // check missing values in point.\n                    int[] missingValues = generateMissingIndicesArray(point);\n                    if (missingValues != null) {\n               
         description.setMissingValues(missingValues);\n                    }\n                    augment(description);\n                    if (saveDescriptor(description)) {\n                        lastAnomalyDescriptor = description.copyOf();\n                    }\n                    if (filter.apply(description)) {\n                        answer.add(description);\n                    }\n                }\n            } finally {\n                if (cacheDisabled) { // turn caching off\n                    forest.setBoundingBoxCacheFraction(0);\n                }\n            }\n        }\n        return answer;\n    }\n\n    // recommended filter\n    public List<AnomalyDescriptor> processSequentially(double[][] data) {\n        return processSequentially(data, x -> x.getAnomalyGrade() > 0);\n    }\n\n    private int[] generateMissingIndicesArray(double[] point) {\n        List<Integer> intArray = new ArrayList<>();\n        for (int i = 0; i < point.length; i++) {\n            if (Double.isNaN(point[i])) {\n                intArray.add(i);\n            }\n        }\n        // Return null if the array is empty\n        if (intArray.size() == 0) {\n            return null;\n        }\n        return intArray.stream().mapToInt(Integer::intValue).toArray();\n    }\n\n    /**\n     * a function that extrapolates the data seen by the ThresholdedRCF model, and\n     * uses the transformations allowed (as opposed to just using RCFs). The\n     * forecasting also allows for predictor-corrector pattern which implies that\n     * some noise can be eliminated -- this can be important for various\n     * transformations. While the algorithm can function for STREAMING_IMPUTE mode\n     * where missing data is imputed on the fly, it may require effort to validate\n     * that the internal imputation is reasonably consistent with extrapolation. 
In\n     * general, since the STREAMING_IMPUTE can use non-RCF options to fill in\n     * missing data, the internal imputation and extrapolation need not be\n     * consistent.\n     * \n     * @param horizon    the length of time in the future which is being forecast\n     * @param correct    a boolean indicating if predictor-corrector subroutine\n     *                   should be turned on; this is specially helpful if there has\n     *                   been an anomaly in the recent past\n     * @param centrality in general RCF predicts the p50 value of conditional\n     *                   samples (centrality = 1). This parameter relaxes the\n     *                   conditional sampling. Using assumptions about input data\n     *                   (hence external to this code) it may be possible to use\n     *                   this parameter and the range information for confidence\n     *                   bounds.\n     * @return a timed range vector where the values[i] correspond to the forecast\n     *         for horizon (i+1). The upper and lower arrays indicate the\n     *         corresponding bounds based on the conditional sampling (and\n     *         transformation). Note that TRCF manages time in process() and thus\n     *         the forecasts always have timestamps associated which makes it easier\n     *         to execute the same code for various forest modes such as\n     *         STREAMING_IMPUTE, STANDARD and TIME_AUGMENTED. For STREAMING_IMPUTE\n     *         the time components of the prediction will be 0 because the time\n     *         information is already being used to fill in missing entries. For\n     *         STANDARD mode the time components would correspond to average arrival\n     *         difference. For TIME_AUGMENTED mode the time componentes would be the\n     *         result of the joint prediction. 
Finally note that setting weight of\n     *         time or any of the input columns will also 0 out the corresponding\n     *         forecast.\n     */\n\n    public TimedRangeVector extrapolate(int horizon, boolean correct, double centrality) {\n\n        int shingleSize = preprocessor.getShingleSize();\n        checkArgument(shingleSize > 1, \"extrapolation is not meaningful for shingle size = 1\");\n        // note the forest may have external shingling ...\n        int dimensions = forest.getDimensions();\n        int blockSize = dimensions / shingleSize;\n        float[] lastPoint = preprocessor.getLastShingledPoint();\n        if (forest.isOutputReady()) {\n            int gap = (int) (preprocessor.getInternalTimeStamp() - lastAnomalyDescriptor.getInternalTimeStamp());\n\n            float[] newPoint = lastPoint;\n\n            // gap will be at least 1\n            if (gap <= shingleSize && correct && lastAnomalyDescriptor.getExpectedRCFPoint() != null) {\n                if (gap == 1) {\n                    newPoint = lastAnomalyDescriptor.getExpectedRCFPoint();\n                } else {\n                    newPoint = predictorCorrector.applyPastCorrector(newPoint, gap, shingleSize, blockSize,\n                            preprocessor.getScale(), transformMethod, lastAnomalyDescriptor);\n                }\n            }\n            RangeVector answer = forest.extrapolateWithRanges(newPoint, horizon, blockSize, false, 0, centrality);\n            return preprocessor.invertForecastRange(answer, lastAnomalyDescriptor.getInputTimestamp(),\n                    lastAnomalyDescriptor.getDeltaShift(), lastAnomalyDescriptor.getExpectedRCFPoint() != null,\n                    lastAnomalyDescriptor.getExpectedTimeStamp());\n        } else {\n            return new TimedRangeVector(new TimedRangeVector(horizon * blockSize, horizon));\n        }\n    }\n\n    public TimedRangeVector extrapolate(int horizon) {\n        return extrapolate(horizon, true, 1.0);\n    
}\n\n    public RandomCutForest getForest() {\n        return forest;\n    }\n\n    public void setZfactor(double factor) {\n        predictorCorrector.setZfactor(factor);\n    }\n\n    public void setLowerThreshold(double lower) {\n        predictorCorrector.setAbsoluteThreshold(lower);\n    }\n\n    @Deprecated\n    public void setHorizon(double horizon) {\n        predictorCorrector.setScoreDifferencing(1 - horizon);\n    }\n\n    public void setScoreDifferencing(double scoreDifferencing) {\n        predictorCorrector.setScoreDifferencing(scoreDifferencing);\n    }\n\n    public void setIgnoreNearExpectedFromAbove(double[] ignoreSimilarFromAbove) {\n        predictorCorrector.setIgnoreNearExpectedFromAbove(ignoreSimilarFromAbove);\n    }\n\n    public void setIgnoreNearExpectedFromAboveByRatio(double[] ignoreSimilarFromAbove) {\n        predictorCorrector.setIgnoreNearExpectedFromAboveByRatio(ignoreSimilarFromAbove);\n    }\n\n    public void setIgnoreNearExpectedFromBelow(double[] ignoreSimilarFromBelow) {\n        predictorCorrector.setIgnoreNearExpectedFromBelow(ignoreSimilarFromBelow);\n    }\n\n    public void setIgnoreNearExpectedFromBelowByRatio(double[] ignoreSimilarFromBelow) {\n        predictorCorrector.setIgnoreNearExpectedFromBelowByRatio(ignoreSimilarFromBelow);\n    }\n\n    public void setScoringStrategy(ScoringStrategy strategy) {\n        this.scoringStrategy = strategy;\n    }\n\n    @Deprecated\n    public void setInitialThreshold(double initial) {\n        predictorCorrector.setInitialThreshold(initial);\n    }\n\n    /**\n     * sets up the AnomalyDescriptor object\n     *\n     * @param description           description of the input point\n     * @param lastAnomalyDescriptor the descriptor of the last anomaly\n     * @param forest                the RCF\n     * @return the descriptor to be used for anomaly scoring\n     */\n    <P extends AnomalyDescriptor> P initialSetup(P description, RCFComputeDescriptor lastAnomalyDescriptor,\n         
   RandomCutForest forest) {\n        description.setForestMode(forestMode);\n        description.setTransformMethod(transformMethod);\n        description.setImputationMethod(preprocessor.getImputationMethod());\n        description.setNumberOfTrees(forest.getNumberOfTrees());\n        description.setTotalUpdates(forest.getTotalUpdates());\n        description.setLastAnomalyInternalTimestamp(lastAnomalyDescriptor.getInternalTimeStamp());\n        description.setLastExpectedRCFPoint(lastAnomalyDescriptor.getExpectedRCFPoint());\n        description.setDataConfidence(forest.getTimeDecay(), preprocessor.getValuesSeen(), forest.getOutputAfter(),\n                preprocessor.dataQuality());\n        description.setShingleSize(preprocessor.getShingleSize());\n        description.setInputLength(preprocessor.getInputLength());\n        description.setDimension(forest.getDimensions());\n        description.setReasonableForecast(forest.isOutputReady() && forest.getDimensions() >= 4);\n        description.setScale(preprocessor.getScale());\n        description.setShift(preprocessor.getShift());\n        description.setDeviations(preprocessor.getSmoothedDeviations());\n        description.setNumberOfNewImputes(preprocessor.numberOfImputes(description.getInputTimestamp()));\n        description.setInternalTimeStamp(preprocessor.getInternalTimeStamp() + description.getNumberOfNewImputes());\n        description.setRCFPoint(preprocessor.getScaledShingledInput(description.getCurrentInput(),\n                description.getInputTimestamp(), description.getMissingValues(), forest));\n        return description;\n    }\n\n    <P extends AnomalyDescriptor> void postProcess(P result) {\n\n        float[] point = result.getRCFPoint();\n\n        if (point != null) {\n\n            // first populate the description with current knowledge\n            // then update the preprocessor\n            // then update the RCF\n            if (result.getAnomalyGrade() > 0) {\n                
/**\n                 * adds information of expected point to the result descriptor (provided it is\n                 * marked anomalous) Note that is uses relativeIndex; that is, it can determine\n                 * that the anomaly occurred in the past (but within the shingle) and not at the\n                 * current point -- even though the detection has triggered now While this may\n                 * appear to be improper, information theoretically we may have a situation\n                 * where an anomaly is only discoverable after the \"horse has bolted\" -- suppose\n                 * that we see a random mixture of the triples { 1, 2, 3} and {2, 4, 5}\n                 * corresponding to \"slow weeks\" and \"busy weeks\". For example 1, 2, 3, 1, 2, 3,\n                 * 2, 4, 5, 1, 2, 3, 2, 4, 5, ... etc. If we see { 2, 2, X } (at positions 0 and\n                 * 1 (mod 3)) and are yet to see X, then we can infer that the pattern is\n                 * anomalous -- but we cannot determine which of the 2's are to blame. If it\n                 * were the first 2, then the detection is late. If X = 3 then we know it is the\n                 * first 2 in that unfinished triple; and if X = 5 then it is the second 2. In a\n                 * sense we are only truly wiser once the bolted horse has returned! But if we\n                 * were to say that the anomaly was always at the second 2 then that appears to\n                 * be suboptimal -- one natural path can be based on the ratio of the triples {\n                 * 1, 2, 3} and {2, 4, 5} seen before. 
Even better, we can attempt to estimate a\n                 * dynamic time dependent ratio -- and that is what RCF would do.\n                 *\n                 * @param result the description of the current point\n                 */\n                int shingleSize = result.getShingleSize();\n                int dimension = result.getDimension();\n                int base = dimension / shingleSize;\n                double[] reference = result.getCurrentInput();\n                float[] newPoint = result.getExpectedRCFPoint();\n\n                int index = result.getRelativeIndex();\n                if (index < 0) {\n                    reference = preprocessor.getShingledInput(shingleSize + index);\n                    result.setPastTimeStamp(preprocessor.getTimeStamp(shingleSize + index));\n                }\n\n                // relative index is the source of truth. Past values always have value:\n                // either current input or previous input.\n                result.setPastValues(reference);\n\n                if (newPoint != null) {\n                    double[] values = preprocessor.getExpectedValue(index, reference, point, newPoint);\n                    if (forestMode == ForestMode.TIME_AUGMENTED) {\n                        int endPosition = (shingleSize + index) * base;\n                        double timeGap = (newPoint[endPosition - 1] - point[endPosition - 1]);\n                        long expectedTimestamp = (timeGap == 0) ? result.getInputTimestamp() : (long) values[base - 1];\n                        if (index < 0) {\n                            expectedTimestamp = (timeGap == 0) ? 
preprocessor.getTimeStamp(shingleSize - 1 + index)\n                                    : (long) values[base - 1];\n                        }\n                        result.setExpectedTimeStamp(expectedTimestamp);\n                        double[] plausibleValues = Arrays.copyOf(values, base - 1);\n                        result.setExpectedValues(0, plausibleValues, 1.0);\n                    } else {\n                        result.setExpectedValues(0, values, 1.0);\n                    }\n                }\n\n                int startPosition = (shingleSize - 1 + result.getRelativeIndex()) * base;\n                DiVector attribution = result.getAttribution();\n\n                if (forestMode == ForestMode.TIME_AUGMENTED) {\n                    --base;\n                }\n                double[] flattenedAttribution = new double[base];\n\n                for (int i = 0; i < base; i++) {\n                    flattenedAttribution[i] = attribution.getHighLowSum(startPosition + i);\n                }\n                result.setRelevantAttribution(flattenedAttribution);\n                if (forestMode == ForestMode.TIME_AUGMENTED) {\n                    result.setTimeAttribution(attribution.getHighLowSum(startPosition + base));\n                }\n            }\n        }\n        // will update the forest\n        preprocessor.update(result.getCurrentInput(), point, result.getInputTimestamp(), result.getMissingValues(),\n                forest);\n        if (point != null) {\n            if (result.getAnomalyGrade() > 0) {\n                double[] postShift = preprocessor.getShift(); // may have changed\n                result.setPostShift(postShift);\n                result.setTransformDecay(preprocessor.getTransformDecay());\n            }\n        }\n        if (preprocessor.isOutputReady()) {\n            result.setPostDeviations(preprocessor.getSmoothedDeviations());\n        }\n    }\n\n    /**\n     * @return a new builder.\n     */\n    public static 
Builder<?> builder() {\n        return new Builder<>();\n    }\n\n    public static class Builder<T extends Builder<T>> {\n\n        // We use Optional types for optional primitive fields when it doesn't make\n        // sense to use a constant default.\n\n        protected int dimensions;\n        protected int sampleSize = DEFAULT_SAMPLE_SIZE;\n        protected Optional<Integer> outputAfter = Optional.empty();\n        protected Optional<Integer> startNormalization = Optional.empty();\n        protected Optional<Integer> stopNormalization = Optional.empty();\n        protected int numberOfTrees = DEFAULT_NUMBER_OF_TREES;\n        protected Optional<Double> timeDecay = Optional.empty();\n        protected Optional<Double> scoreDifferencing = Optional.empty();\n        protected Optional<Double> lowerThreshold = Optional.empty();\n        protected Optional<Double> weightTime = Optional.empty();\n        protected Optional<Long> randomSeed = Optional.empty();\n        protected boolean storeSequenceIndexesEnabled = DEFAULT_STORE_SEQUENCE_INDEXES_ENABLED;\n        protected boolean centerOfMassEnabled = DEFAULT_CENTER_OF_MASS_ENABLED;\n        protected boolean parallelExecutionEnabled = DEFAULT_PARALLEL_EXECUTION_ENABLED;\n        protected Optional<Integer> threadPoolSize = Optional.empty();\n        protected double boundingBoxCacheFraction = DEFAULT_BOUNDING_BOX_CACHE_FRACTION;\n        protected int shingleSize = DEFAULT_SHINGLE_SIZE;\n        protected Optional<Boolean> internalShinglingEnabled = Optional.empty();\n        protected double initialAcceptFraction = DEFAULT_INITIAL_ACCEPT_FRACTION;\n        protected double anomalyRate = 0.01;\n        protected TransformMethod transformMethod = TransformMethod.NONE;\n        protected ImputationMethod imputationMethod = RCF;\n        protected ForestMode forestMode = ForestMode.STANDARD;\n        protected ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;\n        protected boolean 
normalizeTime = false;\n        protected double[] fillValues = null;\n        protected double[] weights = null;\n        protected Optional<Double> useImputedFraction = Optional.empty();\n        protected boolean autoAdjust = false;\n        protected double zFactor = DEFAULT_Z_FACTOR;\n        protected boolean alertOnceInDrift = false;\n        protected Optional<Double> transformDecay = Optional.empty();\n        protected Optional<double[]> ignoreNearExpectedFromAbove = Optional.empty();\n        protected Optional<double[]> ignoreNearExpectedFromBelow = Optional.empty();\n        protected Optional<double[]> ignoreNearExpectedFromAboveByRatio = Optional.empty();\n        protected Optional<double[]> ignoreNearExpectedFromBelowByRatio = Optional.empty();\n\n        void validate() {\n            if (forestMode == ForestMode.TIME_AUGMENTED) {\n                if (internalShinglingEnabled.isPresent()) {\n                    checkArgument(shingleSize == 1 || internalShinglingEnabled.get(),\n                            \" shingle size has to be 1 or \" + \"internal shingling must turned on\");\n                    checkArgument(transformMethod == TransformMethod.NONE || internalShinglingEnabled.get(),\n                            \" internal shingling must turned on for transforms\");\n                } else {\n                    internalShinglingEnabled = Optional.of(true);\n                }\n                if (useImputedFraction.isPresent()) {\n                    throw new IllegalArgumentException(\" imputation infeasible\");\n                }\n            } else if (forestMode == ForestMode.STREAMING_IMPUTE) {\n                checkArgument(shingleSize > 1, \"imputation with shingle size 1 is not meaningful\");\n                internalShinglingEnabled.ifPresent(x -> checkArgument(x,\n                        \" input cannot be shingled (even if internal representation is different) \"));\n            } else {\n                if 
(!internalShinglingEnabled.isPresent()) {\n                    internalShinglingEnabled = Optional.of(true);\n                }\n                if (useImputedFraction.isPresent()) {\n                    throw new IllegalArgumentException(\" imputation infeasible\");\n                }\n            }\n            if (startNormalization.isPresent()) {\n                // we should not be setting normalizations unless we are careful\n                if (outputAfter.isPresent()) {\n                    // can be overspecified\n                    checkArgument(outputAfter.get() + shingleSize - 1 > startNormalization.get(),\n                            \"output after has to wait till normalization, reduce normalization\");\n                } else {\n                    int n = startNormalization.get();\n                    checkArgument(n > 0, \" startNormalization has to be positive\");\n                    // if start normalization is low then first few output can be 0\n                    outputAfter = Optional\n                            .of(max(max(1, (int) (sampleSize * DEFAULT_OUTPUT_AFTER_FRACTION)), n - shingleSize + 1));\n                }\n            } else {\n                if (outputAfter.isPresent()) {\n                    startNormalization = Optional.of(min(DEFAULT_START_NORMALIZATION, outputAfter.get()));\n                }\n            }\n        }\n\n        public ThresholdedRandomCutForest build() {\n            validate();\n            return new ThresholdedRandomCutForest(this);\n        }\n\n        protected RandomCutForest buildForest() {\n            RandomCutForest.Builder builder = new RandomCutForest.Builder().dimensions(dimensions)\n                    .sampleSize(sampleSize).numberOfTrees(numberOfTrees)\n                    .storeSequenceIndexesEnabled(storeSequenceIndexesEnabled).centerOfMassEnabled(centerOfMassEnabled)\n                    .parallelExecutionEnabled(parallelExecutionEnabled)\n                    
.boundingBoxCacheFraction(boundingBoxCacheFraction).shingleSize(shingleSize)\n                    .internalShinglingEnabled(internalShinglingEnabled.get())\n                    .initialAcceptFraction(initialAcceptFraction);\n            if (forestMode != ForestMode.STREAMING_IMPUTE) {\n                outputAfter.ifPresent(builder::outputAfter);\n            } else {\n                // forcing the change between internal and external shingling\n                outputAfter.ifPresent(n -> {\n                    int num = max(startNormalization.orElse(DEFAULT_START_NORMALIZATION), n) - shingleSize + 1;\n                    checkArgument(num > 0, \" max(start normalization, output after) should be at least \" + shingleSize);\n                    builder.outputAfter(num);\n                });\n            }\n            timeDecay.ifPresent(builder::timeDecay);\n            randomSeed.ifPresent(builder::randomSeed);\n            threadPoolSize.ifPresent(builder::threadPoolSize);\n            return builder.build();\n        }\n\n        public T dimensions(int dimensions) {\n            this.dimensions = dimensions;\n            return (T) this;\n        }\n\n        public T sampleSize(int sampleSize) {\n            this.sampleSize = sampleSize;\n            return (T) this;\n        }\n\n        public T startNormalization(int startNormalization) {\n            this.startNormalization = Optional.of(startNormalization);\n            return (T) this;\n        }\n\n        public T stopNormalization(int stopNormalization) {\n            this.stopNormalization = Optional.of(stopNormalization);\n            return (T) this;\n        }\n\n        public T outputAfter(int outputAfter) {\n            this.outputAfter = Optional.of(outputAfter);\n            return (T) this;\n        }\n\n        public T numberOfTrees(int numberOfTrees) {\n            this.numberOfTrees = numberOfTrees;\n            return (T) this;\n        }\n\n        public T shingleSize(int shingleSize) 
{\n            this.shingleSize = shingleSize;\n            return (T) this;\n        }\n\n        public T timeDecay(double timeDecay) {\n            this.timeDecay = Optional.of(timeDecay);\n            return (T) this;\n        }\n\n        public T transformDecay(double transformDecay) {\n            this.transformDecay = Optional.of(transformDecay);\n            return (T) this;\n        }\n\n        public T zFactor(double zFactor) {\n            this.zFactor = zFactor;\n            return (T) this;\n        }\n\n        public T useImputedFraction(double fraction) {\n            this.useImputedFraction = Optional.of(fraction);\n            return (T) this;\n        }\n\n        public T randomSeed(long randomSeed) {\n            this.randomSeed = Optional.of(randomSeed);\n            return (T) this;\n        }\n\n        public T centerOfMassEnabled(boolean centerOfMassEnabled) {\n            this.centerOfMassEnabled = centerOfMassEnabled;\n            return (T) this;\n        }\n\n        public T parallelExecutionEnabled(boolean parallelExecutionEnabled) {\n            this.parallelExecutionEnabled = parallelExecutionEnabled;\n            return (T) this;\n        }\n\n        public T threadPoolSize(int threadPoolSize) {\n            this.threadPoolSize = Optional.of(threadPoolSize);\n            return (T) this;\n        }\n\n        public T storeSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n            return (T) this;\n        }\n\n        @Deprecated\n        public T compact(boolean compact) {\n            return (T) this;\n        }\n\n        public T internalShinglingEnabled(boolean internalShinglingEnabled) {\n            this.internalShinglingEnabled = Optional.of(internalShinglingEnabled);\n            return (T) this;\n        }\n\n        @Deprecated\n        public T precision(Precision precision) {\n            return (T) this;\n        }\n\n    
    public T boundingBoxCacheFraction(double boundingBoxCacheFraction) {\n            this.boundingBoxCacheFraction = boundingBoxCacheFraction;\n            return (T) this;\n        }\n\n        public T initialAcceptFraction(double initialAcceptFraction) {\n            this.initialAcceptFraction = initialAcceptFraction;\n            return (T) this;\n        }\n\n        public Random getRandom() {\n            // If a random seed was given, use it to create a new Random. Otherwise, call\n            // the 0-argument constructor\n            return randomSeed.map(Random::new).orElseGet(Random::new);\n        }\n\n        public T anomalyRate(double anomalyRate) {\n            this.anomalyRate = anomalyRate;\n            return (T) this;\n        }\n\n        public T imputationMethod(ImputationMethod imputationMethod) {\n            this.imputationMethod = imputationMethod;\n            return (T) this;\n        }\n\n        public T fillValues(double[] values) {\n            // values cannot be a null\n            this.fillValues = Arrays.copyOf(values, values.length);\n            return (T) this;\n        }\n\n        public T weights(double[] values) {\n            // values cannot be a null\n            this.weights = Arrays.copyOf(values, values.length);\n            return (T) this;\n        }\n\n        public T normalizeTime(boolean normalizeTime) {\n            this.normalizeTime = normalizeTime;\n            return (T) this;\n        }\n\n        public T transformMethod(TransformMethod method) {\n            this.transformMethod = method;\n            return (T) this;\n        }\n\n        public T forestMode(ForestMode forestMode) {\n            this.forestMode = forestMode;\n            return (T) this;\n        }\n\n        public T scoreDifferencing(double persistence) {\n            this.scoreDifferencing = Optional.of(persistence);\n            return (T) this;\n        }\n\n        public T autoAdjust(boolean autoAdjust) {\n            
this.autoAdjust = autoAdjust;\n            return (T) this;\n        }\n\n        public T weightTime(double value) {\n            this.weightTime = Optional.of(value);\n            return (T) this;\n        }\n\n        public T ignoreNearExpectedFromAbove(double[] ignoreSimilarFromAbove) {\n            this.ignoreNearExpectedFromAbove = Optional.ofNullable(ignoreSimilarFromAbove);\n            return (T) this;\n        }\n\n        public T ignoreNearExpectedFromBelow(double[] ignoreSimilarFromBelow) {\n            this.ignoreNearExpectedFromBelow = Optional.ofNullable(ignoreSimilarFromBelow);\n            return (T) this;\n        }\n\n        public T ignoreNearExpectedFromAboveByRatio(double[] ignoreSimilarFromAboveByRatio) {\n            this.ignoreNearExpectedFromAboveByRatio = Optional.ofNullable(ignoreSimilarFromAboveByRatio);\n            return (T) this;\n        }\n\n        public T ignoreNearExpectedFromBelowByRatio(double[] ignoreSimilarFromBelowByRatio) {\n            this.ignoreNearExpectedFromBelowByRatio = Optional.ofNullable(ignoreSimilarFromBelowByRatio);\n            return (T) this;\n        }\n\n        public T scoringStrategy(ScoringStrategy scoringStrategy) {\n            this.scoringStrategy = scoringStrategy;\n            return (T) this;\n        }\n\n        public T alertOnce(boolean alertOnceInDrift) {\n            this.alertOnceInDrift = alertOnceInDrift;\n            return (T) this;\n        }\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/calibration/ErrorHandler.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.calibration;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.parkservices.RCFCaster.DEFAULT_ERROR_PERCENTILE;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\n\nimport java.util.Arrays;\nimport java.util.Optional;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.PredictiveRandomCutForest;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.ForecastDescriptor;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.SampleSummary;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n// we recommend the article \"Regret in the On-Line Decision Problem\", by Foster and Vohra,\n// Games and Economic Behavior, Vol=29 (1-2), 1999\n// the discussion is applicable to non-regret scenarios as well; but essentially boils down to\n// fixed point/minimax computation. 
One could use multiplicative update type methods which would be\n// uniform over all quantiles, provided sufficient data and a large enough calibration horizon.\n// Multiplicative updates are scale free -- but providing scale free forecasting over a stream raises the\n// issue \"what is the current scale of the stream\". While such questions can be answered, that discussion\n// can be involved and out of current scope of this library. We simplify the issue to calibrating two\n// fixed quantiles and hence additive updates are reasonable.\n\n@Getter\n@Setter\npublic class ErrorHandler {\n\n    int sequenceIndex;\n    double percentile;\n    int forecastHorizon;\n    int errorHorizon;\n    // the following arrays store the state of the sequential computation\n    // these can be optimized -- for example once could store the errors; which\n    // would see much fewer increments.\n    // However, for a small enough errorHorizon, the generality of\n    // changing the error function\n    // outweighs the benefit of recomputation. 
The search in the ensemble tree is\n    // still a larger bottleneck than\n    // these computations at the moment; not to mention issues of saving and\n    // restoring state.\n    protected RangeVector[] pastForecasts;\n\n    RangeVector errorDistribution;\n    DiVector errorRMSE;\n    float[] errorMean;\n    Deviation[] intervalPrecision;\n    Deviation[] rmseHighDeviations;\n    Deviation[] rmseLowDeviations;\n    float[] lowerLimit;\n    float[] upperLimit;\n    double[] lastInputs;\n    PredictiveRandomCutForest estimator;\n    float[] lastDataDeviations;\n\n    // We keep the multipliers defined for potential\n    // future use.\n\n    RangeVector multipliers;\n    RangeVector adders;\n\n    public ErrorHandler(Builder builder) {\n        checkArgument(builder.forecastHorizon > 0, \"has to be positive\");\n        checkArgument(builder.errorHorizon >= builder.forecastHorizon,\n                \"intervalPrecision horizon should be at least as large as forecast horizon\");\n        checkArgument(builder.errorHorizon <= 1024, \"reduce error horizon\");\n        forecastHorizon = builder.forecastHorizon;\n        errorHorizon = builder.errorHorizon;\n        int inputLength = (builder.dimensions / builder.shingleSize);\n        int length = inputLength * forecastHorizon;\n        percentile = builder.percentile;\n        pastForecasts = new RangeVector[forecastHorizon];\n        for (int i = 0; i < forecastHorizon; i++) {\n            pastForecasts[i] = new RangeVector(length);\n        }\n        sequenceIndex = 0;\n        lastInputs = new double[2 * inputLength];\n        rmseHighDeviations = new Deviation[length];\n        rmseLowDeviations = new Deviation[length];\n        intervalPrecision = new Deviation[length];\n        for (int i = 0; i < length; i++) {\n            rmseHighDeviations[i] = new Deviation(1.0 / errorHorizon);\n            rmseLowDeviations[i] = new Deviation(1.0 / errorHorizon);\n            intervalPrecision[i] = new Deviation(1.0 / 
errorHorizon);\n        }\n        errorMean = new float[length];\n        errorRMSE = new DiVector(length);\n        lastDataDeviations = new float[inputLength];\n        errorDistribution = new RangeVector(length);\n        if (builder.upperLimit.isPresent()) {\n            checkArgument(builder.upperLimit.get().length == inputLength, \"incorrect length\");\n            upperLimit = Arrays.copyOf(builder.upperLimit.get(), inputLength);\n        } else {\n            upperLimit = new float[inputLength];\n            Arrays.fill(upperLimit, Float.MAX_VALUE);\n        }\n        if (builder.lowerLimit.isPresent()) {\n            checkArgument(builder.lowerLimit.get().length == inputLength, \"incorrect length\");\n            for (int y = 0; y < inputLength; y++) {\n                checkArgument(builder.lowerLimit.get()[y] <= upperLimit[y], \"incorrect limits\");\n            }\n            lowerLimit = Arrays.copyOf(builder.lowerLimit.get(), inputLength);\n        } else {\n            lowerLimit = new float[inputLength];\n            Arrays.fill(lowerLimit, -Float.MAX_VALUE);\n        }\n        // uses lastInputs as a markov input, the +2 corresponds to lookahead and\n        // forecasthorizon - lookahed; the 2*inputlength correspond to the\n        // (plausibly correlated) positive and negative errors\n        // There are potentially many different variations -- use a difference encoding\n        // for the\n        // last two values (keeps lastInputs the same length) -- or change lastInputs to\n        // be a full\n        // on wavelet transform, etc.\n        //\n        if (builder.useRCF) {\n            int inputDimensions = lastInputs.length + 2 * inputLength + 2;\n            double[] weights = new double[inputDimensions];\n            Arrays.fill(weights, 1.0);\n            // the lookahead has 1/3 the weight of the total\n            weights[lastInputs.length] = lastInputs.length;\n            weights[lastInputs.length + 1] = lastInputs.length;\n    
        estimator = new PredictiveRandomCutForest.Builder<>().inputDimensions(inputDimensions).weights(weights)\n                    .randomSeed(13).outputAfter(50).transformMethod(TransformMethod.NORMALIZE).startNormalization(49)\n                    .build();\n        }\n    }\n\n    // for mappers\n    public ErrorHandler(int errorHorizon, int forecastHorizon, int sequenceIndex, double percentile, int inputLength,\n            float[] pastForecastsFlattened, float[] lastDataDeviations, double[] lastInput, Deviation[] deviations,\n            PredictiveRandomCutForest estimator, float[] auxiliary) {\n        checkArgument(forecastHorizon > 0, \" incorrect forecast horizon\");\n        checkArgument(errorHorizon >= forecastHorizon, \"incorrect error horizon\");\n        checkArgument(inputLength > 0, \"incorrect parameters\");\n        checkArgument(sequenceIndex >= 0, \"cannot be negative\");\n        checkArgument(Math.abs(percentile - 0.25) < 0.24, \"has to be between (0,0.5) \");\n        checkArgument(deviations.length == 3 * inputLength * forecastHorizon, \"incorrect length\");\n        checkArgument(lastInput.length == 2 * inputLength, \"incorrect length\");\n\n        this.sequenceIndex = sequenceIndex;\n        this.errorHorizon = errorHorizon;\n        this.percentile = percentile;\n        this.forecastHorizon = forecastHorizon;\n        this.pastForecasts = new RangeVector[forecastHorizon];\n        this.lastInputs = Arrays.copyOf(lastInput, lastInput.length);\n\n        int length = forecastHorizon * inputLength;\n        checkArgument(lastDataDeviations.length >= inputLength, \"incorrect length\");\n        this.lastDataDeviations = Arrays.copyOf(lastDataDeviations, lastDataDeviations.length);\n        this.errorMean = new float[length];\n        this.errorRMSE = new DiVector(length);\n        this.errorDistribution = new RangeVector(length);\n        this.intervalPrecision = new Deviation[inputLength * forecastHorizon];\n        
this.rmseHighDeviations = new Deviation[inputLength * forecastHorizon];\n        this.rmseLowDeviations = new Deviation[inputLength * forecastHorizon];\n        for (int y = 0; y < inputLength * forecastHorizon; y++) {\n            this.intervalPrecision[y] = deviations[y].copy();\n            this.rmseHighDeviations[y] = deviations[y + inputLength * forecastHorizon].copy();\n            this.rmseLowDeviations[y] = deviations[y + 2 * inputLength * forecastHorizon].copy();\n        }\n        lowerLimit = new float[inputLength];\n        Arrays.fill(lowerLimit, -Float.MAX_VALUE);\n        upperLimit = new float[inputLength];\n        Arrays.fill(upperLimit, Float.MAX_VALUE);\n        this.estimator = estimator;\n        int arrayLength = pastForecastsFlattened.length / (3 * length);\n        checkArgument(arrayLength * 3 * length == pastForecastsFlattened.length, \" has to be multiple of 3\");\n        for (int i = 0; i < arrayLength; i++) {\n            float[] values = Arrays.copyOfRange(pastForecastsFlattened, i * 3 * length, (i * 3 + 1) * length);\n            float[] upper = Arrays.copyOfRange(pastForecastsFlattened, (i * 3 + 1) * length, (i * 3 + 2) * length);\n            float[] lower = Arrays.copyOfRange(pastForecastsFlattened, (i * 3 + 2) * length, (i * 3 + 3) * length);\n            pastForecasts[i] = new RangeVector(values, upper, lower);\n\n        }\n        for (int i = arrayLength; i < forecastHorizon; i++) {\n            pastForecasts[i] = new RangeVector(length);\n        }\n        recomputeErrors(lastInputs, inputLength);\n    }\n\n    public void setUpperLimit(float[] upperLimit) {\n        if (upperLimit != null) {\n            checkArgument(upperLimit.length == this.upperLimit.length, \"incorrect Length\");\n            System.arraycopy(upperLimit, 0, this.upperLimit, 0, upperLimit.length);\n        }\n    }\n\n    public void setLowerLimit(float[] lowerLimit) {\n        if (lowerLimit != null) {\n            checkArgument(lowerLimit.length == 
this.lowerLimit.length, \"incorrect Length\");\n            for (int i = 0; i < lowerLimit.length; i++) {\n                checkArgument(lowerLimit[i] <= this.upperLimit[i], \"lower limit is higher than upper limit\");\n                this.lowerLimit[i] = lowerLimit[i];\n            }\n        }\n    }\n\n    /**\n     * updates the stored information (actuals) and recomputes the calibrations\n     * \n     * @param input      the actual input\n     * @param deviations the deviations (post the current input)\n     */\n\n    public void updateActuals(double[] input, double[] deviations) {\n        int arrayLength = pastForecasts.length;\n        int inputLength = input.length;\n\n        for (int i = 0; i < lastInputs.length - inputLength; i++) {\n            lastInputs[i] = lastInputs[i + inputLength];\n        }\n        System.arraycopy(input, 0, lastInputs, lastInputs.length - inputLength, inputLength);\n\n        if (sequenceIndex > 0) {\n            // sequenceIndex indicates the first empty place for input\n            // note the predictions have already been stored\n            int inputIndex = (sequenceIndex + arrayLength - 1) % arrayLength;\n            float[] errorTuple = new float[lastInputs.length + 2 * inputLength + 2];\n            for (int y = 0; y < lastInputs.length; y++) {\n                errorTuple[y] = (float) lastInputs[y];\n            }\n            for (int i = 0; i < forecastHorizon; i++) {\n                if (sequenceIndex > i) {\n                    for (int j = 0; j < inputLength; j++) {\n                        RangeVector a = pastForecasts[inputIndex];\n                        int offset = i * inputLength;\n                        errorTuple[lastInputs.length] = i;\n                        errorTuple[lastInputs.length + 1] = forecastHorizon - i;\n                        if (input[j] <= a.upper[offset + j] && input[j] >= a.lower[offset + j]) {\n                            intervalPrecision[offset + j].update(1.0);\n                 
       } else {\n                            intervalPrecision[offset + j].update(0);\n                        }\n                        double error = input[j] - a.values[offset + j];\n                        if (error >= 0) {\n                            rmseHighDeviations[offset + j].update(error);\n                            rmseLowDeviations[offset + j].update(0);\n                            errorTuple[lastInputs.length + 2 + j] = (float) error;\n                            errorTuple[lastInputs.length + inputLength + 2 + j] = 0;\n                        } else {\n                            rmseLowDeviations[offset + j].update(error);\n                            rmseHighDeviations[offset + j].update(0);\n                            errorTuple[lastInputs.length + inputLength + 2 + j] = (float) (error);\n                            errorTuple[lastInputs.length + 2 + j] = 0;\n                        }\n                    }\n                    if (estimator != null) {\n                        estimator.update(errorTuple, 0L);\n                    }\n                }\n                inputIndex = (inputIndex + arrayLength - 1) % arrayLength;\n            }\n        }\n        lastDataDeviations = toFloatArray(deviations);\n        recomputeErrors(lastInputs, inputLength);\n    }\n\n    void recomputeErrors(double[] lastInputs, int inputLength) {\n        double a;\n        if (estimator != null) {\n            a = (double) (sequenceIndex) / (estimator.getForest().getOutputAfter());\n        } else {\n            a = (double) (sequenceIndex) / (10 * forecastHorizon);\n        }\n        float[] query = new float[lastInputs.length + inputLength * 2 + 2];\n        System.arraycopy(toFloatArray(lastInputs), 0, query, 0, lastInputs.length);\n        float[] errorHigh = new float[intervalPrecision.length];\n        float[] errorLow = new float[intervalPrecision.length];\n        if (a < 1) {\n            for (int y = 0; y < intervalPrecision.length; y++) {\n      
          errorRMSE.high[y] = errorRMSE.low[y] = lastDataDeviations[y % inputLength];\n                errorHigh[y] = errorLow[y] = lastDataDeviations[y % inputLength];\n            }\n        } else {\n            if (a < 2) {\n                for (int y = 0; y < errorRMSE.high.length; y++) {\n                    double offset = (2 - a) * lastDataDeviations[y % inputLength];\n                    errorRMSE.high[y] = (offset + (a - 1) * rmseHighDeviations[y].getDeviation());\n                    errorRMSE.low[y] = (offset + (a - 1) * rmseLowDeviations[y].getDeviation());\n                }\n            } else {\n                for (int y = 0; y < errorRMSE.high.length; y++) {\n                    errorRMSE.high[y] = rmseHighDeviations[y].getDeviation();\n                    errorRMSE.low[y] = rmseLowDeviations[y].getDeviation();\n                }\n            }\n\n            if (estimator != null) {\n                for (int i = 0; i < forecastHorizon; i++) {\n                    int[] missing = new int[inputLength];\n                    query[lastInputs.length] = i;\n                    query[lastInputs.length + 1] = forecastHorizon - i;\n                    for (int j = 0; j < inputLength; j++) {\n                        missing[j] = lastInputs.length + 2 + j;\n                    }\n                    // at this moment we use the PredictiveRCF more for the shorter term estimation,\n                    // and use an interpolation\n                    // with the observed error for the longer term\n                    SampleSummary answer = estimator.predict(query, 0, missing, 1, 0.5, 0.7);\n                    for (int j = 0; j < inputLength; j++) {\n\n                        errorHigh[i * inputLength + j] = (forecastHorizon - i)\n                                * max(0, answer.deviation[lastInputs.length + 2 + j]) / forecastHorizon\n                                + (float) (i * rmseHighDeviations[i * inputLength + j].getDeviation()\n                          
              / forecastHorizon);\n\n                    }\n                    for (int j = 0; j < inputLength; j++) {\n                        missing[j] = lastInputs.length + inputLength + 2 + j;\n                    }\n                    answer = estimator.predict(query, 0, missing, 1, 0.5, 0.7);\n                    for (int j = 0; j < inputLength; j++) {\n\n                        errorLow[i * inputLength + j] = (forecastHorizon - i)\n                                * max(0, answer.deviation[lastInputs.length + inputLength + 2 + j]) / forecastHorizon\n                                + (float) (i * rmseLowDeviations[i * inputLength + j].getDeviation() / forecastHorizon);\n\n                    }\n                }\n            } else {\n                for (int y = 0; y < errorRMSE.high.length; y++) {\n                    errorHigh[y] = (float) errorRMSE.high[y];\n                    errorLow[y] = (float) errorRMSE.low[y];\n                }\n            }\n        }\n        // a control loop\n        for (int y = 0; y < intervalPrecision.length; y++) {\n            if (intervalPrecision[y].getMean() < 1.0 - percentile) {\n                errorHigh[y] = (float) max(1.0, 1.0 / (intervalPrecision[y].getMean() + 0.1)) * errorHigh[y];\n                errorLow[y] = (float) max(1.0, 1.0 / (intervalPrecision[y].getMean() + 0.1)) * errorLow[y];\n            }\n        }\n\n        for (int i = 0; i < errorMean.length; i++) {\n            errorMean[i] = (float) (rmseHighDeviations[i].getMean() + rmseLowDeviations[i].getMean());\n            errorDistribution.values[i] = errorMean[i];\n            errorDistribution.upper[i] = errorMean[i] + (float) (1.3 * errorHigh[i]);\n            errorDistribution.lower[i] = errorMean[i] - (float) (1.3 * errorLow[i]);\n        }\n    }\n\n    public void augmentDescriptor(ForecastDescriptor descriptor) {\n        int inputLength = descriptor.getInputLength();\n        float[] iPrecision = new float[inputLength * 
forecastHorizon];\n        for (int i = 0; i < errorMean.length; i++) {\n            iPrecision[i] = (float) intervalPrecision[i].getMean();\n        }\n        descriptor.setErrorMean(errorMean);\n        descriptor.setErrorRMSE(errorRMSE);\n        descriptor.setObservedErrorDistribution(errorDistribution);\n        descriptor.setIntervalPrecision(iPrecision);\n    }\n\n    /**\n     * saves the forecast -- note that this section assumes that updateActuals() has\n     * been invoked prior (to recompute the deviations)\n     * \n     * @param vector the forecast\n     */\n    public void updateForecasts(RangeVector vector) {\n        ++sequenceIndex;\n        int arrayLength = pastForecasts.length;\n        int storedForecastIndex = (sequenceIndex + arrayLength - 1) % (arrayLength);\n        int length = pastForecasts[0].values.length;\n        System.arraycopy(vector.values, 0, pastForecasts[storedForecastIndex].values, 0, length);\n        System.arraycopy(vector.upper, 0, pastForecasts[storedForecastIndex].upper, 0, length);\n        System.arraycopy(vector.lower, 0, pastForecasts[storedForecastIndex].lower, 0, length);\n    }\n\n    public RangeVector getErrorDistribution() {\n        return new RangeVector(errorDistribution);\n    }\n\n    public float[] getErrorMean() {\n        return Arrays.copyOf(errorMean, errorMean.length);\n    }\n\n    public DiVector getErrorRMSE() {\n        return new DiVector(errorRMSE);\n    }\n\n    public Deviation[] getDeviationList() {\n        Deviation[] list = new Deviation[3 * intervalPrecision.length];\n        for (int i = 0; i < intervalPrecision.length; i++) {\n            list[i] = intervalPrecision[i].copy();\n            list[i + intervalPrecision.length] = rmseHighDeviations[i].copy();\n            list[i + 2 * intervalPrecision.length] = rmseLowDeviations[i].copy();\n        }\n        return list;\n    }\n\n    public float[] getIntervalPrecision() {\n        float[] iPrecision = new 
float[intervalPrecision.length];\n        for (int i = 0; i < iPrecision.length; i++) {\n            iPrecision[i] = (float) (intervalPrecision[i].getMean());\n        }\n        return iPrecision;\n    }\n\n    public void calibrate(double[] input, Calibration calibration, RangeVector ranges) {\n        if (calibration != Calibration.NONE) {\n            int inputLength = intervalPrecision.length / forecastHorizon;\n            checkArgument(input.length == inputLength, \"incorrect input\");\n            checkArgument(intervalPrecision.length == ranges.values.length, \"mismatched lengths\");\n            for (int y = 0; y < intervalPrecision.length; y++) {\n                if (calibration == Calibration.SIMPLE) {\n                    ranges.values[y] = min(\n                            max(ranges.values[y] + errorDistribution.values[y], lowerLimit[y % inputLength]),\n                            upperLimit[y % inputLength]);\n                } else {\n                    ranges.values[y] = min(max(ranges.values[y], lowerLimit[y % inputLength]),\n                            upperLimit[y % inputLength]);\n                }\n                ranges.upper[y] = min(max(ranges.upper[y], ranges.values[y] + errorDistribution.upper[y]),\n                        upperLimit[y % inputLength]);\n                ranges.lower[y] = max(min(ranges.lower[y], ranges.values[y] + errorDistribution.lower[y]),\n                        lowerLimit[y % inputLength]);\n            }\n        }\n    }\n\n    public int getInputLength() {\n        return lastInputs.length / 2;\n    }\n\n    /**\n     * produces the stored forecasts as a non-null array\n     */\n    public float[] getPastForecastsFlattened() {\n        int arrayLength = min(sequenceIndex, pastForecasts.length);\n        int length = intervalPrecision.length;\n        float[] answer = new float[3 * length * arrayLength];\n        for (int i = 0; i < arrayLength; i++) {\n            System.arraycopy(pastForecasts[i].values, 0, 
answer, 3 * i * length, length);\n            System.arraycopy(pastForecasts[i].upper, 0, answer, 3 * i * length + length, length);\n            System.arraycopy(pastForecasts[i].lower, 0, answer, 3 * i * length + 2 * length, length);\n        }\n        return answer;\n    }\n\n    public static Builder builder() {\n        return new Builder();\n    }\n\n    public static class Builder {\n        protected int dimensions;\n        protected int shingleSize = 1;\n        protected int forecastHorizon;\n        protected boolean useRCF = true;\n        protected int errorHorizon = 100; // easy for percentile\n        protected double percentile = DEFAULT_ERROR_PERCENTILE;\n        protected Optional<float[]> upperLimit = Optional.empty();\n        protected Optional<float[]> lowerLimit = Optional.empty();\n\n        public Builder dimensions(int dimensions) {\n            this.dimensions = dimensions;\n            return this;\n        }\n\n        public Builder shingleSize(int shingleSize) {\n            this.shingleSize = shingleSize;\n            return this;\n        }\n\n        public Builder forecastHorizon(int horizon) {\n            this.forecastHorizon = horizon;\n            return this;\n        }\n\n        public Builder errorHorizon(int errorHorizon) {\n            this.errorHorizon = errorHorizon;\n            return this;\n        }\n\n        public Builder percentile(double percentile) {\n            this.percentile = percentile;\n            return this;\n        }\n\n        public Builder lowerLimit(float[] lowerLimit) {\n            this.lowerLimit = Optional.of(lowerLimit);\n            return this;\n        }\n\n        public Builder upperLimit(float[] upperLimit) {\n            this.upperLimit = Optional.of(upperLimit);\n            return this;\n        }\n\n        public Builder useRCF(boolean use) {\n            useRCF = use;\n            return this;\n        }\n\n        public ErrorHandler build() {\n            return new 
ErrorHandler(this);\n        }\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/config/Calibration.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.config;\n\npublic enum Calibration {\n\n    NONE,\n\n    /**\n     * a basic staring point where the intervals are adjusted to be the minimal\n     * necessary based on past error the intervals are smaller -- but the interval\n     * precision will likely be close to 1 - 2 * percentile\n     */\n    MINIMAL,\n\n    /**\n     * a Markov inequality based interval, where the past error and model errors are\n     * additive. The interval precision is likely higher than MINIMAL but so are the\n     * intervals.\n     */\n    SIMPLE;\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/config/CorrectionMode.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.config;\n\n/**\n * Options for using RCF, specially with thresholds\n */\npublic enum CorrectionMode {\n\n    /**\n     * default behavior, no correction\n     */\n    NONE,\n\n    /**\n     * due to transforms, or due to input noise\n     */\n    NOISE,\n\n    /**\n     * elimination due to multi mode operation\n     */\n\n    MULTI_MODE,\n\n    /**\n     * effect of an anomaly in shingle\n     */\n\n    ANOMALY_IN_SHINGLE,\n\n    /**\n     * conditional forecast, using conditional fields\n     */\n\n    CONDITIONAL_FORECAST,\n\n    /**\n     * forecasted value was not very different\n     */\n\n    FORECAST,\n\n    /**\n     * data drifts and level shifts, will not be corrected unless level shifts are\n     * turned on\n     */\n\n    DATA_DRIFT,\n\n    // forced suppression - do not use for extended anomalies\n    ALERT_ONCE\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/config/ScoringStrategy.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.config;\n\n/**\n * Options for using RCF, specially with thresholds\n */\npublic enum ScoringStrategy {\n\n    /**\n     * default behavior to be optimized; currently EXPECTED_INVERSE_DEPTH\n     */\n    EXPECTED_INVERSE_DEPTH,\n\n    /**\n     * This is the same as STANDARD mode where the scoring function is switched to\n     * distances between the vectors. Since RCFs build a multiresolution tree, and\n     * in the aggregate, preserves distances to some approximation, this provides an\n     * alternate anomaly detection mechanism which can be useful for shingleSize = 1\n     * and (dynamic) population analysis via RCFs. Specifically it switches the\n     * scoring to be based on the distance computation in the Density Estimation\n     * (interpolation). This allows for a direct comparison of clustering based\n     * outlier detection and RCFs over numeric vectors. All transformations\n     * available to the STANDARD mode in the ThresholdedRCF are available for this\n     * mode as well; this does not affect RandomCutForest core in any way. 
For\n     * timeseries analysis the STANDARD mode is recommended, but this does provide\n     * another option in combination with the TransformMethods.\n     */\n    DISTANCE,\n\n    /**\n     * RCFs are an updatable data structure that can support multiple difference\n     * inference methods. Given the longstanding interest in ensembles of different\n     * models, this strategy uses the multiple inference capabilities to increase\n     * precision. It does not escape our attention that multi-mode allows the\n     * functionality of multi-models yet use a significantly smaller state/memory\n     * footprint since all the modes use RCF. The different modes are probed with\n     * computational efficiency in mind.\n     */\n\n    MULTI_MODE,\n\n    /**\n     * Same as above, except optimized for increasing recall.\n     */\n\n    MULTI_MODE_RECALL;\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/returntypes/AnalysisDescriptor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.returntypes;\n\nimport java.util.ArrayList;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ForecastDescriptor;\n\n@Getter\n@Setter\npublic class AnalysisDescriptor {\n\n    /**\n     * the intent of this class is to describe the list of anomalies and the final\n     * forecast of some data this is most useful in sequential analysis when that\n     * data is processed sequentially\n     */\n    ArrayList<AnomalyDescriptor> anomalies;\n    ForecastDescriptor forecastDescriptor;\n\n    public AnalysisDescriptor(ArrayList<AnomalyDescriptor> anomalies, ForecastDescriptor forecastDescriptor) {\n        this.anomalies = anomalies;\n        this.forecastDescriptor = forecastDescriptor;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/returntypes/GenericAnomalyDescriptor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.returntypes;\n\nimport java.util.List;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.util.Weighted;\n\n@Getter\n@Setter\npublic class GenericAnomalyDescriptor<P> {\n\n    // the following corresponds to the list of extected points in AnomalyDetector,\n    // which is returned from\n    // TRCF. The list corresponds to plausible values (cluster centers) and a weight\n    // representing the likelihood\n    // The list is sorted in decreasing order of likelihood. Most often, the first\n    // element should suffice.\n    // in case of an anomalous point, however the information here can provide more\n    // insight\n    List<Weighted<P>> representativeList;\n\n    // standard, as in AnomalyDetector; we do not recommend attempting to\n    // disambiguate scores of non-anomalous\n    // points. 
Note that scores can be low.\n    double score;\n\n    // standard as in AnomalyDetector\n    double threshold;\n\n    // a value between [0,1] indicating the strength of the anomaly, it can be\n    // viewed as a confidence score\n    // projected by the algorithm.\n    double anomalyGrade;\n\n    public GenericAnomalyDescriptor(List<Weighted<P>> representative, double score, double threshold,\n            double anomalyGrade) {\n        this.representativeList = representative;\n        this.score = score;\n        this.threshold = threshold;\n        this.anomalyGrade = anomalyGrade;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/returntypes/RCFComputeDescriptor.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\n\nimport java.util.Arrays;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.inputtypes.Point;\nimport com.amazon.randomcutforest.parkservices.config.CorrectionMode;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.returntypes.DiVector;\n\n/**\n * a basic class that is used to store the internal state of the streaming\n * processing in ThresholdedRandomCutForest and others.\n */\n@Getter\n@Setter\npublic class RCFComputeDescriptor extends Point {\n\n    ForestMode forestMode = ForestMode.STANDARD;\n\n    TransformMethod transformMethod = TransformMethod.NONE;\n\n    ImputationMethod imputationMethod = ImputationMethod.PREVIOUS;\n\n    ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;\n\n    CorrectionMode correctionMode = CorrectionMode.NONE;\n\n    // the most important parameter of the forest\n    int shingleSize;\n\n    // the actual dimensions\n    int dimension;\n\n    // the input length; useful for standalone analysis\n    
int inputLength;\n\n    // sequence index (the number of updates to RCF) -- it is possible in imputation\n    // that\n    // the number of updates more than the input tuples seen by the overall program\n    long totalUpdates;\n\n    // determines if values can be input and or expected point calculated\n    boolean reasonableForecast;\n\n    // internal timestamp (basically a sequence index, but can be scaled and\n    // jittered as in\n    // the example);\n    // kept as long for potential future use\n    long internalTimeStamp;\n\n    // number of trees in the forest\n    int numberOfTrees;\n\n    // current missing values, if any\n    int[] missingValues;\n\n    // potential number of imputes before processing current point\n    int numberOfNewImputes;\n\n    // actual, potentially transformed point on which compute occurs\n    float[] RCFPoint;\n\n    // score for various postprocessing\n    double RCFScore;\n\n    // the following describes the grade of the anomaly in the range [0:1] where\n    // 0 is not an anomaly\n    double anomalyGrade;\n\n    // the threshold used in inference\n    double threshold;\n\n    // same for attribution; this is basic RCF attribution which has high/low\n    // information\n    DiVector attribution;\n\n    /**\n     * position of the anomaly vis a vis the current time (can be -ve) if anomaly is\n     * detected late, which can and should happen sometime; for shingle size 1; this\n     * is always 0\n     */\n    int relativeIndex;\n\n    // useful for detecting noise\n    double[] deviations;\n\n    // useful for calibration in RCFCaster\n    double[] postDeviations;\n\n    // the multiplication factors to convert RCF representation to actuals/input\n    double[] scale;\n\n    // the addition performed (after multiplications) to convert RCF representation\n    // to actuals/input\n    double[] shift;\n\n    // effects of a specific anomaly\n    double[] postShift;\n\n    // how long the effects last\n    double 
transformDecay;\n\n    // expected RCFPoint for the current point\n    float[] expectedRCFPoint;\n\n    // internal timestamp of last anomaly\n    long lastAnomalyInternalTimestamp;\n\n    // expected point of last anomaly\n    float[] lastExpectedRCFPoint;\n\n    // if the anomaly is due to timestamp when it is augmented only for current time\n    long expectedTimeStamp;\n\n    // used for streaming imputation\n    double[][] imputedPoints;\n\n    public RCFComputeDescriptor(double[] input, long inputTimeStamp) {\n        super(input, inputTimeStamp);\n    }\n\n    public RCFComputeDescriptor(double[] input, long inputTimeStamp, ForestMode forestMode,\n            TransformMethod transformMethod, ImputationMethod imputationMethod) {\n        super(input, inputTimeStamp);\n        this.forestMode = forestMode;\n        this.transformMethod = transformMethod;\n        this.imputationMethod = imputationMethod;\n    }\n\n    public void setShift(double[] shift) {\n        this.shift = copyIfNotnull(shift);\n    }\n\n    public void setPostShift(double[] shift) {\n        this.postShift = copyIfNotnull(shift);\n    }\n\n    public double[] getShift() {\n        return copyIfNotnull(shift);\n    }\n\n    public void setScale(double[] scale) {\n        this.scale = copyIfNotnull(scale);\n    }\n\n    public double[] getScale() {\n        return copyIfNotnull(scale);\n    }\n\n    public double[] getDeltaShift() {\n        if (shift == null || postShift == null) {\n            return null;\n        }\n        double[] answer = new double[shift.length];\n        for (int i = 0; i < shift.length; i++) {\n            answer[i] = postShift[i] - shift[i];\n        }\n        return answer;\n    }\n\n    public void setExpectedRCFPoint(float[] point) {\n        expectedRCFPoint = copyIfNotnull(point);\n    }\n\n    public float[] getExpectedRCFPoint() {\n        return copyIfNotnull(expectedRCFPoint);\n    }\n\n    public void setRCFPoint(float[] point) {\n        RCFPoint = 
copyIfNotnull(point);\n    }\n\n    public float[] getRCFPoint() {\n        return copyIfNotnull(RCFPoint);\n    }\n\n    public void setLastExpectedRCFdPoint(float[] point) {\n        lastExpectedRCFPoint = copyIfNotnull(point);\n    }\n\n    public float[] getLastExpectedRCFPoint() {\n        return copyIfNotnull(lastExpectedRCFPoint);\n    }\n\n    public void setAttribution(DiVector attribution) {\n        this.attribution = (attribution == null) ? null : new DiVector(attribution);\n    }\n\n    public DiVector getAttribution() {\n        return (attribution == null) ? null : new DiVector(attribution);\n    }\n\n    public int[] getMissingValues() {\n        return (missingValues == null) ? null : Arrays.copyOf(missingValues, missingValues.length);\n    }\n\n    public void setMissingValues(int[] values) {\n        missingValues = (values == null) ? null : Arrays.copyOf(values, values.length);\n    }\n\n    protected float[] copyIfNotnull(float[] array) {\n        return array == null ? 
null : Arrays.copyOf(array, array.length);\n    }\n\n    public void setImputedPoint(int index, double[] impute) {\n        checkArgument(numberOfNewImputes > 0, \" no imputation is indicated\");\n        checkArgument(impute != null && impute.length == inputLength, \"incorrect length\");\n        if (imputedPoints == null) {\n            imputedPoints = new double[Math.min(numberOfNewImputes, shingleSize - 1)][];\n        }\n        checkArgument(imputedPoints.length > index && index >= 0 && imputedPoints[index] == null, \"already set!\");\n        imputedPoints[index] = Arrays.copyOf(impute, inputLength);\n    }\n\n    // an explicit copy operation to control the stored state\n    public RCFComputeDescriptor copyOf() {\n        RCFComputeDescriptor answer = new RCFComputeDescriptor(getCurrentInput(), getInputTimestamp(), forestMode,\n                transformMethod, imputationMethod);\n        answer.setShingleSize(shingleSize);\n        answer.setDimension(dimension);\n        answer.setInputLength(inputLength);\n        answer.setReasonableForecast(reasonableForecast);\n        answer.setAttribution(attribution);\n        answer.setRCFPoint(RCFPoint);\n        answer.setRCFScore(RCFScore);\n        answer.setInternalTimeStamp(internalTimeStamp);\n        answer.setExpectedRCFPoint(expectedRCFPoint);\n        answer.setNumberOfTrees(numberOfTrees);\n        answer.setTotalUpdates(totalUpdates);\n        answer.setNumberOfNewImputes(numberOfNewImputes);\n        answer.setLastAnomalyInternalTimestamp(lastAnomalyInternalTimestamp);\n        answer.setLastExpectedRCFdPoint(lastExpectedRCFPoint);\n        answer.setScoringStrategy(scoringStrategy);\n        answer.setShift(shift);\n        answer.setScale(scale);\n        answer.setPostShift(postShift);\n        answer.setTransformDecay(transformDecay);\n        answer.setAnomalyGrade(anomalyGrade);\n        answer.setThreshold(threshold);\n        answer.setCorrectionMode(correctionMode);\n        return answer;\n  
  }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/RCFCasterMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.PredictorCorrector;\nimport com.amazon.randomcutforest.parkservices.RCFCaster;\nimport com.amazon.randomcutforest.parkservices.calibration.ErrorHandler;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.parkservices.state.errorhandler.ErrorHandlerMapper;\nimport com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorMapper;\nimport com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorMapper;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorMapper;\nimport 
com.amazon.randomcutforest.state.preprocessor.PreprocessorState;\n\n@Getter\n@Setter\npublic class RCFCasterMapper implements IStateMapper<RCFCaster, RCFCasterState> {\n\n    @Override\n    public RCFCasterState toState(RCFCaster model) {\n        RCFCasterState state = new RCFCasterState();\n\n        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();\n        randomCutForestMapper.setPartialTreeStateEnabled(true);\n        randomCutForestMapper.setSaveTreeStateEnabled(true);\n        randomCutForestMapper.setCompressionEnabled(true);\n        randomCutForestMapper.setSaveCoordinatorStateEnabled(true);\n        randomCutForestMapper.setSaveExecutorContextEnabled(true);\n\n        state.setForestState(randomCutForestMapper.toState(model.getForest()));\n\n        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();\n        state.setPreprocessorStates(\n                new PreprocessorState[] { preprocessorMapper.toState((Preprocessor) model.getPreprocessor()) });\n\n        state.setPredictorCorrectorState(new PredictorCorrectorMapper().toState(model.getPredictorCorrector()));\n        state.setLastDescriptorState(\n                new ComputeDescriptorMapper().toState((RCFComputeDescriptor) model.getLastAnomalyDescriptor()));\n        state.setForestMode(model.getForestMode().name());\n        state.setTransformMethod(model.getTransformMethod().name());\n\n        state.setForecastHorizon(model.getForecastHorizon());\n\n        ErrorHandlerMapper errorHandlerMapper = new ErrorHandlerMapper();\n        state.setErrorHandler(errorHandlerMapper.toState(model.getErrorHandler()));\n\n        state.setErrorHorizon(model.getErrorHorizon());\n        state.setCalibrationMethod(model.getCalibrationMethod().name());\n        state.setScoringStrategy(model.getScoringStrategy().name());\n        return state;\n    }\n\n    @Override\n    public RCFCaster toModel(RCFCasterState state, long seed) {\n        RandomCutForestMapper 
randomCutForestMapper = new RandomCutForestMapper();\n        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();\n\n        RandomCutForest forest = randomCutForestMapper.toModel(state.getForestState());\n        Preprocessor preprocessor = preprocessorMapper.toModel(state.getPreprocessorStates()[0]);\n\n        ForestMode forestMode = ForestMode.valueOf(state.getForestMode());\n        TransformMethod transformMethod = TransformMethod.valueOf(state.getTransformMethod());\n\n        RCFComputeDescriptor descriptor = new ComputeDescriptorMapper().toModel(state.getLastDescriptorState());\n        descriptor.setForestMode(forestMode);\n        descriptor.setTransformMethod(transformMethod);\n        descriptor\n                .setImputationMethod(ImputationMethod.valueOf(state.getPreprocessorStates()[0].getImputationMethod()));\n        descriptor.setShingleSize(preprocessor.getShingleSize());\n\n        PredictorCorrectorMapper mapper = new PredictorCorrectorMapper();\n        PredictorCorrector predictorCorrector = mapper.toModel(state.getPredictorCorrectorState());\n\n        ErrorHandlerMapper errorHandlerMapper = new ErrorHandlerMapper();\n        ErrorHandler errorHandler = errorHandlerMapper.toModel(state.getErrorHandler());\n\n        Calibration calibrationMethod = Calibration.valueOf(state.getCalibrationMethod());\n        ScoringStrategy scoringStrategy = ScoringStrategy.valueOf(state.getScoringStrategy());\n\n        return new RCFCaster(forestMode, transformMethod, scoringStrategy, forest, predictorCorrector, preprocessor,\n                descriptor, state.getForecastHorizon(), errorHandler, state.getErrorHorizon(), calibrationMethod);\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/RCFCasterState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport static com.amazon.randomcutforest.state.Version.V3_8;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.parkservices.state.errorhandler.ErrorHandlerState;\n\n@Data\npublic class RCFCasterState extends ThresholdedRandomCutForestState {\n    private static final long serialVersionUID = 1L;\n    private String version = V3_8;\n\n    private int forecastHorizon;\n    private ErrorHandlerState errorHandler;\n    private int errorHorizon;\n    private String calibrationMethod;\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/ThresholdedRandomCutForestMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArrayNullable;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.PredictorCorrector;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorMapper;\nimport com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorMapper;\nimport com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderMapper;\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorMapper;\nimport 
com.amazon.randomcutforest.state.preprocessor.PreprocessorState;\nimport com.amazon.randomcutforest.state.returntypes.DiVectorMapper;\n\n@Getter\n@Setter\npublic class ThresholdedRandomCutForestMapper\n        implements IStateMapper<ThresholdedRandomCutForest, ThresholdedRandomCutForestState> {\n\n    @Override\n    public ThresholdedRandomCutForest toModel(ThresholdedRandomCutForestState state, long seed) {\n\n        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();\n        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();\n\n        RandomCutForest forest = randomCutForestMapper.toModel(state.getForestState());\n        Preprocessor preprocessor = preprocessorMapper.toModel(state.getPreprocessorStates()[0]);\n\n        ForestMode forestMode = ForestMode.valueOf(state.getForestMode());\n        TransformMethod transformMethod = TransformMethod.valueOf(state.getTransformMethod());\n\n        ScoringStrategy scoringStrategy = ScoringStrategy.EXPECTED_INVERSE_DEPTH;\n        if (state.getScoringStrategy() != null && !state.getScoringStrategy().isEmpty()) {\n            scoringStrategy = ScoringStrategy.valueOf(state.getScoringStrategy());\n        }\n\n        RCFComputeDescriptor descriptor;\n\n        if (state.getLastDescriptorState() == null) {\n            descriptor = new RCFComputeDescriptor(null, 0L);\n            descriptor.setRCFScore(state.getLastAnomalyScore());\n            descriptor.setInternalTimeStamp(state.getLastAnomalyTimeStamp());\n            descriptor.setAttribution(new DiVectorMapper().toModel(state.getLastAnomalyAttribution()));\n            descriptor.setRCFPoint(toFloatArrayNullable(state.getLastAnomalyPoint()));\n            descriptor.setExpectedRCFPoint(toFloatArrayNullable(state.getLastExpectedPoint()));\n            descriptor.setRelativeIndex(state.getLastRelativeIndex());\n            descriptor.setScoringStrategy(scoringStrategy);\n        } else {\n            descriptor = new 
ComputeDescriptorMapper().toModel(state.getLastDescriptorState());\n        }\n\n        descriptor.setShingleSize(preprocessor.getShingleSize());\n        descriptor.setForestMode(forestMode);\n        descriptor.setTransformMethod(transformMethod);\n        descriptor.setScoringStrategy(scoringStrategy);\n        descriptor\n                .setImputationMethod(ImputationMethod.valueOf(state.getPreprocessorStates()[0].getImputationMethod()));\n\n        PredictorCorrector predictorCorrector;\n        if (state.getPredictorCorrectorState() == null) {\n            BasicThresholderMapper thresholderMapper = new BasicThresholderMapper();\n            BasicThresholder thresholder = thresholderMapper.toModel(state.getThresholderState());\n            predictorCorrector = new PredictorCorrector(thresholder, preprocessor.getInputLength());\n            predictorCorrector.setNumberOfAttributors(state.getNumberOfAttributors());\n            predictorCorrector.setLastScore(new double[] { state.getLastScore() });\n        } else {\n            PredictorCorrectorMapper mapper = new PredictorCorrectorMapper();\n            predictorCorrector = mapper.toModel(state.getPredictorCorrectorState());\n        }\n\n        return new ThresholdedRandomCutForest(forestMode, transformMethod, scoringStrategy, forest, predictorCorrector,\n                preprocessor, descriptor);\n    }\n\n    @Override\n    public ThresholdedRandomCutForestState toState(ThresholdedRandomCutForest model) {\n        ThresholdedRandomCutForestState state = new ThresholdedRandomCutForestState();\n        RandomCutForestMapper randomCutForestMapper = new RandomCutForestMapper();\n        randomCutForestMapper.setPartialTreeStateEnabled(true);\n        randomCutForestMapper.setSaveTreeStateEnabled(true);\n        randomCutForestMapper.setCompressionEnabled(true);\n        randomCutForestMapper.setSaveCoordinatorStateEnabled(true);\n        randomCutForestMapper.setSaveExecutorContextEnabled(true);\n\n        
state.setForestState(randomCutForestMapper.toState(model.getForest()));\n\n        PreprocessorMapper preprocessorMapper = new PreprocessorMapper();\n        state.setPreprocessorStates(\n                new PreprocessorState[] { preprocessorMapper.toState((Preprocessor) model.getPreprocessor()) });\n\n        state.setPredictorCorrectorState(new PredictorCorrectorMapper().toState(model.getPredictorCorrector()));\n        state.setForestMode(model.getForestMode().name());\n        state.setTransformMethod(model.getTransformMethod().name());\n        state.setScoringStrategy(model.getScoringStrategy().name());\n\n        state.setLastDescriptorState(new ComputeDescriptorMapper().toState(model.getLastAnomalyDescriptor()));\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/ThresholdedRandomCutForestState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport static com.amazon.randomcutforest.state.Version.V3_8;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorState;\nimport com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorState;\nimport com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderState;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.state.preprocessor.PreprocessorState;\nimport com.amazon.randomcutforest.state.returntypes.DiVectorState;\n\n@Data\npublic class ThresholdedRandomCutForestState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V3_8;\n    RandomCutForestState forestState;\n    // deprecated but not marked due to 2.1 models\n    private BasicThresholderState thresholderState;\n    private PreprocessorState[] preprocessorStates;\n\n    // following fields are deprecated, but not removed for compatibility with 2.1\n    // models\n    private double ignoreSimilarFactor;\n    private double triggerFactor;\n    private long lastAnomalyTimeStamp;\n    private double lastAnomalyScore;\n    private DiVectorState lastAnomalyAttribution;\n    private double lastScore;\n    
private double[] lastAnomalyPoint;\n    private double[] lastExpectedPoint;\n    private boolean previousIsPotentialAnomaly;\n    private boolean inHighScoreRegion;\n    private boolean ignoreSimilar;\n    private int numberOfAttributors;\n    // end deprecated segment\n\n    private long randomSeed;\n\n    private String forestMode;\n    private String transformMethod;\n    private String scoringStrategy;\n    private int lastRelativeIndex;\n    private int lastReset;\n    private PredictorCorrectorState predictorCorrectorState;\n    private ComputeDescriptorState lastDescriptorState;\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/errorhandler/ErrorHandlerMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.errorhandler;\n\nimport static com.amazon.randomcutforest.state.statistics.DeviationMapper.getDeviations;\nimport static com.amazon.randomcutforest.state.statistics.DeviationMapper.getStates;\n\nimport com.amazon.randomcutforest.PredictiveRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.calibration.ErrorHandler;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.PredictiveRandomCutForestMapper;\nimport com.amazon.randomcutforest.state.statistics.DeviationMapper;\n\npublic class ErrorHandlerMapper implements IStateMapper<ErrorHandler, ErrorHandlerState> {\n\n    @Override\n    public ErrorHandlerState toState(ErrorHandler model) {\n        ErrorHandlerState errorHandlerState = new ErrorHandlerState();\n        errorHandlerState.setSequenceIndex(model.getSequenceIndex());\n        errorHandlerState.setPercentile(model.getPercentile());\n        errorHandlerState.setForecastHorizon(model.getForecastHorizon());\n        errorHandlerState.setErrorHorizon(model.getErrorHorizon());\n        errorHandlerState.setLastDataDeviations(model.getLastDataDeviations());\n        DeviationMapper deviationMapper = new DeviationMapper();\n        errorHandlerState.setDeviationStates(getStates(model.getDeviationList(), deviationMapper));\n        
errorHandlerState.setLastInput(model.getLastInputs());\n        errorHandlerState.setInputLength(model.getInputLength());\n        errorHandlerState.setPastForecastsFlattened(model.getPastForecastsFlattened());\n        if (model.getEstimator() != null) {\n            PredictiveRandomCutForestMapper mapper = new PredictiveRandomCutForestMapper();\n            errorHandlerState.setEstimatorState(mapper.toState(model.getEstimator()));\n        }\n        errorHandlerState.setLowerLimit(model.getLowerLimit());\n        errorHandlerState.setUpperLimit(model.getUpperLimit());\n        return errorHandlerState;\n    }\n\n    @Override\n    public ErrorHandler toModel(ErrorHandlerState state, long seed) {\n        PredictiveRandomCutForest forest = null;\n        PredictiveRandomCutForestMapper mapper = new PredictiveRandomCutForestMapper();\n        if (state.getEstimatorState() != null) {\n            forest = mapper.toModel(state.getEstimatorState());\n        }\n        DeviationMapper deviationMapper = new DeviationMapper();\n        ErrorHandler errorHandler = new ErrorHandler(state.getErrorHorizon(), state.getForecastHorizon(),\n                state.getSequenceIndex(), state.getPercentile(), state.getInputLength(),\n                state.getPastForecastsFlattened(), state.getLastDataDeviations(), state.getLastInput(),\n                getDeviations(state.getDeviationStates(), deviationMapper), forest, null);\n        errorHandler.setUpperLimit(state.getUpperLimit());\n        errorHandler.setLowerLimit(state.getLowerLimit());\n        return errorHandler;\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/errorhandler/ErrorHandlerState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.errorhandler;\n\nimport static com.amazon.randomcutforest.state.Version.V4_0;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.PredictiveRandomCutForestState;\nimport com.amazon.randomcutforest.state.statistics.DeviationState;\n\n@Data\npublic class ErrorHandlerState implements Serializable {\n    private static final long serialVersionUID = 1L;\n    private String version = V4_0;\n    private int sequenceIndex;\n    private double percentile;\n    private int forecastHorizon;\n    private int errorHorizon;\n    private float[] pastForecastsFlattened;\n    private int inputLength;\n    private float[] lastDataDeviations;\n    private double[] lastInput;\n\n    private float[] upperLimit;\n    private float[] lowerLimit;\n    private DeviationState[] deviationStates;\n    private PredictiveRandomCutForestState estimatorState;\n    // items below are not used now. Kept for regret computation later.\n    // Regret is what we feel when we realize that we should have been better off\n    // had we done something else. A basic requirement of regret computation is that\n    // it should avoid or at least reduce the regret that will be felt.\n    private float[] addersFlattened;\n    private float[] multipliersFlattened;\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/predictorcorrector/PredictorCorrectorMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.predictorcorrector;\n\nimport com.amazon.randomcutforest.parkservices.PredictorCorrector;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorMapper;\nimport com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderMapper;\nimport com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderState;\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.statistics.DeviationMapper;\nimport com.amazon.randomcutforest.state.statistics.DeviationState;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\npublic class PredictorCorrectorMapper implements IStateMapper<PredictorCorrector, PredictorCorrectorState> {\n\n    @Override\n    public PredictorCorrectorState toState(PredictorCorrector model) {\n        PredictorCorrectorState state = new PredictorCorrectorState();\n        state.setLastScore(model.getLastScore());\n        state.setNumberOfAttributors(model.getNumberOfAttributors());\n        state.setIgnoreNearExpected(model.getIgnoreNearExpected());\n        BasicThresholderMapper mapper = new BasicThresholderMapper();\n        
BasicThresholder[] thresholders = model.getThresholders();\n        BasicThresholderState thresholderState[] = new BasicThresholderState[thresholders.length];\n        for (int y = 0; y < thresholders.length; y++) {\n            thresholderState[y] = mapper.toState(thresholders[y]);\n        }\n        state.setThresholderStates(thresholderState);\n        DeviationMapper devMapper = new DeviationMapper();\n        Deviation[] deviations = model.getDeviations();\n        state.setAutoAdjust(model.isAutoAdjust());\n        if (state.isAutoAdjust()) {\n            DeviationState deviationState[] = new DeviationState[deviations.length];\n            for (int y = 0; y < deviations.length; y++) {\n                deviationState[y] = devMapper.toState(deviations[y]);\n            }\n            state.setDeviationStates(deviationState);\n        }\n        state.setNoiseFactor(model.getNoiseFactor());\n        state.setBaseDimension(model.getBaseDimension());\n        state.setLastStrategy(model.getLastStrategy().name());\n        state.setRandomSeed(model.getRandomSeed());\n        if (model.getLastDescriptor() != null) {\n            ComputeDescriptorMapper descriptorMapper = new ComputeDescriptorMapper();\n            state.setLastDescriptor(descriptorMapper.toState(model.getLastDescriptor()));\n        }\n        state.setModeInformation(model.getModeInformation());\n        state.setRunLength(model.getRunLength());\n        state.setIgnoreDrift(model.isIgnoreDrift());\n        state.setSamplingSuppport(model.getSamplingSupport());\n        return state;\n    }\n\n    @Override\n    public PredictorCorrector toModel(PredictorCorrectorState state, long seed) {\n        BasicThresholderMapper mapper = new BasicThresholderMapper();\n        int num = state.getThresholderStates().length;\n        BasicThresholder[] thresholders = new BasicThresholder[num];\n        for (int i = 0; i < num; i++) {\n            thresholders[i] = 
mapper.toModel(state.getThresholderStates()[i]);\n        }\n        Deviation[] deviations = null;\n        if (state.isAutoAdjust()) {\n            DeviationMapper devMapper = new DeviationMapper();\n            deviations = new Deviation[state.getDeviationStates().length];\n            for (int y = 0; y < deviations.length; y++) {\n                deviations[y] = devMapper.toModel(state.getDeviationStates()[y]);\n            }\n        }\n        PredictorCorrector predictorCorrector = new PredictorCorrector(thresholders, deviations,\n                state.getBaseDimension(), state.getRandomSeed());\n        predictorCorrector.setNumberOfAttributors(state.getNumberOfAttributors());\n        predictorCorrector.setLastStrategy(ScoringStrategy.valueOf(state.getLastStrategy()));\n        predictorCorrector.setLastScore(state.getLastScore());\n        predictorCorrector.setIgnoreNearExpected(state.getIgnoreNearExpected());\n        predictorCorrector.setAutoAdjust(state.isAutoAdjust());\n        predictorCorrector.setNoiseFactor(state.getNoiseFactor());\n        predictorCorrector.setRunLength(state.getRunLength());\n        predictorCorrector.setModeInformation(state.getModeInformation());\n        if (state.getLastDescriptor() != null) {\n            ComputeDescriptorMapper descriptorMapper = new ComputeDescriptorMapper();\n            predictorCorrector.setLastDescriptor(descriptorMapper.toModel(state.getLastDescriptor()));\n        }\n        predictorCorrector.setIgnoreDrift(state.isIgnoreDrift());\n        predictorCorrector.setSamplingSupport(state.getSamplingSuppport());\n        return predictorCorrector;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/predictorcorrector/PredictorCorrectorState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.predictorcorrector;\n\nimport static com.amazon.randomcutforest.state.Version.V3_8;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.parkservices.state.returntypes.ComputeDescriptorState;\nimport com.amazon.randomcutforest.parkservices.state.threshold.BasicThresholderState;\nimport com.amazon.randomcutforest.state.statistics.DeviationState;\n\n@Data\npublic class PredictorCorrectorState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private String version = V3_8;\n    private BasicThresholderState[] thresholderStates;\n    private double[] lastScore;\n    private String lastStrategy;\n    private int numberOfAttributors;\n    private int baseDimension;\n    private long randomSeed;\n    private double noiseFactor;\n    private boolean autoAdjust;\n    private boolean ignoreDrift;\n    private ComputeDescriptorState lastDescriptor;\n    private int runLength;\n    private double samplingSuppport;\n    private double[] modeInformation; // multiple modes -- to be used in future\n    private DeviationState[] deviationStates; // in future to be used for learning deviations\n    private double[] ignoreNearExpected;\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/returntypes/ComputeDescriptorMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.returntypes;\n\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArrayNullable;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArrayNullable;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.parkservices.config.CorrectionMode;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.returntypes.DiVectorMapper;\n\n@Getter\n@Setter\npublic class ComputeDescriptorMapper implements IStateMapper<RCFComputeDescriptor, ComputeDescriptorState> {\n\n    @Override\n    public RCFComputeDescriptor toModel(ComputeDescriptorState state, long seed) {\n\n        RCFComputeDescriptor descriptor = new RCFComputeDescriptor(state.getCurrentInput(), state.getInputTimeStamp());\n        descriptor.setRCFScore(state.getScore());\n        descriptor.setInternalTimeStamp(state.getInternalTimeStamp());\n        descriptor.setAttribution(new DiVectorMapper().toModel(state.getAttribution()));\n        descriptor.setRCFPoint(toFloatArrayNullable(state.getPoint()));\n        descriptor.setExpectedRCFPoint(toFloatArrayNullable(state.getExpectedPoint()));\n        
descriptor.setRelativeIndex(state.getRelativeIndex());\n        descriptor.setScoringStrategy(ScoringStrategy.valueOf(state.getStrategy()));\n        descriptor.setShift(state.getShift());\n        descriptor.setPostShift(state.getPostShift());\n        descriptor.setTransformDecay(state.getTransformDecay());\n        descriptor.setPostDeviations(state.getPostDeviations());\n        descriptor.setScale(state.getScale());\n        descriptor.setAnomalyGrade(state.getAnomalyGrade());\n        descriptor.setThreshold(state.getThreshold());\n        descriptor.setCorrectionMode(CorrectionMode.valueOf(state.getCorrectionMode()));\n        return descriptor;\n    }\n\n    @Override\n    public ComputeDescriptorState toState(RCFComputeDescriptor descriptor) {\n\n        ComputeDescriptorState state = new ComputeDescriptorState();\n        state.setInternalTimeStamp(descriptor.getInternalTimeStamp());\n        state.setScore(descriptor.getRCFScore());\n        state.setAttribution(new DiVectorMapper().toState(descriptor.getAttribution()));\n        state.setPoint(toDoubleArrayNullable(descriptor.getRCFPoint()));\n        state.setExpectedPoint(toDoubleArrayNullable(descriptor.getExpectedRCFPoint()));\n        state.setRelativeIndex(descriptor.getRelativeIndex());\n        state.setStrategy(descriptor.getScoringStrategy().name());\n        state.setShift(descriptor.getShift());\n        state.setPostShift(descriptor.getPostShift());\n        state.setTransformDecay(descriptor.getTransformDecay());\n        state.setPostDeviations(descriptor.getPostDeviations());\n        state.setScale(descriptor.getScale());\n        state.setAnomalyGrade(descriptor.getAnomalyGrade());\n        state.setThreshold(descriptor.getThreshold());\n        state.setCorrectionMode(descriptor.getCorrectionMode().name());\n        state.setInputTimeStamp(descriptor.getInputTimestamp());\n        state.setCurrentInput(descriptor.getCurrentInput());\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/returntypes/ComputeDescriptorState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.returntypes;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.returntypes.DiVectorState;\n\n@Data\npublic class ComputeDescriptorState implements Serializable {\n    private static final long serialVersionUID = 2L;\n\n    private long internalTimeStamp;\n    private double score;\n    private DiVectorState attribution;\n    private double lastScore;\n    private double[] point;\n    private double[] expectedPoint;\n    private int relativeIndex;\n    private int lastReset;\n    private String strategy;\n    private double[] shift;\n    private double[] scale;\n    private double[] postShift;\n    private double transformDecay;\n    private double[] postDeviations;\n    private double threshold;\n    private double anomalyGrade;\n    private String correctionMode;\n    private long inputTimeStamp;\n    private double[] currentInput;\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/threshold/BasicThresholderMapper.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.threshold;\n\nimport static com.amazon.randomcutforest.state.statistics.DeviationMapper.getDeviations;\nimport static com.amazon.randomcutforest.state.statistics.DeviationMapper.getStates;\n\nimport lombok.Getter;\nimport lombok.Setter;\n\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.state.IStateMapper;\nimport com.amazon.randomcutforest.state.statistics.DeviationMapper;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\n@Getter\n@Setter\npublic class BasicThresholderMapper implements IStateMapper<BasicThresholder, BasicThresholderState> {\n\n    @Override\n    public BasicThresholder toModel(BasicThresholderState state, long seed) {\n        DeviationMapper deviationMapper = new DeviationMapper();\n        Deviation[] deviations = null;\n        if (state.getDeviationStates() != null) {\n            deviations = getDeviations(state.getDeviationStates(), deviationMapper);\n        } else if (state.getPrimaryDeviationState() != null) {\n            // backward compatility; will be deprecated in 4.0\n            deviations = new Deviation[3];\n            deviations[0] = deviationMapper.toModel(state.getPrimaryDeviationState());\n            deviations[1] = deviationMapper.toModel(state.getSecondaryDeviationState());\n    
        deviations[2] = deviationMapper.toModel(state.getThresholdDeviationState());\n        }\n        BasicThresholder thresholder = new BasicThresholder(deviations);\n        thresholder.setAbsoluteThreshold(state.getAbsoluteThreshold());\n        thresholder.setLowerThreshold(state.getLowerThreshold());\n        thresholder.setInitialThreshold(state.getInitialThreshold());\n        thresholder.setScoreDifferencing(state.getHorizon());\n        thresholder.setCount(state.getCount());\n        thresholder.setAutoThreshold(state.isAutoThreshold());\n        thresholder.setMinimumScores(state.getMinimumScores());\n        thresholder.setZfactor(state.getZFactor());\n        return thresholder;\n    }\n\n    @Override\n    public BasicThresholderState toState(BasicThresholder model) {\n        BasicThresholderState state = new BasicThresholderState();\n        DeviationMapper deviationMapper = new DeviationMapper();\n\n        state.setZFactor(model.getZFactor());\n        state.setLowerThreshold(model.getLowerThreshold());\n        state.setAbsoluteThreshold(model.getAbsoluteThreshold());\n        state.setInitialThreshold(model.getInitialThreshold());\n        state.setCount(model.getCount());\n        state.setAutoThreshold(model.isAutoThreshold());\n        state.setMinimumScores(model.getMinimumScores());\n        state.setDeviationStates(getStates(model.getDeviations(), deviationMapper));\n        state.setHorizon(model.getScoreDifferencing());\n        return state;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/state/threshold/BasicThresholderState.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state.threshold;\n\nimport java.io.Serializable;\n\nimport lombok.Data;\n\nimport com.amazon.randomcutforest.state.statistics.DeviationState;\n\n@Data\npublic class BasicThresholderState implements Serializable {\n    private static final long serialVersionUID = 1L;\n\n    private long randomseed;\n\n    @Deprecated\n    private boolean inAnomaly;\n\n    @Deprecated\n    private double elasticity;\n\n    @Deprecated\n    private boolean attributionEnabled;\n\n    private int count;\n\n    private int minimumScores;\n\n    // do not use\n    private DeviationState primaryDeviationState;\n\n    // do not use\n    private DeviationState secondaryDeviationState;\n\n    // do not use\n    private DeviationState thresholdDeviationState;\n\n    @Deprecated\n    private double upperThreshold;\n\n    private double lowerThreshold;\n\n    private double absoluteThreshold;\n\n    private boolean autoThreshold;\n\n    private double initialThreshold;\n\n    private double zFactor;\n\n    @Deprecated\n    private double upperZfactor;\n\n    @Deprecated\n    private double absoluteScoreFraction;\n\n    private double horizon;\n\n    private DeviationState[] deviationStates;\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/main/java/com/amazon/randomcutforest/parkservices/threshold/BasicThresholder.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.threshold;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY;\nimport static java.lang.Math.max;\nimport static java.lang.Math.min;\nimport static java.lang.Math.sqrt;\n\nimport java.util.List;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.statistics.Deviation;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class BasicThresholder {\n\n    public static double DEFAULT_SCORE_DIFFERENCING = 0.5;\n    public static int DEFAULT_MINIMUM_SCORES = 10;\n    public static double DEFAULT_FACTOR_ADJUSTMENT_THRESHOLD = 0.9;\n    public static double DEFAULT_ABSOLUTE_THRESHOLD = 0.8;\n    public static double DEFAULT_INITIAL_THRESHOLD = 1.5;\n    public static double DEFAULT_Z_FACTOR = 3.0;\n    public static double MINIMUM_Z_FACTOR = 2.0;\n    public static boolean DEFAULT_AUTO_THRESHOLD = true;\n    public static int DEFAULT_DEVIATION_STATES = 3;\n\n    // keeping a count of the values seen because both deviation variables\n    // primaryDeviation\n    // and secondaryDeviation may not be used always\n    protected int count = 0;\n\n    // horizon 
= 0 is short term, switches to secondary\n    // horizon = 1 long term, switches to primary\n    protected double scoreDifferencing = DEFAULT_SCORE_DIFFERENCING;\n\n    // below these many observations, deviation is not useful\n    protected int minimumScores = DEFAULT_MINIMUM_SCORES;\n\n    protected Deviation primaryDeviation;\n\n    protected Deviation secondaryDeviation;\n\n    protected Deviation thresholdDeviation;\n\n    protected boolean autoThreshold = DEFAULT_AUTO_THRESHOLD;\n\n    // an absoluteThreshold\n    protected double absoluteThreshold = DEFAULT_ABSOLUTE_THRESHOLD;\n\n    // the upper threshold of scores above which points are likely anomalies\n    protected double factorAdjustmentThreshold = DEFAULT_FACTOR_ADJUSTMENT_THRESHOLD;\n    // initial absolute threshold used to determine anomalies before sufficient\n    // values are seen\n    protected double initialThreshold = DEFAULT_INITIAL_THRESHOLD;\n    // used to determine the surprise coefficient above which we can call a\n    // potential anomaly\n    protected double zFactor = DEFAULT_Z_FACTOR;\n\n    public BasicThresholder(double primaryDiscount, double secondaryDiscount, boolean adjust) {\n        primaryDeviation = new Deviation(primaryDiscount);\n        secondaryDeviation = new Deviation(secondaryDiscount);\n        // a longer horizon to adjust\n        thresholdDeviation = new Deviation(primaryDiscount / 2);\n        autoThreshold = adjust;\n    }\n\n    public BasicThresholder(double discount) {\n        this(discount, discount, false);\n    }\n\n    public BasicThresholder(Deviation[] deviations) {\n        int length = (deviations == null) ? 
0 : deviations.length;\n        if (length != DEFAULT_DEVIATION_STATES) {\n            double timeDecay = 1.0 / (DEFAULT_SAMPLE_SIZE * DEFAULT_SAMPLE_SIZE_COEFFICIENT_IN_TIME_DECAY);\n            this.primaryDeviation = new Deviation(timeDecay);\n            this.secondaryDeviation = new Deviation(timeDecay);\n            this.thresholdDeviation = new Deviation(0.1 * timeDecay);\n        } else {\n            this.primaryDeviation = deviations[0];\n            this.secondaryDeviation = deviations[1];\n            this.thresholdDeviation = deviations[2];\n        }\n    }\n\n    public BasicThresholder(List<Double> scores, double rate) {\n        this.primaryDeviation = new Deviation(0);\n        this.secondaryDeviation = new Deviation(0);\n        this.thresholdDeviation = new Deviation(0);\n        if (scores != null) {\n            scores.forEach(s -> update(s, s));\n        }\n        primaryDeviation.setDiscount(rate);\n        secondaryDeviation.setDiscount(rate);\n        thresholdDeviation.setDiscount(0.1 * rate);\n    }\n\n    /**\n     * a boolean that determines if enough values have been seen to be able to\n     * discern deviations\n     * \n     * @return true/false based on counts of various statistic\n     */\n    public boolean isDeviationReady() {\n        if (count < minimumScores) {\n            return false;\n        }\n\n        if (scoreDifferencing != 0) {\n            return secondaryDeviation.getCount() >= minimumScores;\n        }\n        return true;\n    }\n\n    /**\n     * this function helps switch from short term (not able to use deviation, using\n     * absolute scores) which is the first minimumScores observations of the scoring\n     * function to using deviation (and not using absolute scores, except as a lower\n     * bound) at 2*minimumScores It is often the case that the data has \"run\"\n     * effects and the initial scores can all come in low or can all come in high\n     * \n     * @return a parameter that helps smooth 
transition of initial to long term\n     *         behavior\n     */\n    protected double intermediateTermFraction() {\n        if (count < minimumScores) {\n            return 0;\n        } else if (count > 2 * minimumScores) {\n            return 1;\n        } else {\n            return (count - minimumScores) * 1.0 / minimumScores;\n        }\n    }\n\n    @Deprecated\n    public double threshold() {\n        return getPrimaryThreshold();\n    }\n\n    public double getPrimaryThreshold() {\n        if (!isDeviationReady()) {\n            return 0;\n        }\n        return primaryDeviation.getMean() + zFactor * primaryDeviation.getDeviation();\n    }\n\n    /**\n     * The simplest thresholder that does not use any auxiliary correction, and can\n     * be used for multiple scoring capabilities.\n     *\n     * @param score the value being thresholded\n     * @return a computation of grade between [-1,1], grades in the range (0,1] are\n     *         to be considered anomalous\n     */\n    public double getPrimaryGrade(double score) {\n        if (!isDeviationReady()) {\n            return 0;\n        }\n        double tFactor = 2 * zFactor;\n        double deviation = primaryDeviation.getDeviation();\n        if (deviation > 0) {\n            tFactor = min(tFactor, (score - primaryDeviation.getMean()) / deviation);\n        } else {\n            return (score > primaryDeviation.getMean() + 1e-10) ? 1.0 : 0;\n        }\n        double t = (tFactor - zFactor) / (zFactor);\n        return max(0, t);\n    }\n\n    public Weighted<Double> getPrimaryThresholdAndGrade(double score) {\n        if (!isDeviationReady() || score <= 0) {\n            return new Weighted<Double>(0.0, 0.0f);\n        }\n        double threshold = getPrimaryThreshold();\n        float grade = (threshold > 0 && score > threshold) ? 
(float) getPrimaryGrade(score) : 0f;\n        return new Weighted<>(threshold, grade);\n    }\n\n    @Deprecated\n    public double getAnomalyGrade(double score, boolean flag) {\n        return getPrimaryGrade(score);\n    }\n\n    /**\n     * The following adapts the notion of x-sigma (standard deviation) to admit the\n     * case that RCF scores are asymmetric and values lower than 1 (closer to 0.5)\n     * can be more common; whereas anomalies are typically larger the x-factor is\n     * automatically scaled to be calibrated with the average score (bounded below\n     * by an absolute constant like 0.7)\n     * \n     * @param factor    the factor being scaled\n     * @param method    transformation method\n     * @param dimension the dimension of the problem (currently unused)\n     * @return a scaled value of the factor\n     */\n\n    protected double adjustedFactor(double factor, TransformMethod method, int dimension) {\n        double correctedFactor = factor;\n        double base = primaryDeviation.getMean();\n        if (base < factorAdjustmentThreshold && method != TransformMethod.NONE) {\n            correctedFactor = primaryDeviation.getMean() * factor / factorAdjustmentThreshold;\n        }\n        return max(correctedFactor, MINIMUM_Z_FACTOR);\n    }\n\n    /**\n     * The following computes the standard deviation of the scores. But we have\n     * multiple ways of measuring that -- if the scores are typically symmetric then\n     * many of these measures coincide. However transformation of the values may\n     * cause the score distribution to be unusual. For example, if NORMALIZATION is\n     * used then the scores (below the average) end up being close to the average\n     * (an example of the asymmetry) and thus only standard deviation is used. But\n     * for other distributions we could directly estimate the deviation of the\n     * scores below the dynamic mean in an online manner, and we do so in\n     * thresholdDeviation. 
An orthogonal component is the effect of\n     * shingling/differencing which connect up the scores from consecutive input.\n     * \n     * @param method      transformation method\n     * @param shingleSize shinglesize used\n     * @return an estimate of long term deviation from mean of a stochastic series\n     */\n    protected double longTermDeviation(TransformMethod method, int shingleSize) {\n\n        if (shingleSize == 1\n                && !(method == TransformMethod.DIFFERENCE || method == TransformMethod.NORMALIZE_DIFFERENCE)) {\n            // control the effect of large values above a threshold from raising the\n            // threshold\n            return min(sqrt(2.0) * thresholdDeviation.getDeviation(), primaryDeviation.getDeviation());\n        } else {\n            double first = primaryDeviation.getDeviation();\n            first = min(first, max(secondaryDeviation.getDeviation(), sqrt(2.0) * thresholdDeviation.getDeviation()));\n            // there is a role of differencing; either by shingling or by explicit\n            // transformation\n            return scoreDifferencing * first + (1 - scoreDifferencing) * secondaryDeviation.getDeviation();\n        }\n\n    }\n\n    public Weighted<Double> getThresholdAndGrade(double score, TransformMethod method, int dimension, int shingleSize) {\n        return getThresholdAndGrade(score, zFactor, method, dimension, shingleSize);\n    }\n\n    public Weighted<Double> getThresholdAndGrade(double score, double factor, TransformMethod method, int dimension,\n            int shingleSize) {\n        double intermediateFraction = intermediateTermFraction();\n        double newFactor = adjustedFactor(factor, method, dimension);\n        double longTerm = longTermDeviation(method, shingleSize);\n        double scaledDeviation = (newFactor - 1) * longTerm + primaryDeviation.getDeviation();\n\n        double absolute = absoluteThreshold;\n        if (autoThreshold && intermediateFraction >= 1.0 && 
primaryDeviation.getMean() < factorAdjustmentThreshold) {\n            absolute = primaryDeviation.getMean() * absolute / factorAdjustmentThreshold;\n        }\n        double threshold = (!isDeviationReady()) ? max(initialThreshold, absolute)\n                : max(absolute, intermediateFraction * (primaryDeviation.getMean() + scaledDeviation)\n                        + (1 - intermediateFraction) * initialThreshold);\n        if (score < threshold || threshold <= 0) {\n            return new Weighted<>(threshold, 0);\n        } else {\n            double t = getSurpriseIndex(score, threshold, newFactor, scaledDeviation / newFactor);\n            t = min((Math.floor(t * 20)) / 16, 1.0); // grade 1 at scaledDeviation at 4 sigma\n            if (t == 0) {\n                // round off errors\n                threshold = score;\n            }\n            return new Weighted<>(threshold, (float) t);\n        }\n    }\n\n    /**\n     * how surprised we are seeing a value from a series with mean base with deviation,\n     * where factor controls the separation\n     * \n     * @param score     score\n     * @param base      mean of series\n     * @param factor    control parameter for determining surprise\n     * @param deviation relevant deviation for the series\n     * @return a clipped value of the \"surprise\" index\n     */\n    protected float getSurpriseIndex(double score, double base, double factor, double deviation) {\n        if (isDeviationReady()) {\n            double tFactor = 2 * factor;\n            if (deviation > 0) {\n                tFactor = min(factor, (score - base) / deviation);\n            }\n            return max(0, (float) (tFactor / factor));\n        } else {\n            return (float) min(1, max(0, (score - absoluteThreshold) / absoluteThreshold));\n        }\n    }\n\n    // mean or below; uses the asymmetry of the RCF score\n    protected void updateThreshold(double score) {\n        double gap = primaryDeviation.getMean() - score;\n      
  if (gap > 0) {\n            thresholdDeviation.update(gap);\n        }\n    }\n\n    protected void updatePrimary(double score) {\n        updateThreshold(score);\n        primaryDeviation.update(score);\n        ++count;\n    }\n\n    public void update(double primary, double secondary) {\n        updateThreshold(primary);\n        primaryDeviation.update(primary);\n        secondaryDeviation.update(secondary);\n        ++count;\n    }\n\n    public void update(double score, double secondScore, double lastScore, TransformMethod method) {\n        update(min(score, 2.0), secondScore - lastScore);\n    }\n\n    public Deviation getPrimaryDeviation() {\n        return primaryDeviation;\n    }\n\n    public Deviation getSecondaryDeviation() {\n        return secondaryDeviation;\n    }\n\n    public void setZfactor(double factor) {\n        zFactor = factor;\n    }\n\n    /**\n     * sets the lower threshold -- which is used to scale the factor variable\n     */\n    public void setLowerThreshold(double lower) {\n        factorAdjustmentThreshold = lower;\n    }\n\n    /**\n     * \n     * @param value absolute lower bound thresholds turns off auto adjustment -- to\n     *              respect the direct setting\n     */\n    public void setAbsoluteThreshold(double value) {\n        autoThreshold = false;\n        absoluteThreshold = value;\n    }\n\n    public void setInitialThreshold(double initial) {\n        initialThreshold = initial;\n    }\n\n    public void setScoreDifferencing(double scoreDifferencing) {\n        checkArgument(scoreDifferencing >= 0 && scoreDifferencing <= 1, \"incorrect score differencing parameter\");\n        this.scoreDifferencing = scoreDifferencing;\n    }\n\n    // to be updated as more deviations are added\n    public Deviation[] getDeviations() {\n        Deviation[] deviations = new Deviation[DEFAULT_DEVIATION_STATES];\n        deviations[0] = primaryDeviation.copy();\n        deviations[1] = secondaryDeviation.copy();\n        
deviations[2] = thresholdDeviation.copy();\n        return deviations;\n    }\n\n    public boolean isAutoThreshold() {\n        return autoThreshold;\n    }\n\n    public int getCount() {\n        return count;\n    }\n\n    public void setCount(int count) {\n        this.count = count;\n    }\n\n    public double getAbsoluteThreshold() {\n        return absoluteThreshold;\n    }\n\n    public double getLowerThreshold() {\n        return factorAdjustmentThreshold;\n    }\n\n    public double getInitialThreshold() {\n        return initialThreshold;\n    }\n\n    public double getScoreDifferencing() {\n        return scoreDifferencing;\n    }\n\n    public double getZFactor() {\n        return zFactor;\n    }\n\n    public int getMinimumScores() {\n        return minimumScores;\n    }\n\n    public void setMinimumScores(int minimumScores) {\n        this.minimumScores = minimumScores;\n    }\n\n    public void setAutoThreshold(boolean autoThreshold) {\n        this.autoThreshold = autoThreshold;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/AnomalyDescriptorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class AnomalyDescriptorTest {\n\n    @ParameterizedTest\n    @EnumSource(ScoringStrategy.class)\n    public void PastValuesTest(ScoringStrategy strategy) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 10; // just once since testing exact equality\n        int length = 40 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            int outputAfter = 2 + 1;\n            int shingleSize = 1 + rng.nextInt(15);\n            int 
baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(rng.nextLong())\n                    .outputAfter(outputAfter).scoringStrategy(strategy).internalShinglingEnabled(true)\n                    .shingleSize(shingleSize).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    rng.nextLong(), baseDimensions);\n\n            int count = 0;\n            for (double[] point : dataWithKeys.data) {\n                if (count == 82) {\n                    point[0] += 10000; // introducing an anomaly\n                }\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                assertArrayEquals(firstResult.getCurrentInput(), point, 1e-6);\n                assertEquals(firstResult.getScoringStrategy(), strategy);\n                if (count < outputAfter || count < shingleSize) {\n                    assertEquals(firstResult.getRCFScore(), 0);\n                } else {\n                    // distances can be 0\n                    assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.getRCFScore() > 0);\n                    assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.getThreshold() > 0);\n                    assertEquals(firstResult.getScale().length, baseDimensions);\n                    assertEquals(firstResult.getShift().length, baseDimensions);\n                    assertTrue(firstResult.getRelativeIndex() <= 0);\n                    if (count == 82 && strategy != ScoringStrategy.DISTANCE) {\n                        // because distances are 0 till sampleSize; by which time\n                        // forecasts would be reasonable\n                        
assertTrue(firstResult.getAnomalyGrade() > 0);\n                    }\n                    if (firstResult.getAnomalyGrade() > 0) {\n                        assertNotNull(firstResult.getPastValues());\n                        assertEquals(firstResult.getPastValues().length, baseDimensions);\n                        if (firstResult.getRelativeIndex() == 0) {\n                            assertArrayEquals(firstResult.getPastValues(), firstResult.getCurrentInput(), 1e-10);\n                        }\n\n                        assertNotNull(firstResult.getRelevantAttribution());\n                        assertEquals(firstResult.getRelevantAttribution().length, baseDimensions);\n                        assertEquals(firstResult.getAttribution().getHighLowSum(), firstResult.getRCFScore(), 1e-6);\n                        // the reverse of this condition need not be true -- the predictor corrector\n                        // often may declare grade 0 even when score is greater than threshold, to\n                        // account for shingling and initial results that populate the thresholder\n                        assertTrue(strategy == ScoringStrategy.MULTI_MODE_RECALL\n                                || firstResult.getRCFScore() >= firstResult.getThreshold());\n                    } else {\n                        assertTrue(firstResult.getRelativeIndex() == 0);\n                    }\n                }\n                ++count;\n\n            }\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(ScoringStrategy.class)\n    public void TimeAugmentedTest(ScoringStrategy strategy) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 10; // just once since testing exact equality\n        int length = 40 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            int outputAfter = 2 + 1;\n            int 
shingleSize = 1 + rng.nextInt(15);\n            int baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(rng.nextLong())\n                    .outputAfter(outputAfter).forestMode(ForestMode.TIME_AUGMENTED).scoringStrategy(strategy)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    rng.nextLong(), baseDimensions);\n\n            int count = 0;\n            for (double[] point : dataWithKeys.data) {\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                assertArrayEquals(firstResult.getCurrentInput(), point, 1e-6);\n                assertEquals(firstResult.getScoringStrategy(), strategy);\n                if (count < outputAfter || count < shingleSize) {\n                    assertEquals(firstResult.getRCFScore(), 0);\n                } else {\n                    // distances can be 0\n                    assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.getRCFScore() > 0);\n                    assertTrue(strategy == ScoringStrategy.DISTANCE || firstResult.getThreshold() > 0);\n                    assertEquals(firstResult.getScale().length, baseDimensions + 1);\n                    assertEquals(firstResult.getShift().length, baseDimensions + 1);\n                    assertTrue(firstResult.getRelativeIndex() <= 0);\n                    if (firstResult.getAnomalyGrade() > 0) {\n                        assertNotNull(firstResult.getPastValues());\n                        assertEquals(firstResult.getPastValues().length, baseDimensions);\n                        if (firstResult.getRelativeIndex() == 0) {\n                  
          assertArrayEquals(firstResult.getPastValues(), firstResult.getCurrentInput(), 1e-10);\n                        }\n                        assertEquals(firstResult.getAttribution().getHighLowSum(), firstResult.getRCFScore(), 1e-6);\n                        assertNotNull(firstResult.getRelevantAttribution());\n                        assertEquals(firstResult.getRelevantAttribution().length, baseDimensions);\n                        assertTrue(strategy == ScoringStrategy.MULTI_MODE_RECALL\n                                || firstResult.getRCFScore() >= firstResult.getThreshold());\n                    }\n                }\n                ++count;\n\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/ConsistencyTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys.generateShingledData;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n@Tag(\"functional\")\npublic class ConsistencyTest {\n\n    @Test\n    public void InternalShinglingTest() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        int numTrials = 1; // just once since testing exact equality\n        int length = 40 * 
sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .precision(Precision.FLOAT_32).internalShinglingEnabled(true).shingleSize(shingleSize)\n                    .randomSeed(seed).build();\n\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    seed + i, baseDimensions);\n\n            for (double[] point : dataWithKeys.data) {\n\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-10);\n                forest.update(point);\n            }\n        }\n    }\n\n    @Test\n    public void ExternalShinglingTest() {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        int numTrials = 1; // just once since testing exact equality\n        int length = 400 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            RandomCutForest forest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .precision(Precision.FLOAT_32).internalShinglingEnabled(false).shingleSize(shingleSize)\n                    .randomSeed(seed).build();\n\n            RandomCutForest copyForest = RandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .precision(Precision.FLOAT_32).internalShinglingEnabled(false).shingleSize(1).randomSeed(seed)\n                 
   .build();\n\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build();\n\n            ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .internalShinglingEnabled(false).shingleSize(1).anomalyRate(0.01).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.generateShingledDataWithKey(length, 50,\n                    shingleSize, baseDimensions, seed);\n\n            int gradeDifference = 0;\n\n            for (double[] point : dataWithKeys.data) {\n\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                AnomalyDescriptor secondResult = second.process(point, 0L);\n\n                assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-10);\n                assertEquals(firstResult.getRCFScore(), copyForest.getAnomalyScore(point), 1e-10);\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n\n                if ((firstResult.getAnomalyGrade() > 0) != (secondResult.getAnomalyGrade() > 0)) {\n                    ++gradeDifference;\n                    // thresholded random cut forest uses shingle size in the corrector step\n                    // this is supposed to be different\n                }\n                forest.update(point);\n                copyForest.update(point);\n            }\n            assertTrue(gradeDifference > 0);\n        }\n    }\n\n    @Test\n    public void MixedShinglingTest() {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * 
shingleSize;\n        long seed = new Random().nextLong();\n        System.out.println(seed);\n\n        Random rng = new Random(seed);\n\n        int numTrials = 5; // test is exact equality, reducing the number of trials\n        int numberOfTrees = 30; // and using fewer trees to speed up test\n        int length = 40 * sampleSize;\n        int testLength = length;\n        for (int i = 0; i < numTrials; i++) {\n\n            long newSeed = rng.nextLong();\n            int outputAfter = rng.nextInt(sampleSize * 10) + 1;\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(newSeed)\n                    .numberOfTrees(numberOfTrees).internalShinglingEnabled(true)\n                    // increasing outputAfter for internal shingling\n                    .outputAfter(outputAfter + shingleSize - 1).shingleSize(shingleSize).anomalyRate(0.01).build();\n\n            ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(newSeed)\n                    .numberOfTrees(numberOfTrees).internalShinglingEnabled(false).outputAfter(outputAfter)\n                    .shingleSize(shingleSize).anomalyRate(0.01).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length + testLength, 50,\n                    100, 5, newSeed + i, baseDimensions);\n\n            double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);\n\n            assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);\n\n            int count = shingleSize - 1;\n            // insert initial points\n            for (int j = 0; j < shingleSize - 1; j++) {\n                first.process(dataWithKeys.data[j], 0L);\n            }\n\n           
 for (int j = 0; j < length; j++) {\n                // validate equality of points\n                for (int y = 0; y < baseDimensions; y++) {\n                    assertEquals(dataWithKeys.data[count][y], shingledData[j][(shingleSize - 1) * baseDimensions + y],\n                            1e-10);\n                }\n\n                AnomalyDescriptor firstResult = first.process(dataWithKeys.data[count], 0L);\n                ++count;\n                AnomalyDescriptor secondResult = second.process(shingledData[j], 0L);\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n                // grades will not match\n            }\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            ThresholdedRandomCutForest fourth = mapper.toModel(mapper.toState(first));\n            for (int j = length; j < shingledData.length; j++) {\n                // validate equality of points\n                for (int y = 0; y < baseDimensions; y++) {\n                    assertEquals(dataWithKeys.data[count][y], shingledData[j][(shingleSize - 1) * baseDimensions + y],\n                            1e-10);\n                }\n\n                AnomalyDescriptor firstResult = first.process(dataWithKeys.data[count], 0L);\n                AnomalyDescriptor secondResult = second.process(shingledData[j], 0L);\n                AnomalyDescriptor fourthResult = fourth.process(dataWithKeys.data[count], 0L);\n                ++count;\n\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getRCFScore(), fourthResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getAnomalyGrade(), fourthResult.getAnomalyGrade(), 1e-10);\n\n            }\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void TimeAugmentedTest(TransformMethod transformMethod) {\n        int sampleSize 
= 256;\n        int baseDimensions = 1;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n\n        int numTrials = 1; // test is exact equality, reducing the number of trials\n        int numberOfTrees = 30; // and using fewer trees to speed up test\n        int length = 10 * sampleSize;\n        int dataSize = 2 * length;\n        for (int i = 0; i < numTrials; i++) {\n            Precision precision = Precision.FLOAT_32;\n            long seed = new Random().nextLong();\n            System.out.println(\"seed = \" + seed);\n\n            ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                    .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                    .forestMode(ForestMode.STANDARD).weightTime(0).transformMethod(transformMethod).normalizeTime(true)\n                    .outputAfter(32).initialAcceptFraction(0.125).build();\n            ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                    .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                    .forestMode(ForestMode.TIME_AUGMENTED).weightTime(0).transformMethod(transformMethod)\n                    .normalizeTime(true).outputAfter(32).initialAcceptFraction(0.125).build();\n\n            // ensuring that the parameters are the same; otherwise the grades/scores cannot\n            // be the same\n            // weightTime has to be 0 in the above\n            first.setLowerThreshold(1.1);\n            second.setLowerThreshold(1.1);\n            first.setHorizon(0.75);\n            second.setHorizon(0.75);\n\n            Random noise = new 
Random(0);\n\n            // change the last argument seed for a different run\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1,\n                    50, 100, 5, seed, baseDimensions);\n\n            int count = 0;\n            for (int j = 0; j < length; j++) {\n\n                long timestamp = 100 * count + noise.nextInt(10) - 5;\n                AnomalyDescriptor result = first.process(dataWithKeys.data[j], timestamp);\n                AnomalyDescriptor test = second.process(dataWithKeys.data[j], timestamp);\n                assertEquals(result.getRCFScore(), test.getRCFScore(), 1e-10);\n                // grade will not be the same because dimension changes\n                ++count;\n            }\n\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n            for (int j = length; j < 2 * length; j++) {\n\n                // can be a different gap\n                long timestamp = 150 * count + noise.nextInt(10) - 5;\n                AnomalyDescriptor firstResult = first.process(dataWithKeys.data[count], timestamp);\n                AnomalyDescriptor secondResult = second.process(dataWithKeys.data[count], timestamp);\n                AnomalyDescriptor thirdResult = third.process(dataWithKeys.data[count], timestamp);\n\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n                assertEquals(secondResult.getAnomalyGrade(), thirdResult.getAnomalyGrade(), 1e-10);\n            }\n        }\n    }\n\n    // streaming impute changes normalizations\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void ImputeTest(TransformMethod transformMethod) {\n\n        int sampleSize = 256;\n        int 
baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n\n        int numTrials = 1; // test is exact equality, reducing the number of trials\n        int numberOfTrees = 30; // and using fewer trees to speed up test\n        int length = 10 * sampleSize;\n        int dataSize = 2 * length;\n        for (int i = 0; i < numTrials; i++) {\n            Precision precision = Precision.FLOAT_32;\n            long seed = new Random().nextLong();\n            System.out.println(\"seed = \" + seed);\n            Random rng = new Random(seed);\n            double[] weights = new double[baseDimensions];\n            Arrays.fill(weights, 1.0);\n\n            int startNormalization = 10;\n            int outputAfter = startNormalization + shingleSize;\n            long newSeed = rng.nextLong();\n            ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                    .randomSeed(newSeed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                    .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                    .forestMode(ForestMode.STANDARD).weightTime(0).transformMethod(transformMethod).normalizeTime(true)\n                    .startNormalization(startNormalization).outputAfter(outputAfter).initialAcceptFraction(0.125)\n                    .weights(weights).build();\n            ThresholdedRandomCutForest second = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).randomSeed(newSeed).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                    .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                    .forestMode(ForestMode.STREAMING_IMPUTE).weightTime(0).transformMethod(transformMethod)\n                    
.startNormalization(startNormalization).normalizeTime(true).outputAfter(outputAfter)\n                    .initialAcceptFraction(0.125).weights(weights).build();\n\n            Random noise = new Random(0);\n\n            // change the last argument seed for a different run\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1,\n                    50, 100, 5, seed, baseDimensions);\n\n            for (int j = 0; j < length; j++) {\n                // gap has to be asymptotically same\n                long timestamp = 100 * j + 0 * noise.nextInt(10) - 5;\n                AnomalyDescriptor result = first.process(dataWithKeys.data[j], 0L);\n                AnomalyDescriptor test = second.process(dataWithKeys.data[j], timestamp);\n                if (result.getRCFScore() > 0 && test.getRCFScore() > 0) {\n                    assertEquals(result.getRCFScore(), test.getRCFScore(), 1e-6);\n                }\n            }\n\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n            for (int j = length; j < 2 * length; j++) {\n\n                // has to be the same gap\n                long timestamp = 100 * j + noise.nextInt(10) - 5;\n                AnomalyDescriptor firstResult = first.process(dataWithKeys.data[j], 0L);\n                AnomalyDescriptor thirdResult = third.process(dataWithKeys.data[j], timestamp);\n\n                assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-6);\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/DescriptorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\n\npublic class DescriptorTest {\n\n    int dimensions;\n    int horizon;\n    private ForecastDescriptor forecastDescriptor;\n\n    @BeforeEach\n    public void setUp() {\n        dimensions = 4;\n        horizon = 2;\n        forecastDescriptor = new ForecastDescriptor(new double[] { 2.0, 3.0 }, 0L, 7);\n\n    }\n\n    @Test\n    public void testSet() {\n        assertThrows(IllegalArgumentException.class,\n                () -> forecastDescriptor.setObservedErrorDistribution(new RangeVector(15)));\n        assertDoesNotThrow(() -> forecastDescriptor.setObservedErrorDistribution(new RangeVector(14)));\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setErrorRMSE(new DiVector(13)));\n        
assertDoesNotThrow(() -> forecastDescriptor.setErrorRMSE(new DiVector(14)));\n\n        assertFalse(forecastDescriptor.isExpectedValuesPresent());\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setExpectedValues(2, new double[2], 1.0));\n        forecastDescriptor.setExpectedValues(0, new double[2], 1.0);\n        assertTrue(forecastDescriptor.isExpectedValuesPresent());\n        assertNull(forecastDescriptor.getLastExpectedRCFPoint());\n        assertArrayEquals(forecastDescriptor.getExpectedValuesList()[0], new double[2]);\n        forecastDescriptor.setExpectedValues(0, new double[] { -1.0, -1.0 }, 0.5);\n        assertArrayEquals(forecastDescriptor.getExpectedValuesList()[0], new double[] { -1.0, -1.0 });\n        assertNull(forecastDescriptor.getMissingValues());\n        forecastDescriptor.setMissingValues(null);\n        assertNull(forecastDescriptor.getMissingValues());\n        forecastDescriptor.setMissingValues(new int[] { 17 });\n        assertArrayEquals(forecastDescriptor.getMissingValues(), new int[] { 17 });\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setImputedPoint(0, null));\n        forecastDescriptor.setNumberOfNewImputes(1);\n        forecastDescriptor.setInputLength(1);\n        forecastDescriptor.setShingleSize(1);\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setImputedPoint(0, null));\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setImputedPoint(0, new double[3]));\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setImputedPoint(0, new double[1]));\n        forecastDescriptor.setShingleSize(2);\n        forecastDescriptor.setImputedPoints(null); // reset\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setImputedPoint(-1, new double[1]));\n        assertDoesNotThrow(() -> forecastDescriptor.setImputedPoint(0, new double[1]));\n        // cannot set 
twice\n        assertThrows(IllegalArgumentException.class, () -> forecastDescriptor.setImputedPoint(0, new double[1]));\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/ForecastTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys.generateShingledData;\nimport static java.lang.Math.min;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Tag;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.CsvSource;\nimport org.junit.jupiter.params.provider.EnumSource;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\n@Tag(\"functional\")\npublic class ForecastTest {\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void basicAndIdempotence(TransformMethod method) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int 
shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = 1L;\n\n        int length = 4 * sampleSize;\n        int outputAfter = 128;\n\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).outputAfter(outputAfter).transformMethod(method).build();\n\n        // as the ratio of amplitude (signal) to noise is changed, the estimation range\n        // in forecast\n        // (or any other inference) should increase\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,\n                baseDimensions);\n\n        System.out.println(dataWithKeys.changes.length + \" anomalies injected \");\n        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);\n\n        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);\n\n        int horizon = 20;\n        if (method == TransformMethod.NORMALIZE_DIFFERENCE || method == TransformMethod.DIFFERENCE) {\n            horizon = min(horizon, shingleSize / 2 + 1);\n        }\n        double[] error = new double[horizon];\n        double[] lowerError = new double[horizon];\n        double[] upperError = new double[horizon];\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            // forecast first; change centrality to achieve a control over the sampling\n            // setting centrality = 0 would correspond to random sampling from the leaves\n            // reached by\n            // impute visitor\n\n            TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);\n            RangeVector forecast = extrapolate.rangeVector;\n            assert (forecast.values.length == horizon);\n            assert 
(extrapolate.timeStamps.length == horizon);\n            assert (extrapolate.lowerTimeStamps.length == horizon);\n            assert (extrapolate.upperTimeStamps.length == horizon);\n\n            RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;\n\n            // repeated invocations of extrapolate should return same result\n            // for the same values of correction,centrality\n            assertArrayEquals(forecast.values, alternative.values, 1e-6f);\n            assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);\n            assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);\n\n            for (int i = 0; i < horizon; i++) {\n                // check ranges\n                if (j > sampleSize) {\n                    assert (extrapolate.timeStamps[i] == j + i);\n                    assert (extrapolate.upperTimeStamps[i] == j + i);\n                    assert (extrapolate.lowerTimeStamps[i] == j + i);\n                }\n                assert (forecast.values[i] >= forecast.lower[i]);\n                assert (forecast.values[i] <= forecast.upper[i]);\n                // compute errors\n                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {\n                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];\n                    error[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];\n                    lowerError[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];\n                    upperError[i] += t * t;\n                }\n            }\n            forest.process(dataWithKeys.data[j], j);\n        }\n\n        System.out.println(forest.getTransformMethod().name() + \" RMSE (as horizon increases) \");\n        for (int i = 0; i < horizon; i++) {\n            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            
System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Lower (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Upper (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n\n    }\n\n    @ParameterizedTest\n    @CsvSource({ \"NORMALIZE,true\", \"NORMALIZE,false\", \"SUBTRACT_MA,true\", \"SUBTRACT_MA,false\", \"WEIGHTED,true\",\n            \"WEIGHTED,false\" })\n    public void linearShift(String methodString, String normalizeTime) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = 0L;\n\n        int length = 10 * sampleSize;\n        int outputAfter = 128;\n\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).timeDecay(1.0 / 1024).outputAfter(outputAfter)\n                .transformMethod(TransformMethod.valueOf(methodString))\n                .normalizeTime(Boolean.parseBoolean(normalizeTime)).build();\n\n        // as the ratio of amplitude (signal) to noise is changed, the estimation range\n        // in forecast\n        // (or any other inference) should increase\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,\n                
baseDimensions, true);\n\n        System.out.println(dataWithKeys.changes.length + \" anomalies injected \");\n        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);\n\n        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);\n\n        // the following constraint is for differencing based methods\n        int horizon = shingleSize / 2 + 1;\n\n        double[] error = new double[horizon];\n        double[] lowerError = new double[horizon];\n        double[] upperError = new double[horizon];\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            // forecast first; change centrality to achieve a control over the sampling\n            // setting centrality = 0 would correspond to random sampling from the leaves\n            // reached by\n            // impute visitor\n            TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);\n            RangeVector forecast = extrapolate.rangeVector;\n            assert (forecast.values.length == horizon);\n            assert (extrapolate.timeStamps.length == horizon);\n            assert (extrapolate.lowerTimeStamps.length == horizon);\n            assert (extrapolate.upperTimeStamps.length == horizon);\n\n            RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;\n\n            // repeated invocations of extrapolate should return same result\n            // for the same values of correction,centrality\n            assertArrayEquals(forecast.values, alternative.values, 1e-6f);\n            assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);\n            assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);\n\n            for (int i = 0; i < horizon; i++) {\n\n                if (j > outputAfter) {\n                    assert (extrapolate.timeStamps[i] == i + j);\n                    assert (extrapolate.upperTimeStamps[i] == i + j);\n                    
assert (extrapolate.lowerTimeStamps[i] == i + j);\n                }\n                // check ranges\n                assert (forecast.values[i] >= forecast.lower[i]);\n                assert (forecast.values[i] <= forecast.upper[i]);\n                // compute errors\n                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {\n                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];\n                    error[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];\n                    lowerError[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];\n                    upperError[i] += t * t;\n                }\n            }\n            forest.process(dataWithKeys.data[j], j);\n        }\n\n        System.out.println(forest.getTransformMethod().name() + \" RMSE (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Lower (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Upper (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n\n    }\n\n    @ParameterizedTest\n    @CsvSource({ \"DIFFERENCE,true\", \"DIFFERENCE,false\", \"NORMALIZE_DIFFERENCE,true\", \"NORMALIZE_DIFFERENCE,false\" })\n    public void linearShiftDifference(String 
methodString, String normalizeTime) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        // use same seed as previous test\n        long seed = 0L;\n\n        int length = 10 * sampleSize;\n        int outputAfter = 128;\n\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).timeDecay(1.0 / 1024).outputAfter(outputAfter)\n                .transformMethod(TransformMethod.valueOf(methodString))\n                .normalizeTime(Boolean.parseBoolean(normalizeTime)).build();\n\n        // as the ratio of amplitude (signal) to noise is changed, the estimation range\n        // in forecast\n        // (or any other inference) should increase\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 10, seed,\n                baseDimensions, true);\n\n        System.out.println(dataWithKeys.changes.length + \" anomalies injected \");\n        double[][] shingledData = generateShingledData(dataWithKeys.data, shingleSize, baseDimensions, false);\n\n        assertEquals(shingledData.length, dataWithKeys.data.length - shingleSize + 1);\n\n        // the following constraint is for differencing based methods\n        // the differenced values will be noisy in the presence of anomalies\n        // the example demonstrates that the best forecaster need not be the best\n        // anomaly detector, even from a restricted family of algorithms\n        int horizon = shingleSize / 2 + 1;\n\n        double[] error = new double[horizon];\n        double[] lowerError = new double[horizon];\n        double[] upperError = new double[horizon];\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n\n            
TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);\n            RangeVector forecast = extrapolate.rangeVector;\n            assert (forecast.values.length == horizon);\n            assert (extrapolate.timeStamps.length == horizon);\n            assert (extrapolate.lowerTimeStamps.length == horizon);\n            assert (extrapolate.upperTimeStamps.length == horizon);\n\n            RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;\n\n            // repeated invocations of extrapolate should return same result\n            // for the same values of correction,centrality\n            assertArrayEquals(forecast.values, alternative.values, 1e-6f);\n            assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);\n            assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);\n\n            for (int i = 0; i < horizon; i++) {\n                // check ranges\n                assertEquals(extrapolate.timeStamps[i], 0);\n                assertEquals(extrapolate.upperTimeStamps[i], 0);\n                assertEquals(extrapolate.lowerTimeStamps[i], 0);\n                assert (forecast.values[i] >= forecast.lower[i]);\n                assert (forecast.values[i] <= forecast.upper[i]);\n                // compute errors\n                if (j > outputAfter + shingleSize - 1 && j + i < dataWithKeys.data.length) {\n                    double t = dataWithKeys.data[j + i][0] - forecast.values[i];\n                    error[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.lower[i];\n                    lowerError[i] += t * t;\n                    t = dataWithKeys.data[j + i][0] - forecast.upper[i];\n                    upperError[i] += t * t;\n                }\n            }\n            forest.process(dataWithKeys.data[j], 0L);\n        }\n\n        System.out.println(forest.getTransformMethod().name() + \" RMSE (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n   
         double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Lower (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Upper (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n\n    }\n\n    @ParameterizedTest\n    @ValueSource(booleans = { true, false })\n    void timeAugmentedTest(boolean normalize) {\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n\n        int baseDimensions = 1;\n        int horizon = 10;\n\n        int count = 0;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).internalShinglingEnabled(true).precision(precision).anomalyRate(0.01)\n                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(normalize).build();\n\n        long seed = new Random().nextLong();\n        double[] data = new double[] { 1.0 };\n        System.out.println(\"seed = \" + seed);\n        Random rng = new Random(seed);\n\n        for (int i = 0; i < 200; i++) {\n            long time = 1000L * count + rng.nextInt(100);\n            forest.process(data, time);\n            ++count;\n   
     }\n        TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);\n        RangeVector range = extrapolate.rangeVector;\n        assert (range.values.length == baseDimensions * horizon);\n        assert (extrapolate.timeStamps.length == horizon);\n        assert (extrapolate.lowerTimeStamps.length == horizon);\n        assert (extrapolate.upperTimeStamps.length == horizon);\n\n        /*\n         * the forecasted time stamps should be close to 1000 * (count + i) the data\n         * values should remain as in data[]\n         */\n\n        for (int i = 0; i < horizon; i++) {\n            assertEquals(range.values[i], data[0]);\n            assertEquals(range.upper[i], data[0]);\n            assertEquals(range.lower[i], data[0]);\n            assert (Math.abs(Math.round(extrapolate.timeStamps[i] * 0.001) - count - i) <= 1);\n            assert (extrapolate.timeStamps[i] >= extrapolate.lowerTimeStamps[i]);\n            assert (extrapolate.upperTimeStamps[i] >= extrapolate.timeStamps[i]);\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void streamingImputeTest(TransformMethod method) {\n        int shingleSize = 8;\n        int numberOfTrees = 100;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n        int outputAfter = sampleSize;\n\n        // change this to try different number of attributes,\n        int baseDimensions = 1;\n\n        int dropped = 0;\n        long seed = 2022L;\n\n        // the following simulates random drops\n        long dropSeed = 7L;\n        Random dropPRG = new Random(dropSeed);\n\n        System.out.println(\"seed = \" + seed);\n        System.out.println(\"dropping seed = \" + dropSeed);\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                
.dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STREAMING_IMPUTE)\n                .transformMethod(method).imputationMethod(RCF).build();\n\n        // limited to shingleSize/2+1 due to the differenced methods\n        int horizon = shingleSize / 2 + 1;\n\n        double[] error = new double[horizon];\n        double[] lowerError = new double[horizon];\n        double[] upperError = new double[horizon];\n\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                100, 5, seed, baseDimensions, true);\n        System.out.println(dataWithKeys.changes.length + \" anomalies injected \");\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n\n            if (dropPRG.nextDouble() < 0.2) {\n                ++dropped;\n            } else {\n\n                // note that the forecast does not change without a new reading in streaming\n                // impute\n                // in this case the forecast corresponds to j+1 .. 
j + horizon\n                // so we will add the j'th entry and then measure error against j+1 ...\n                // j+horizon values\n\n                long newStamp = 1000L * j + 10 * dropPRG.nextInt(10) - 5;\n                forest.process(dataWithKeys.data[j], newStamp);\n                TimedRangeVector extrapolate = forest.extrapolate(horizon, true, 1.0);\n                RangeVector forecast = extrapolate.rangeVector;\n                assert (forecast.values.length == horizon);\n                assert (extrapolate.timeStamps.length == horizon);\n\n                RangeVector alternative = forest.extrapolate(horizon, true, 1.0).rangeVector;\n\n                // repeated invocations of extrapolate should return same result\n                // for the same values of correction,centrality\n                assertArrayEquals(forecast.values, alternative.values, 1e-6f);\n                assertArrayEquals(forecast.lower, alternative.lower, 1e-6f);\n                assertArrayEquals(forecast.upper, alternative.upper, 1e-6f);\n\n                for (int i = 0; i < horizon; i++) {\n                    // check ranges\n                    assert (forecast.values[i] >= forecast.lower[i]);\n                    assert (forecast.values[i] <= forecast.upper[i]);\n                    assertEquals(extrapolate.timeStamps[i], 0);\n                    assertEquals(extrapolate.upperTimeStamps[i], 0);\n                    assertEquals(extrapolate.lowerTimeStamps[i], 0);\n                    // compute errors\n                    // NOTE the +1 since we are predicting the unseen values in the data\n                    if (j > outputAfter + shingleSize - 1 && j + i + 1 < dataWithKeys.data.length) {\n                        double t = dataWithKeys.data[j + i + 1][0] - forecast.values[i];\n                        error[i] += t * t;\n                        t = dataWithKeys.data[j + i + 1][0] - forecast.lower[i];\n                        lowerError[i] += t * t;\n                    
    t = dataWithKeys.data[j + i + 1][0] - forecast.upper[i];\n                        upperError[i] += t * t;\n                    }\n                }\n            }\n        }\n        System.out.println(\"Impute with \" + dropped + \" dropped values from \" + dataWithKeys.data.length + \" values\");\n        System.out.println(forest.getTransformMethod().name() + \" RMSE (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = error[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i - dropped);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Lower (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = lowerError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i - dropped);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n        System.out.println(\"RMSE Upper (as horizon increases)\");\n        for (int i = 0; i < horizon; i++) {\n            double t = upperError[i] / (dataWithKeys.data.length - shingleSize + 1 - outputAfter - i - dropped);\n            System.out.print(Math.sqrt(t) + \" \");\n        }\n        System.out.println();\n\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/IgnoreTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.time.LocalDateTime;\nimport java.time.temporal.ChronoUnit;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.Set;\nimport java.util.TreeSet;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\n\npublic class IgnoreTest {\n    @Test\n    public void testAnomalies() {\n        // Initialize the forest parameters\n        int shingleSize = 8;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int baseDimensions = 1;\n\n        long count = 0;\n        int dimensions = baseDimensions * shingleSize;\n\n        // Build the ThresholdedRandomCutForest\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STREAMING_IMPUTE)\n                
.transformMethod(TransformMethod.NORMALIZE).autoAdjust(true)\n                .ignoreNearExpectedFromAboveByRatio(new double[] { 0.1 })\n                .ignoreNearExpectedFromBelowByRatio(new double[] { 0.1 }).build();\n\n        // Generate the list of doubles\n        List<Double> randomDoubles = generateUniformRandomDoubles();\n\n        // List to store detected anomaly indices\n        List<Integer> anomalies = new ArrayList<>();\n\n        // Process each data point through the forest\n        for (double val : randomDoubles) {\n            double[] point = new double[] { val };\n            long newStamp = 100 * count;\n\n            AnomalyDescriptor result = forest.process(point, newStamp);\n\n            if (result.getAnomalyGrade() != 0) {\n                anomalies.add((int) count);\n            }\n            ++count;\n        }\n\n        // Expected anomalies\n        List<Integer> expectedAnomalies = Arrays.asList(273, 283, 505, 1323);\n\n        System.out.println(\"Anomalies detected at indices: \" + anomalies);\n\n        // Verify that all expected anomalies are detected\n        assertTrue(anomalies.containsAll(expectedAnomalies),\n                \"Anomalies detected do not contain all expected anomalies\");\n    }\n\n    public static List<Double> generateUniformRandomDoubles() {\n        // Set fixed times for reproducibility\n        LocalDateTime startTime = LocalDateTime.of(2020, 1, 1, 0, 0, 0);\n        LocalDateTime endTime = LocalDateTime.of(2020, 1, 2, 0, 0, 0);\n        long totalIntervals = ChronoUnit.MINUTES.between(startTime, endTime);\n\n        // Generate timestamps (not used but kept for completeness)\n        List<LocalDateTime> timestamps = new ArrayList<>();\n        for (int i = 0; i < totalIntervals; i++) {\n            timestamps.add(startTime.plusMinutes(i));\n        }\n\n        // Initialize variables\n        Random random = new Random(0); // For reproducibility\n        double level = 0;\n        List<Double> 
logCounts = new ArrayList<>();\n\n        // Decide random change points where level will change\n        int numChanges = random.nextInt(6) + 5; // Random number between 5 and 10 inclusive\n\n        Set<Integer> changeIndicesSet = new TreeSet<>();\n        changeIndicesSet.add(0); // Ensure the first index is included\n\n        while (changeIndicesSet.size() < numChanges) {\n            int idx = random.nextInt((int) totalIntervals - 1) + 1; // Random index between 1 and totalIntervals -1\n            changeIndicesSet.add(idx);\n        }\n\n        List<Integer> changeIndices = new ArrayList<>(changeIndicesSet);\n\n        // Generate levels at each change point\n        List<Double> levels = new ArrayList<>();\n        for (int i = 0; i < changeIndices.size(); i++) {\n            if (i == 0) {\n                level = random.nextDouble() * 10; // Starting level between 0 and 10\n            } else {\n                double increment = -2 + random.nextDouble() * 7; // Random increment between -2 and 5\n                level = Math.max(0, level + increment);\n            }\n            levels.add(level);\n        }\n\n        // Now generate logCounts for each timestamp with even smoother transitions\n        int currentLevelIndex = 0;\n        for (int idx = 0; idx < totalIntervals; idx++) {\n            if (currentLevelIndex + 1 < changeIndices.size() && idx >= changeIndices.get(currentLevelIndex + 1)) {\n                currentLevelIndex++;\n            }\n            level = levels.get(currentLevelIndex);\n            double sineWave = Math.sin((idx % 300) * (Math.PI / 150)) * 0.05 * level;\n            double noise = (-0.01 * level) + random.nextDouble() * (0.02 * level); // Noise between -0.01*level and\n                                                                                   // 0.01*level\n            double count = Math.max(0, level + sineWave + noise);\n            logCounts.add(count);\n        }\n\n        // Introduce controlled changes for 
anomaly detection testing\n        for (int changeIdx : changeIndices) {\n            if (changeIdx + 10 < totalIntervals) {\n                logCounts.set(changeIdx + 5, logCounts.get(changeIdx + 5) * 1.05); // 5% increase\n                logCounts.set(changeIdx + 10, logCounts.get(changeIdx + 10) * 1.10); // 10% increase\n            }\n        }\n\n        // Output the generated logCounts\n        System.out.println(\"Generated logCounts of size: \" + logCounts.size());\n        return logCounts;\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/MissingValueTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.extension.ExtensionContext;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.ArgumentsProvider;\nimport org.junit.jupiter.params.provider.ArgumentsSource;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\n\npublic class MissingValueTest {\n    private static class EnumAndValueProvider implements ArgumentsProvider {\n        @Override\n        public Stream<? 
extends Arguments> provideArguments(ExtensionContext context) {\n            return Stream.of(ImputationMethod.PREVIOUS, ImputationMethod.ZERO, ImputationMethod.FIXED_VALUES)\n                    .flatMap(method -> Stream.of(4, 8, 16) // Example shingle sizes\n                            .map(shingleSize -> Arguments.of(method, shingleSize)));\n        }\n    }\n\n    @ParameterizedTest\n    @ArgumentsSource(EnumAndValueProvider.class)\n    public void testConfidence(ImputationMethod method, int shingleSize) {\n        // Create and populate a random cut forest\n\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int baseDimensions = 1;\n\n        long count = 0;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest.Builder forestBuilder = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).imputationMethod(method)\n                .forestMode(ForestMode.STREAMING_IMPUTE).transformMethod(TransformMethod.NORMALIZE).autoAdjust(true);\n\n        if (method == ImputationMethod.FIXED_VALUES) {\n            // we cannot pass fillValues when the method is not fixed values. 
Otherwise, we\n            // will impute\n            // filled in values regardless of imputation method\n            forestBuilder.fillValues(new double[] { 3 });\n        }\n\n        ThresholdedRandomCutForest forest = forestBuilder.build();\n\n        // Define the size and range\n        int size = 400;\n        double min = 200.0;\n        double max = 240.0;\n\n        // Generate the list of doubles\n        List<Double> randomDoubles = generateUniformRandomDoubles(size, min, max);\n\n        double lastConfidence = 0;\n        for (double val : randomDoubles) {\n            double[] point = new double[] { val };\n            long newStamp = 100 * count;\n            if (count >= 300 && count < 325) {\n                // drop observations\n                AnomalyDescriptor result = forest.process(new double[] { Double.NaN }, newStamp,\n                        generateIntArray(point.length));\n                if (count > 300) {\n                    // confidence starts decreasing after 1 missing point\n                    assertTrue(result.getDataConfidence() < lastConfidence, \"count \" + count);\n                }\n                lastConfidence = result.getDataConfidence();\n                float[] rcfPoint = result.getRCFPoint();\n                double scale = result.getScale()[0];\n                double shift = result.getShift()[0];\n                double[] actual = new double[] { (rcfPoint[shingleSize - 1] * scale) + shift };\n                if (method == ImputationMethod.ZERO) {\n                    assertEquals(0, actual[0], 0.001d);\n                    if (count == 300) {\n                        assertTrue(result.getAnomalyGrade() > 0);\n                    }\n                } else if (method == ImputationMethod.FIXED_VALUES) {\n                    assertEquals(3.0d, actual[0], 0.001d);\n                    if (count == 300) {\n                        assertTrue(result.getAnomalyGrade() > 0);\n                    }\n                } else 
if (method == ImputationMethod.PREVIOUS) {\n                    assertEquals(0, result.getAnomalyGrade(), 0.001d,\n                            \"count: \" + count + \" actual: \" + Arrays.toString(actual));\n                }\n            } else {\n                AnomalyDescriptor result = forest.process(point, newStamp);\n                // after 325, we have a period of confidence decreasing. After that, confidence\n                // starts increasing again.\n                // We are not sure where the confidence will start increasing after decreasing.\n                // So we start checking the behavior after 325 + shingleSize.\n                int backupPoint = 325 + shingleSize;\n                if ((count > 100 && count < 300) || count >= backupPoint) {\n                    // The first 65+ observations give 0 confidence.\n                    // Confidence starts increasing after 1 observed point\n                    assertTrue(result.getDataConfidence() > lastConfidence,\n                            String.format(Locale.ROOT, \"count: %d, confidence: %f, last confidence: %f\", count,\n                                    result.getDataConfidence(), lastConfidence));\n                } else if (count < 325 && count > 300) {\n                    assertTrue(result.getDataConfidence() < lastConfidence,\n                            String.format(Locale.ROOT, \"count: %d, confidence: %f, last confidence: %f\", count,\n                                    result.getDataConfidence(), lastConfidence));\n                }\n                lastConfidence = result.getDataConfidence();\n            }\n            ++count;\n        }\n    }\n\n    public static int[] generateIntArray(int size) {\n        int[] intArray = new int[size];\n        for (int i = 0; i < size; i++) {\n            intArray[i] = i;\n        }\n        return intArray;\n    }\n\n    public static List<Double> generateUniformRandomDoubles(int size, double min, double max) {\n        List<Double> 
randomDoubles = new ArrayList<>(size);\n        Random random = new Random(0);\n\n        for (int i = 0; i < size; i++) {\n            double randomValue = min + (max - min) * random.nextDouble();\n            randomDoubles.add(randomValue);\n        }\n\n        return randomDoubles;\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/PredictorCorrectorTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE;\nimport static com.amazon.randomcutforest.parkservices.PredictorCorrector.DEFAULT_SAMPLING_SUPPORT;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.state.predictorcorrector.PredictorCorrectorMapper;\nimport com.amazon.randomcutforest.parkservices.threshold.BasicThresholder;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\npublic class PredictorCorrectorTest {\n\n    @Test\n    void AttributorTest() {\n        int sampleSize = 256;\n       
 int baseDimensions = 10;\n        int shingleSize = 10;\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(0L)\n                .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01).transformMethod(NORMALIZE)\n                .build();\n        DiVector test = new DiVector(baseDimensions * shingleSize);\n        assert (forest.predictorCorrector.getExpectedPoint(test, 0, baseDimensions, null, null) == null);\n        assertThrows(IllegalArgumentException.class, () -> forest.predictorCorrector.setNumberOfAttributors(-1));\n        forest.predictorCorrector.setNumberOfAttributors(baseDimensions);\n        assertThrows(NullPointerException.class,\n                () -> forest.predictorCorrector.getExpectedPoint(test, 0, baseDimensions, null, null));\n        double[] array = new double[20];\n        Arrays.fill(array, 1.0);\n        DiVector testTwo = new DiVector(array, array);\n        assertThrows(NullPointerException.class,\n                () -> forest.predictorCorrector.getExpectedPoint(test, 0, baseDimensions, null, null));\n    }\n\n    @Test\n    void configTest() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 10;\n        int dimensions = baseDimensions * shingleSize;\n        double[] testOne = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        double[] testTwo = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        double[] testThree = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        double[] testFour = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(0L)\n                .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)\n   
             .scoringStrategy(ScoringStrategy.DISTANCE).transformMethod(NORMALIZE).randomSeed(1110).autoAdjust(true)\n                .ignoreNearExpectedFromAbove(testOne).ignoreNearExpectedFromBelow(testTwo)\n                .ignoreNearExpectedFromAboveByRatio(testThree).ignoreNearExpectedFromBelowByRatio(testFour).build();\n        PredictorCorrector predictorCorrector = forest.getPredictorCorrector();\n        assertEquals(predictorCorrector.getSamplingSupport(), DEFAULT_SAMPLING_SUPPORT);\n        assertThrows(IllegalArgumentException.class, () -> predictorCorrector.setSamplingSupport(-1.0));\n        assertThrows(IllegalArgumentException.class, () -> predictorCorrector.setSamplingSupport(2.0));\n        assertDoesNotThrow(() -> predictorCorrector.setSamplingSupport(1.5 * DEFAULT_SAMPLING_SUPPORT));\n        double[] test = new double[1];\n        assertThrows(IllegalArgumentException.class, () -> predictorCorrector.setIgnoreNearExpected(test));\n        assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpected(null));\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, testOne, 1e-10);\n        assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpectedFromAbove(null));\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, testOne, 1e-10);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, testTwo, 1e-10);\n        assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpectedFromBelow(null));\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, testTwo, 1e-10);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);\n        assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpectedFromAboveByRatio(null));\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, testFour, 
1e-10);\n        assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpectedFromBelowByRatio(null));\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, testFour, 1e-10);\n        assertNotNull(predictorCorrector.getDeviations());\n        assertEquals(predictorCorrector.lastStrategy, ScoringStrategy.DISTANCE);\n        assertThrows(IllegalArgumentException.class,\n                () -> predictorCorrector.getCachedAttribution(1, null, new DiVector[2], null));\n\n        PredictorCorrectorMapper mapper = new PredictorCorrectorMapper();\n        PredictorCorrector copy = mapper.toModel(mapper.toState(predictorCorrector));\n        assertArrayEquals(copy.ignoreNearExpectedFromAbove, testOne, 1e-10);\n        assertArrayEquals(copy.ignoreNearExpectedFromBelow, testTwo, 1e-10);\n        assertArrayEquals(copy.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);\n        assertArrayEquals(copy.ignoreNearExpectedFromBelowByRatio, testFour, 1e-10);\n        assertNotNull(copy.getDeviations());\n        assertEquals(copy.lastStrategy, ScoringStrategy.DISTANCE);\n        copy.deviationsActual = new Deviation[1]; // changing the state\n        IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> copy.getDeviations());\n        assertEquals(\"incorrect state\", exception.getMessage());\n        copy.deviationsExpected = new Deviation[1];\n        exception = assertThrows(IllegalArgumentException.class, () -> copy.getDeviations());\n        assertEquals(\"length should be base dimension\", exception.getMessage());\n\n        double[] another = new double[4 * baseDimensions];\n        assertDoesNotThrow(() -> predictorCorrector.setIgnoreNearExpected(another));\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, new double[2]);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, new double[2]);\n        
assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, new double[2]);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, new double[2]);\n        another[0] = -1;\n        assertThrows(IllegalArgumentException.class, () -> predictorCorrector.setIgnoreNearExpected(another));\n        forest.setIgnoreNearExpectedFromAbove(testOne);\n        forest.setIgnoreNearExpectedFromBelow(testTwo);\n        forest.setIgnoreNearExpectedFromAboveByRatio(testThree);\n        forest.setIgnoreNearExpectedFromBelowByRatio(testFour);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAbove, testOne, 1e-10);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelow, testTwo, 1e-10);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromAboveByRatio, testThree, 1e-10);\n        assertArrayEquals(predictorCorrector.ignoreNearExpectedFromBelowByRatio, testFour, 1e-10);\n\n        Random testRandom = new Random(1110L);\n        assertEquals(predictorCorrector.getRandomSeed(), 1110L);\n        double nextDouble = predictorCorrector.nextDouble();\n        assertEquals(predictorCorrector.getRandomSeed(), testRandom.nextLong());\n        assertEquals(nextDouble, testRandom.nextDouble(), 1e-10);\n\n    }\n\n    @Test\n    public void mapperTest() {\n        assertThrows(IllegalArgumentException.class, () -> new PredictorCorrector(new BasicThresholder[0], null, 1, 0));\n        assertThrows(NullPointerException.class, () -> new PredictorCorrector(new BasicThresholder[1], null, 1, 0));\n        assertThrows(IllegalArgumentException.class,\n                () -> new PredictorCorrector(new BasicThresholder[] { new BasicThresholder(0) }, new Deviation[1], 1,\n                        0));\n    }\n\n    @Test\n    public void expectedValueTest() {\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(20).randomSeed(0L)\n                
.forestMode(ForestMode.STANDARD).shingleSize(1).anomalyRate(0.01)\n                .scoringStrategy(ScoringStrategy.DISTANCE).transformMethod(NORMALIZE).build();\n        PredictorCorrector predictorCorrector = forest.getPredictorCorrector();\n        double[] vector = new double[20];\n        Arrays.fill(vector, 1.0);\n        DiVector diVec = new DiVector(vector, vector);\n        assertNull(predictorCorrector.getExpectedPoint(diVec, 0, 20, null, null));\n        assertTrue(predictorCorrector.trigger(diVec, 1, 20, null, null, 1.0));\n        assertTrue(predictorCorrector.trigger(diVec, 21, 20, null, null, 1.0));\n        assertTrue(predictorCorrector.trigger(diVec, 21, 20, diVec, null, 1.0));\n        assertEquals(1, predictorCorrector.centeredTransformPass(new AnomalyDescriptor(null, 0), toFloatArray(vector)));\n        Arrays.fill(vector, 0);\n        assertEquals(0, predictorCorrector.centeredTransformPass(new AnomalyDescriptor(null, 0), toFloatArray(vector)));\n    }\n\n    @Test\n    public void runLengthTest() {\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().dimensions(4).randomSeed(0L)\n                .forestMode(ForestMode.STANDARD).shingleSize(4).anomalyRate(0.01).autoAdjust(false)\n                .scoringStrategy(ScoringStrategy.MULTI_MODE).transformMethod(NORMALIZE).build();\n        for (int i = 0; i < 100; i++) {\n            forest.process(new double[] { 10 }, 0);\n        }\n        for (int i = 0; i < 100; i++) {\n            forest.process(new double[] { 20 }, 0);\n        }\n        double[] scores = forest.getPredictorCorrector().getLastScore();\n        forest.predictorCorrector.setLastScore(null);\n        assertArrayEquals(forest.predictorCorrector.getLastScore(), scores, 1e-10);\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/RCFCasterTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.state.RCFCasterMapper;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class RCFCasterTest {\n\n    @Test\n    public void constructorTest() {\n        
RCFCaster.Builder builder = new RCFCaster.Builder();\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.forecastHorizon(-1);\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.forecastHorizon(2).shingleSize(0);\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.shingleSize(1).dimensions(1).scoreDifferencing(0);\n        assertDoesNotThrow(builder::build);\n        // unlikely to succeed; independent random number generator\n        assertNotEquals(builder.getRandom().nextInt(), builder.getRandom().nextInt());\n        builder.randomSeed(10);\n        assertEquals(builder.getRandom().nextInt(), builder.getRandom().nextInt());\n        builder.internalShinglingEnabled(false);\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.internalShinglingEnabled(true);\n        assertDoesNotThrow(builder::build);\n        builder.forestMode(ForestMode.STREAMING_IMPUTE);\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.forestMode(ForestMode.TIME_AUGMENTED);\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.forestMode(ForestMode.STANDARD);\n        builder.upperLimit(new float[] {});\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.upperLimit(new float[] { 1.0f });\n        builder.lowerLimit(new float[] {});\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.lowerLimit(new float[] { 2.0f });\n        assertThrows(IllegalArgumentException.class, builder::build);\n        builder.lowerLimit(new float[] { 0.0f });\n        builder.parallelExecutionEnabled(true).threadPoolSize(2).zFactor(2.0);\n        assertDoesNotThrow(builder::build);\n        builder.startNormalization(-1);\n        assertThrows(IllegalArgumentException.class, builder::build);\n        
builder.startNormalization(200).outputAfter(1);\n        assertThrows(IllegalArgumentException.class, builder::build);\n    }\n\n    @Test\n    public void configTest() {\n        RCFCaster.Builder builder = new RCFCaster.Builder().dimensions(1).shingleSize(1).forecastHorizon(1);\n        RCFCaster caster = builder.build();\n        caster.setLowerLimit(null);\n        caster.setUpperLimit(null);\n        assertThrows(IllegalArgumentException.class, () -> caster.setUpperLimit(new float[] { 0, 0 }));\n        assertThrows(IllegalArgumentException.class, () -> caster.setLowerLimit(new float[] { 0, 0 }));\n        assertDoesNotThrow(() -> caster.setUpperLimit(new float[] { 0 }));\n        assertThrows(IllegalArgumentException.class, () -> caster.setLowerLimit(new float[] { 1 }));\n        assertThrows(IllegalArgumentException.class,\n                () -> caster.processSequentially(new double[][] { new double[0] }));\n        assertThrows(IllegalArgumentException.class, () -> caster.process(new double[1], 0L, new int[1]));\n    }\n\n    @ParameterizedTest\n    @EnumSource(Calibration.class)\n    void testRCFCast(Calibration calibration) {\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 2 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 1;\n        int forecastHorizon = 5; // speeding up\n        int shingleSize = 10;\n        int outputAfter = 32;\n        int errorHorizon = 256;\n\n        long seed = new Random().nextLong();\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                50, 5, seed, baseDimensions, false);\n\n        // force introduce 
anomalies\n        double[][] data = new double[dataSize + shingleSize][];\n        for (int i = 0; i < dataWithKeys.data.length; i++) {\n            data[i] = dataWithKeys.data[i];\n        }\n        for (int i = dataSize; i < dataSize + shingleSize; i++) {\n            // all zero\n            data[i] = new double[baseDimensions];\n        }\n\n        int dimensions = baseDimensions * shingleSize;\n        TransformMethod transformMethod = TransformMethod.NORMALIZE;\n        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1).numberOfTrees(numberOfTrees)\n                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).anomalyRate(0.01)\n                .forestMode(ForestMode.STANDARD).transformMethod(transformMethod).outputAfter(outputAfter)\n                .forecastHorizon(forecastHorizon).centerOfMassEnabled(true).storeSequenceIndexesEnabled(true) // neither\n                                                                                                              // is\n                                                                                                              // relevant\n                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125).build();\n        RCFCaster shadow = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1).numberOfTrees(numberOfTrees)\n                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).anomalyRate(0.01)\n                .forestMode(ForestMode.STANDARD).transformMethod(transformMethod).outputAfter(outputAfter)\n                .forecastHorizon(forecastHorizon).calibration(calibration).errorHorizon(errorHorizon)\n                .initialAcceptFraction(0.125).boundingBoxCacheFraction(0).build();\n        RCFCaster secondShadow = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1)\n                
.numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)\n                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125)\n                .boundingBoxCacheFraction(0).build();\n        RCFCaster thirdShadow = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1)\n                .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)\n                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125)\n                .boundingBoxCacheFraction(1.0).build();\n\n        // testing scoring strategies\n        caster.setScoringStrategy(ScoringStrategy.MULTI_MODE);\n        shadow.setScoringStrategy(ScoringStrategy.MULTI_MODE);\n        secondShadow.setScoringStrategy(ScoringStrategy.MULTI_MODE);\n        thirdShadow.setScoringStrategy(ScoringStrategy.MULTI_MODE);\n        // ensuring/testing that the parameters are the same; otherwise the\n        // grades/scores cannot be the same\n        caster.setLowerThreshold(1.1);\n        shadow.setLowerThreshold(1.1);\n        secondShadow.setLowerThreshold(1.1);\n        thirdShadow.setLowerThreshold(1.1);\n        caster.setInitialThreshold(2.0);\n        shadow.setInitialThreshold(2.0);\n        secondShadow.setInitialThreshold(2.0);\n        thirdShadow.setInitialThreshold(2.0);\n        caster.setScoreDifferencing(0.4);\n        shadow.setScoreDifferencing(0.4);\n        secondShadow.setScoreDifferencing(0.4);\n        thirdShadow.setScoreDifferencing(0.4);\n\n        assert 
(caster.errorHandler.getErrorHorizon() == errorHorizon);\n        assert (caster.errorHorizon == errorHorizon);\n\n        for (int j = 0; j < data.length; j++) {\n            ForecastDescriptor result = caster.process(data[j], 0L);\n            ForecastDescriptor shadowResult = shadow.process(data[j], 0L);\n            assertEquals(result.getRCFScore(), shadowResult.getRCFScore(), 1e-6);\n\n            int sequenceIndex = caster.errorHandler.getSequenceIndex();\n            assertEquals(shadow.errorHandler.getSequenceIndex(), sequenceIndex);\n            if (caster.forest.isOutputReady()) {\n                float[] meanArray = caster.errorHandler.getErrorMean();\n                float[] intervalPrecision = shadow.errorHandler.getIntervalPrecision();\n                for (float y : intervalPrecision) {\n                    assertTrue(0 <= y && y <= 1.0);\n                }\n                assertArrayEquals(intervalPrecision, result.getIntervalPrecision(), 1e-6f);\n            }\n        }\n\n        // 0 length arrays do not change state\n        secondShadow.processSequentially(new double[0][]);\n        List<AnomalyDescriptor> firstList = secondShadow.processSequentially(data, x -> true);\n        List<AnomalyDescriptor> thirdList = thirdShadow.processSequentially(data, x -> true);\n        assertEquals(firstList.size(), data.length);\n        assertEquals(thirdList.size(), data.length);\n        assertEquals(firstList.get(data.length - 1).getInternalTimeStamp(),\n                thirdList.get(data.length - 1).getInternalTimeStamp());\n        // null does not change state\n        thirdShadow.processSequentially(null);\n\n        if (calibration != Calibration.NONE) {\n            // calibration fails\n            assertThrows(IllegalArgumentException.class, () -> caster.extrapolate(forecastHorizon - 1));\n            assertThrows(IllegalArgumentException.class, () -> caster.extrapolate(forecastHorizon + 1));\n        }\n\n        TimedRangeVector forecast1 = 
caster.extrapolate(forecastHorizon);\n        TimedRangeVector forecast2 = shadow.extrapolate(forecastHorizon);\n\n        TimedRangeVector forecast3 = secondShadow.extrapolate(forecastHorizon);\n        TimedRangeVector forecast4 = thirdShadow.extrapolate(forecastHorizon);\n        assertArrayEquals(forecast1.rangeVector.values, forecast2.rangeVector.values, 1e-6f);\n\n        assertArrayEquals(forecast3.rangeVector.values, forecast4.rangeVector.values, 1e-6f);\n        // the order of floating point operations now vary\n\n        for (int i = 0; i < forecast1.rangeVector.values.length; i++) {\n            assertTrue(Math.abs(forecast1.rangeVector.values[i] - forecast3.rangeVector.values[i]) < 1e-4\n                    * (1 + Math.abs(forecast1.rangeVector.values[i])));\n        }\n\n    }\n\n    @ParameterizedTest\n    @EnumSource(Calibration.class)\n    void testRCFCastThresholdedRCF(Calibration calibration) {\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 1;\n        int forecastHorizon = 15;\n        int shingleSize = 10;\n        int outputAfter = 32;\n        int errorHorizon = 256;\n\n        long seed = new Random().nextLong();\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                50, 5, seed, baseDimensions, false);\n\n        int dimensions = baseDimensions * shingleSize;\n        TransformMethod transformMethod = TransformMethod.NORMALIZE;\n\n        RCFCaster caster = RCFCaster.builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)\n                
.numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)\n                .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125).build();\n        ThresholdedRandomCutForest shadow = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(seed + 1).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                .transformMethod(transformMethod).outputAfter(outputAfter).initialAcceptFraction(0.125).build();\n\n        // ensuring that the parameters are the same; otherwise the grades/scores cannot\n        // be the same\n        // weighTime has to be 0\n        caster.setLowerThreshold(1.1);\n        shadow.setLowerThreshold(1.1);\n\n        assertTrue(caster.errorHandler.getErrorHorizon() == errorHorizon);\n        assertTrue(caster.errorHorizon == errorHorizon);\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            ForecastDescriptor result = caster.process(dataWithKeys.data[j], 0L);\n            AnomalyDescriptor shadowResult = shadow.process(dataWithKeys.data[j], 0L);\n            assertEquals(result.getRCFScore(), shadowResult.getRCFScore(), 1e-6f);\n\n            TimedRangeVector timedShadowForecast = shadow.extrapolate(forecastHorizon);\n\n            assertArrayEquals(timedShadowForecast.timeStamps, result.getTimedForecast().timeStamps);\n            assertArrayEquals(timedShadowForecast.upperTimeStamps, result.getTimedForecast().upperTimeStamps);\n            assertArrayEquals(timedShadowForecast.lowerTimeStamps, result.getTimedForecast().lowerTimeStamps);\n\n         
   // first check idempotence -- forecasts are state dependent only\n            // for ThresholdedRCF\n            TimedRangeVector newShadow = shadow.extrapolate(forecastHorizon);\n            assertArrayEquals(newShadow.rangeVector.values, timedShadowForecast.rangeVector.values, 1e-6f);\n            assertArrayEquals(newShadow.rangeVector.upper, timedShadowForecast.rangeVector.upper, 1e-6f);\n            assertArrayEquals(newShadow.rangeVector.lower, timedShadowForecast.rangeVector.lower, 1e-6f);\n            assertArrayEquals(newShadow.timeStamps, timedShadowForecast.timeStamps);\n            assertArrayEquals(newShadow.upperTimeStamps, timedShadowForecast.upperTimeStamps);\n            assertArrayEquals(newShadow.lowerTimeStamps, timedShadowForecast.lowerTimeStamps);\n\n            // extrapolate is idempotent for RCF casters\n            TimedRangeVector newVector = caster.extrapolate(forecastHorizon);\n            assertArrayEquals(newVector.rangeVector.values, result.getTimedForecast().rangeVector.values, 1e-6f);\n            assertArrayEquals(newVector.rangeVector.upper, result.getTimedForecast().rangeVector.upper, 1e-6f);\n            assertArrayEquals(newVector.rangeVector.lower, result.getTimedForecast().rangeVector.lower, 1e-6f);\n            assertArrayEquals(newVector.timeStamps, result.getTimedForecast().timeStamps);\n            assertArrayEquals(newVector.upperTimeStamps, result.getTimedForecast().upperTimeStamps);\n            assertArrayEquals(newVector.lowerTimeStamps, result.getTimedForecast().lowerTimeStamps);\n\n            // only difference between RCFCaster and ThresholdedRCF is calibration\n            caster.calibrate(dataWithKeys.data[j], calibration, timedShadowForecast.rangeVector);\n            assertArrayEquals(timedShadowForecast.rangeVector.values, result.getTimedForecast().rangeVector.values,\n                    1e-6f);\n            assertArrayEquals(timedShadowForecast.rangeVector.upper, 
result.getTimedForecast().rangeVector.upper,\n                    1e-6f);\n            assertArrayEquals(timedShadowForecast.rangeVector.lower, result.getTimedForecast().rangeVector.lower,\n                    1e-6f);\n\n        }\n    }\n\n    @ParameterizedTest\n    @ValueSource(booleans = { true, false })\n    void testRCFCallibration(boolean useRCF) {\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 1;\n        int forecastHorizon = 15;\n        int shingleSize = 10;\n        int outputAfter = 32;\n        int errorHorizon = 256;\n\n        long seed = new Random().nextLong();\n\n        System.out.println(\"seed = \" + seed);\n        // change the last argument seed for a different run\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(dataSize + shingleSize - 1, 50,\n                50, 5, seed, baseDimensions, false);\n\n        int dimensions = baseDimensions * shingleSize;\n        TransformMethod transformMethod = TransformMethod.NORMALIZE;\n\n        RCFCaster caster = RCFCaster.builder().dimensions(dimensions).randomSeed(seed + 1).numberOfTrees(numberOfTrees)\n                .shingleSize(shingleSize).sampleSize(sampleSize).internalShinglingEnabled(true).anomalyRate(0.01)\n                .forestMode(ForestMode.STANDARD).transformMethod(transformMethod).outputAfter(outputAfter)\n                .forecastHorizon(forecastHorizon).useRCFCallibration(useRCF).calibration(Calibration.SIMPLE)\n                .errorHorizon(errorHorizon).initialAcceptFraction(0.125).build();\n        ThresholdedRandomCutForest shadow = ThresholdedRandomCutForest.builder().dimensions(dimensions)\n                .randomSeed(seed + 
1).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .internalShinglingEnabled(true).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                .transformMethod(transformMethod).outputAfter(outputAfter).initialAcceptFraction(0.125).build();\n\n        // ensuring that the parameters are the same; otherwise the grades/scores cannot\n        // be the same\n        // weighTime has to be 0\n        caster.setLowerThreshold(1.1);\n        shadow.setLowerThreshold(1.1);\n\n        RCFCasterMapper mapper = new RCFCasterMapper();\n        RCFCaster secondShadow = mapper.toModel(mapper.toState(caster));\n\n        assertTrue(secondShadow.errorHandler.getErrorHorizon() == errorHorizon);\n        assertTrue(secondShadow.errorHorizon == errorHorizon);\n\n        for (int j = 0; j < dataWithKeys.data.length; j++) {\n            ForecastDescriptor result = caster.process(dataWithKeys.data[j], 0L);\n            AnomalyDescriptor shadowResult = shadow.process(dataWithKeys.data[j], 0L);\n            assertEquals(result.getRCFScore(), shadowResult.getRCFScore(), 1e-6f);\n\n            TimedRangeVector timedShadowForecast = shadow.extrapolate(forecastHorizon);\n\n            assertArrayEquals(timedShadowForecast.timeStamps, result.getTimedForecast().timeStamps);\n            assertArrayEquals(timedShadowForecast.upperTimeStamps, result.getTimedForecast().upperTimeStamps);\n            assertArrayEquals(timedShadowForecast.lowerTimeStamps, result.getTimedForecast().lowerTimeStamps);\n\n            // first check idempotence -- forecasts are state dependent only\n            // for ThresholdedRCF\n            TimedRangeVector newShadow = shadow.extrapolate(forecastHorizon);\n            assertArrayEquals(newShadow.rangeVector.values, timedShadowForecast.rangeVector.values, 1e-6f);\n            assertArrayEquals(newShadow.rangeVector.upper, timedShadowForecast.rangeVector.upper, 1e-6f);\n            
assertArrayEquals(newShadow.rangeVector.lower, timedShadowForecast.rangeVector.lower, 1e-6f);\n            assertArrayEquals(newShadow.timeStamps, timedShadowForecast.timeStamps);\n            assertArrayEquals(newShadow.upperTimeStamps, timedShadowForecast.upperTimeStamps);\n            assertArrayEquals(newShadow.lowerTimeStamps, timedShadowForecast.lowerTimeStamps);\n\n            // extrapolate is idempotent for RCF casters\n            TimedRangeVector newVector = caster.extrapolate(forecastHorizon);\n            assertArrayEquals(newVector.rangeVector.values, result.getTimedForecast().rangeVector.values, 1e-6f);\n            assertArrayEquals(newVector.rangeVector.upper, result.getTimedForecast().rangeVector.upper, 1e-6f);\n            assertArrayEquals(newVector.rangeVector.lower, result.getTimedForecast().rangeVector.lower, 1e-6f);\n            assertArrayEquals(newVector.timeStamps, result.getTimedForecast().timeStamps);\n            assertArrayEquals(newVector.upperTimeStamps, result.getTimedForecast().upperTimeStamps);\n            assertArrayEquals(newVector.lowerTimeStamps, result.getTimedForecast().lowerTimeStamps);\n\n            // only difference between RCFCaster and ThresholdedRCF is calibration\n            caster.calibrate(dataWithKeys.data[j], Calibration.SIMPLE, timedShadowForecast.rangeVector);\n            assertArrayEquals(timedShadowForecast.rangeVector.values, result.getTimedForecast().rangeVector.values,\n                    1e-6f);\n            assertArrayEquals(timedShadowForecast.rangeVector.upper, result.getTimedForecast().rangeVector.upper,\n                    1e-6f);\n            assertArrayEquals(timedShadowForecast.rangeVector.lower, result.getTimedForecast().rangeVector.lower,\n                    1e-6f);\n\n        }\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/SequentialAnalysisTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.RandomCutForest.DEFAULT_SAMPLE_SIZE;\nimport static java.lang.Math.min;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.parkservices.returntypes.AnalysisDescriptor;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class SequentialAnalysisTest {\n\n    protected SequentialAnalysis test = new SequentialAnalysis();\n\n    @Test\n    public void basicTest() {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + 
seed);\n        Random rng = new Random(seed);\n        int numTrials = 1; // just once since testing exact equality\n        int length = 40 * sampleSize;\n        int numberOfTrees = 30 + rng.nextInt(20);\n        int outputAfter = 1 + rng.nextInt(50);\n        int shingleSize = 1 + rng.nextInt(15);\n        int baseDimensions = 1 + rng.nextInt(5);\n        int dimensions = baseDimensions * shingleSize;\n        long forestSeed = rng.nextLong();\n        double timeDecay = 0.1 / sampleSize;\n        double transformDecay = 1.0 / sampleSize;\n        double fraction = 1.0 * outputAfter / sampleSize;\n        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter).timeDecay(timeDecay)\n                .transformDecay(transformDecay).internalShinglingEnabled(true).initialAcceptFraction(fraction)\n                .shingleSize(shingleSize).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter).timeDecay(timeDecay)\n                .transformDecay(transformDecay).boundingBoxCacheFraction(0).internalShinglingEnabled(true)\n                .initialAcceptFraction(fraction).shingleSize(shingleSize).build();\n\n        assertEquals(first.processSequentially(null), second.processSequentially(null));\n        assertEquals(first.processSequentially(new double[0][]), second.processSequentially(new double[0][]));\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                rng.nextLong(), baseDimensions);\n        List<AnomalyDescriptor> a = first.processSequentially(dataWithKeys.data);\n        List<AnomalyDescriptor> b = second.processSequentially(dataWithKeys.data);\n        assertEquals(a.size(), b.size());\n        
assertDoesNotThrow(\n                () -> test.forecastWithAnomalies(dataWithKeys.data, 2, 256, 0.001, TransformMethod.NONE, 10, 10, 0));\n        dataWithKeys.data[0] = new double[0]; // changing length\n        assertThrows(IllegalArgumentException.class, () -> first.processSequentially(dataWithKeys.data));\n        assertThrows(IllegalArgumentException.class,\n                () -> test.detectAnomalies(null, 1, 256, 0.001, TransformMethod.NONE, 0L));\n        assertThrows(IllegalArgumentException.class,\n                () -> test.forecastWithAnomalies(null, 1, 256, 0.001, TransformMethod.NONE, 10, 10, 0));\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void AnomalyTest(TransformMethod method) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 1; // just once since testing exact equality\n        int length = 40 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            int numberOfTrees = 30 + rng.nextInt(20);\n            int outputAfter = 1 + rng.nextInt(50);\n            int shingleSize = 1 + rng.nextInt(15);\n            int baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            long forestSeed = rng.nextLong();\n            double timeDecay = 0.1 / sampleSize;\n            double transformDecay = 1.0 / sampleSize;\n            double fraction = 1.0 * outputAfter / sampleSize;\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter)\n                    .transformMethod(method).timeDecay(timeDecay).transformDecay(transformDecay)\n                    
.internalShinglingEnabled(true).initialAcceptFraction(fraction).shingleSize(shingleSize).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    rng.nextLong(), baseDimensions);\n\n            List<AnomalyDescriptor> result = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,\n                    sampleSize, numberOfTrees, timeDecay, outputAfter, method, transformDecay, forestSeed);\n\n            int count = 0;\n            for (double[] point : dataWithKeys.data) {\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);\n                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());\n                    ++count;\n                }\n            }\n            assertTrue(count == result.size());\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void AnomalyTest2(TransformMethod method) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 1; // just once since testing exact equality\n        int length = 40 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            int outputAfter = sampleSize / 4;\n            int shingleSize = 1 + rng.nextInt(15);\n            int baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            long forestSeed = rng.nextLong();\n            double timeDecay = 0.1 / sampleSize;\n            double fraction = 1.0 * outputAfter / sampleSize;\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n             
       .dimensions(dimensions).randomSeed(forestSeed).transformMethod(method).timeDecay(timeDecay)\n                    .internalShinglingEnabled(true).transformDecay(timeDecay).initialAcceptFraction(fraction)\n                    .shingleSize(shingleSize).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    rng.nextLong(), baseDimensions);\n\n            List<AnomalyDescriptor> result = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,\n                    sampleSize, timeDecay, method, forestSeed);\n\n            int count = 0;\n            for (double[] point : dataWithKeys.data) {\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);\n                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());\n                    assertEquals(firstResult.getRCFScore(), result.get(count).getRCFScore(), 1e-3);\n                    ++count;\n                }\n            }\n            assertTrue(count == result.size());\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void AnomalyTest3(TransformMethod method) {\n        int sampleSize = DEFAULT_SAMPLE_SIZE;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 1; // just once since testing exact equality\n        int length = 40 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            int outputAfter = sampleSize / 4;\n            int shingleSize = 1 + rng.nextInt(15);\n            int baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            long forestSeed = 
rng.nextLong();\n            double timeDecay = 0.1 / sampleSize;\n            double transformDecay = (1.0 + rng.nextDouble()) / sampleSize;\n            double fraction = 1.0 * outputAfter / sampleSize;\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                    .dimensions(dimensions).randomSeed(forestSeed).transformMethod(method).timeDecay(timeDecay)\n                    .internalShinglingEnabled(true).transformDecay(transformDecay).initialAcceptFraction(fraction)\n                    .shingleSize(shingleSize).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    rng.nextLong(), baseDimensions);\n\n            List<AnomalyDescriptor> result = SequentialAnalysis.detectAnomalies(dataWithKeys.data, shingleSize,\n                    timeDecay, method, transformDecay, forestSeed);\n\n            int count = 0;\n            for (double[] point : dataWithKeys.data) {\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);\n                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());\n                    assertEquals(firstResult.getRCFScore(), result.get(count).getRCFScore(), 1e-3);\n                    ++count;\n                }\n            }\n            assertTrue(count == result.size());\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(Calibration.class)\n    public void ForecasterTest(Calibration calibration) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 1; // just once since testing exact equality\n        int length = 
4 * sampleSize;\n        for (int i = 0; i < numTrials; i++) {\n\n            int numberOfTrees = 50;\n            int outputAfter = 1 + rng.nextInt(50);\n            int shingleSize = 2 + rng.nextInt(15);\n            int forecastHorizon = min(4 * shingleSize, 10);\n            int errorHorizon = 100;\n            int baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            long forestSeed = rng.nextLong();\n            double timeDecay = 0.1 / sampleSize;\n            double transformDecay = 1.0 / sampleSize;\n            double fraction = 1.0 * outputAfter / sampleSize;\n            RCFCaster first = new RCFCaster.Builder().dimensions(dimensions).numberOfTrees(numberOfTrees)\n                    .randomSeed(forestSeed).outputAfter(outputAfter).transformMethod(TransformMethod.NORMALIZE)\n                    .timeDecay(timeDecay).transformDecay(transformDecay).internalShinglingEnabled(true)\n                    .forecastHorizon(forecastHorizon).errorHorizon(errorHorizon).calibration(calibration)\n                    .initialAcceptFraction(fraction).shingleSize(shingleSize).build();\n\n            MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 5,\n                    rng.nextLong(), baseDimensions);\n\n            AnalysisDescriptor descriptor = SequentialAnalysis.forecastWithAnomalies(dataWithKeys.data, shingleSize,\n                    sampleSize, timeDecay, outputAfter, TransformMethod.NORMALIZE, transformDecay, forecastHorizon,\n                    errorHorizon, 0.1, calibration, forestSeed);\n\n            List<AnomalyDescriptor> result = descriptor.getAnomalies();\n\n            int count = 0;\n            ForecastDescriptor last = null;\n            for (double[] point : dataWithKeys.data) {\n                ForecastDescriptor firstResult = first.process(point, 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    
assertEquals(firstResult.getAnomalyGrade(), result.get(count).getAnomalyGrade(), 1e-3);\n                    assertEquals(firstResult.getInternalTimeStamp(), result.get(count).getInternalTimeStamp());\n                    assertEquals(firstResult.getRCFScore(), result.get(count).getRCFScore(), 1e-3);\n                    ++count;\n                }\n                last = firstResult;\n            }\n            assertTrue(count == result.size());\n            RangeVector sequential = descriptor.getForecastDescriptor().getTimedForecast().rangeVector;\n            RangeVector current = last.getTimedForecast().rangeVector;\n            assertArrayEquals(current.values, sequential.values, 1e-3f);\n            assertArrayEquals(current.upper, sequential.upper, 1e-3f);\n            assertArrayEquals(current.lower, sequential.lower, 1e-3f);\n            assertArrayEquals(descriptor.getForecastDescriptor().getIntervalPrecision(), last.getIntervalPrecision(),\n                    1e-3f);\n            assertArrayEquals(descriptor.getForecastDescriptor().getErrorMean(), last.getErrorMean(), 1e-3f);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/TestGlobalLocalAnomalyDetector.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toDoubleArray;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector.DEFAULT_GLAD_THRESHOLD;\nimport static com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector.DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE;\nimport static com.amazon.randomcutforest.parkservices.GlobalLocalAnomalyDetector.DEFAULT_MAX;\nimport static com.amazon.randomcutforest.parkservices.threshold.BasicThresholder.DEFAULT_Z_FACTOR;\nimport static com.amazon.randomcutforest.summarization.GenericMultiCenter.DEFAULT_NUMBER_OF_REPRESENTATIVES;\nimport static com.amazon.randomcutforest.summarization.GenericMultiCenter.DEFAULT_SHRINKAGE;\nimport static com.amazon.randomcutforest.testutils.ExampleDataSets.rotateClockWise;\nimport static java.lang.Math.PI;\nimport static java.lang.Math.cos;\nimport static java.lang.Math.min;\nimport static java.lang.Math.sin;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static 
org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.io.BufferedWriter;\nimport java.io.FileWriter;\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Random;\nimport java.util.function.BiFunction;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.parkservices.config.ScoringStrategy;\nimport com.amazon.randomcutforest.parkservices.returntypes.GenericAnomalyDescriptor;\nimport com.amazon.randomcutforest.summarization.ICluster;\nimport com.amazon.randomcutforest.summarization.Summarizer;\nimport com.amazon.randomcutforest.testutils.NormalMixtureTestData;\nimport com.amazon.randomcutforest.util.Weighted;\n\npublic class TestGlobalLocalAnomalyDetector {\n\n    @Test\n    void testConstructor() {\n        int reservoirSize = 2000;\n        int stringSize = 70;\n        BiFunction<char[], char[], Double> dist = (a, b) -> toyD(a, b, stringSize / 2.0);\n        GlobalLocalAnomalyDetector<char[]> reservoir = new GlobalLocalAnomalyDetector<>(\n                GlobalLocalAnomalyDetector.builder().randomSeed(42).numberOfRepresentatives(5)\n                        .timeDecay(1.0 / reservoirSize).capacity(reservoirSize),\n                dist);\n        assertEquals(reservoir.getObjectList().size(), 0);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setMaxAllowed(200));\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setMaxAllowed(2));\n        assertEquals(reservoir.getMaxAllowed(), DEFAULT_MAX);\n        reservoir.setMaxAllowed(DEFAULT_MAX + 1);\n        assertEquals(reservoir.getMaxAllowed(), DEFAULT_MAX + 1);\n        assertEquals(reservoir.getIgnoreBelow(), DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setIgnoreBelow(-1.0));\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setIgnoreBelow(0.2));\n        
reservoir.setIgnoreBelow(2 * DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE);\n        assertEquals(reservoir.getIgnoreBelow(), 2 * DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE);\n        assertEquals(reservoir.getZfactor(), DEFAULT_Z_FACTOR);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setZfactor(1.0));\n        reservoir.setZfactor(0.95 * DEFAULT_Z_FACTOR);\n        assertEquals(reservoir.getZfactor(), 0.95 * DEFAULT_Z_FACTOR);\n        assertEquals(reservoir.getDoNotreclusterWithin(), reservoir.getCapacity() / 2);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setDoNotreclusterWithin(-100));\n        reservoir.setDoNotreclusterWithin(reservoir.getCapacity() / 2 + 1);\n        assertEquals(reservoir.getDoNotreclusterWithin(), reservoir.getCapacity() / 2 + 1);\n        assertEquals(reservoir.getLowerThreshold(), DEFAULT_GLAD_THRESHOLD);\n        assertEquals(reservoir.getShrinkage(), DEFAULT_SHRINKAGE);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setShrinkage(-1.0));\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setShrinkage(2.0));\n        reservoir.setShrinkage(DEFAULT_SHRINKAGE);\n        assertEquals(reservoir.getShrinkage(), DEFAULT_SHRINKAGE);\n        assertEquals(reservoir.getNumberOfRepresentatives(), DEFAULT_NUMBER_OF_REPRESENTATIVES);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setNumberOfRepresentatives(0));\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setNumberOfRepresentatives(200));\n        reservoir.setNumberOfRepresentatives(DEFAULT_NUMBER_OF_REPRESENTATIVES + 1);\n        assertEquals(reservoir.getNumberOfRepresentatives(), DEFAULT_NUMBER_OF_REPRESENTATIVES + 1);\n        assertThrows(IllegalArgumentException.class, () -> reservoir.setLowerThreshold(-1.0));\n        assertThrows(IllegalArgumentException.class, () -> reservoir.process(null, -1.0f, dist, true));\n\n        
GlobalLocalAnomalyDetector<char[]> second = GlobalLocalAnomalyDetector.builder().randomSeed(42)\n                .numberOfRepresentatives(5).timeDecay(1.0 / reservoirSize).capacity(reservoirSize).maxAllowed(5)\n                .anomalyRate(0.01).ignoreBelow(0.01).doNotReclusterWithin(1).build();\n        second.process(null, 1.0f, dist, true);\n        second.process(null, 1.0f, dist, true);\n        second.process(null, 1.0f, dist, true);\n        // global function not set\n        assertThrows(IllegalArgumentException.class, () -> second.process(null, 1.0f, dist, true));\n    }\n\n    @Test\n    void testDynamicStringClustering() {\n        long seed = new Random().nextLong();\n        System.out.println(\"String summarization seed : \" + seed);\n        Random random = new Random(seed);\n        int stringSize = 70;\n        int numberOfStrings = 200000;\n        int reservoirSize = 2000;\n        boolean changeInMiddle = true;\n        // the following should be away from 0.5 in [0.5,1]\n        double gapProbOfA = 0.85;\n\n        double anomalyRate = 0.05;\n        char[][] points = new char[numberOfStrings][];\n        boolean[] injected = new boolean[numberOfStrings];\n        int numberOfInjected = 0;\n\n        for (int i = 0; i < numberOfStrings; i++) {\n            if (random.nextDouble() < anomalyRate && i > reservoirSize / 2) {\n                injected[i] = true;\n                ++numberOfInjected;\n                points[i] = getABArray(stringSize + 10, 0.5, random, false, 0);\n            } else {\n                boolean flag = changeInMiddle && random.nextDouble() < 0.25;\n                double prob = (random.nextDouble() < 0.5) ? 
gapProbOfA : (1 - gapProbOfA);\n                points[i] = getABArray(stringSize, prob, random, flag, 0.25 * i / numberOfStrings);\n            }\n        }\n\n        System.out.println(\"Injected \" + numberOfInjected + \" 'anomalies' in \" + points.length);\n\n        BiFunction<char[], char[], Double> dist = (a, b) -> toyD(a, b, stringSize / 2.0);\n        GlobalLocalAnomalyDetector<char[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)\n                .numberOfRepresentatives(5).timeDecay(1.0 / reservoirSize).capacity(reservoirSize).build();\n        reservoir.setGlobalDistance(dist);\n        reservoir.setLowerThreshold(0.8);\n\n        int truePos = 0;\n        int falsePos = 0;\n        int falseNeg = 0;\n        for (int y = 0; y < points.length; y++) {\n\n            if (y % 200 == 100 && y > reservoirSize) {\n                char[] temp = points[y];\n                // check for malformed distance function, to the extent we can check efficiently\n                BiFunction<char[], char[], Double> badDistance = (a, b) -> -1.0;\n                assertThrows(IllegalArgumentException.class, () -> reservoir.process(temp, 1.0f, badDistance, true));\n                BiFunction<char[], char[], Double> superBadDistance = (a, b) -> Double.MAX_VALUE;\n                assertThrows(IllegalArgumentException.class,\n                        () -> reservoir.process(temp, 1.0f, superBadDistance, true));\n            }\n            GenericAnomalyDescriptor<char[]> result = reservoir.process(points[y], 1.0f, null, false);\n\n            if (result.getRepresentativeList() != null) {\n                double sum = 0;\n                for (Weighted<char[]> rep : result.getRepresentativeList()) {\n                    assert (rep.weight <= 1.0);\n                    sum += rep.weight;\n                }\n                // checking likelihood summing to 1\n                assertEquals(sum, 1.0, 1e-6);\n            }\n\n            if (result.getAnomalyGrade() > 
0) {\n                if (!injected[y]) {\n                    ++falsePos;\n                } else {\n                    ++truePos;\n                }\n            } else if (injected[y]) {\n                ++falseNeg;\n            }\n\n            if (10 * y % points.length == 0 && y > 0) {\n                System.out.println(\" at \" + y);\n                System.out.println(\"Precision = \" + precision(truePos, falsePos));\n                System.out.println(\"Recall = \" + recall(truePos, falseNeg));\n            }\n        }\n        System.out.println(\" Final: \");\n        System.out.println(\"Precision = \" + precision(truePos, falsePos));\n        System.out.println(\"Recall = \" + recall(truePos, falseNeg));\n        assert (reservoir.getObjectList().size() > reservoirSize / 2);\n    }\n\n    public static double toyD(char[] a, char[] b, double u) {\n        if (a.length > b.length) {\n            return toyD(b, a, u);\n        }\n        double[][] dist = new double[2][b.length + 1];\n        for (int j = 0; j < b.length + 1; j++) {\n            dist[0][j] = j;\n        }\n\n        for (int i = 1; i < a.length + 1; i++) {\n            dist[1][0] = i;\n            for (int j = 1; j < b.length + 1; j++) {\n                double t = dist[0][j - 1] + ((a[i - 1] == b[j - 1]) ? 
0 : 1);\n                dist[1][j] = min(min(t, dist[0][j] + 1), dist[1][j - 1] + 1);\n            }\n            for (int j = 0; j < b.length + 1; j++) {\n                dist[0][j] = dist[1][j];\n            }\n        }\n        return dist[1][b.length];\n    }\n\n    // colors\n    public static final String ANSI_RESET = \"\\u001B[0m\";\n    public static final String ANSI_RED = \"\\u001B[31m\";\n    public static final String ANSI_BLUE = \"\\u001B[34m\";\n\n    public char[] getABArray(int size, double probabilityOfA, Random random, Boolean changeInMiddle, double fraction) {\n\n        int newSize = size + random.nextInt(size / 5);\n        char[] a = new char[newSize];\n        for (int i = 0; i < newSize; i++) {\n            double toss = (changeInMiddle && (i > (1 - fraction) * newSize || i < newSize * fraction))\n                    ? (1 - probabilityOfA)\n                    : probabilityOfA;\n            if (random.nextDouble() < toss) {\n                a[i] = '-';\n            } else {\n                a[i] = '_';\n            }\n        }\n        return a;\n    }\n\n    public double[][] shiftedEllipse(int dataSize, int seed, double shift, int fans) {\n        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0.0, 1.0);\n        double[][] data = generator.generateTestData(dataSize, 2, seed);\n        Random prg = new Random(0);\n        for (int i = 0; i < dataSize; i++) {\n            int nextFan = prg.nextInt(fans);\n            // scale\n            data[i][1] *= 1.0 / fans;\n            data[i][0] *= 2.0;\n            // shift\n            data[i][0] += shift + 1.0 / fans;\n            data[i] = rotateClockWise(data[i], 2 * PI * nextFan / fans);\n        }\n\n        return data;\n    }\n\n    @Test\n    void testDynamicNumericClustering() throws IOException {\n        long randomSeed = new Random().nextLong();\n        System.out.println(\"Seed \" + randomSeed);\n        // we would be sending dataSize * 360 
vectors\n        int dataSize = 2000;\n        double range = 10.0;\n        int numberOfFans = 3;\n        // corresponds to number of clusters\n        double[][] data = shiftedEllipse(dataSize, 7, range / 2, numberOfFans);\n        int truePos = 0;\n        int falsePos = 0;\n        int falseNeg = 0;\n\n        int truePosRCF = 0;\n        int falsePosRCF = 0;\n        int falseNegRCF = 0;\n\n        int reservoirSize = dataSize;\n        double timedecay = 1.0 / reservoirSize;\n        GlobalLocalAnomalyDetector<float[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)\n                .numberOfRepresentatives(3).timeDecay(timedecay).capacity(reservoirSize).build();\n        reservoir.setGlobalDistance(Summarizer::L2distance);\n\n        double zFactor = 6.0; // six sigma deviation; seems to work best\n        reservoir.setZfactor(zFactor);\n\n        ThresholdedRandomCutForest test = ThresholdedRandomCutForest.builder().dimensions(2).shingleSize(1)\n                .randomSeed(77).timeDecay(timedecay).scoringStrategy(ScoringStrategy.DISTANCE).build();\n        test.setZfactor(zFactor); // using the same apples to apples comparison\n\n        String name = \"clustering_example\";\n        BufferedWriter file = new BufferedWriter(new FileWriter(name));\n\n        Random noiseGen = new Random(randomSeed + 1);\n        for (int degree = 0; degree < 360; degree += 1) {\n            int index = 0;\n            while (index < data.length) {\n                boolean injected = false;\n                float[] vec;\n                if (noiseGen.nextDouble() < 0.005) {\n                    injected = true;\n                    double[] candAnomaly = new double[2];\n                    // generate points along x axis\n                    candAnomaly[0] = (range / 2 * noiseGen.nextDouble() + range / 2);\n                    candAnomaly[1] = 0.1 * (2.0 * noiseGen.nextDouble() - 1.0);\n                    int antiFan = noiseGen.nextInt(numberOfFans);\n        
            // rotate to be 90-180 degrees away -- these are decidedly anomalous\n                    vec = toFloatArray(rotateClockWise(candAnomaly,\n                            -2 * PI * (degree + 180 * (1 + 2 * antiFan) / numberOfFans) / 360));\n                } else {\n                    vec = toFloatArray(rotateClockWise(data[index], -2 * PI * degree / 360));\n                    ++index;\n                }\n\n                GenericAnomalyDescriptor<float[]> result = reservoir.process(vec, 1.0f, null, true);\n\n                AnomalyDescriptor res = test.process(toDoubleArray(vec), 0L);\n                double grade = res.getAnomalyGrade();\n\n                if (result.getRepresentativeList() != null) {\n                    double sum = 0;\n                    for (Weighted<float[]> rep : result.getRepresentativeList()) {\n                        assert (rep.weight <= 1.0);\n                        sum += rep.weight;\n                    }\n                    // checking likelihood summing to 1\n                    assert (sum > 0.9);\n                }\n                if (injected) {\n                    if (result.getAnomalyGrade() > 0) {\n                        ++truePos;\n                    } else {\n                        ++falseNeg;\n                    }\n                    if (grade > 0) {\n                        ++truePosRCF;\n                        assert (res.getAttribution() != null);\n                        // even though scoring is different, we should see attribution add up to score\n                        assertEquals(res.getAttribution().getHighLowSum(), res.getRCFScore(), 1e-6);\n                    } else {\n                        ++falseNegRCF;\n                    }\n                } else {\n                    if (result.getAnomalyGrade() > 0) {\n                        ++falsePos;\n                    }\n                    if (grade > 0) {\n                        ++falsePosRCF;\n                        assert 
(res.getAttribution() != null);\n                        // even though scoring is different, we should see attribution add up to score\n                        assertEquals(res.getAttribution().getHighLowSum(), res.getRCFScore(), 1e-6);\n                    }\n                }\n            }\n\n            if (falsePos + truePos == 0) {\n                throw new IllegalStateException(\"\");\n            }\n\n            checkArgument(falseNeg + truePos == falseNegRCF + truePosRCF, \" incorrect accounting\");\n            System.out.println(\" at degree \" + degree + \" injected \" + (truePos + falseNeg));\n            System.out.print(\"Precision = \" + precision(truePos, falsePos));\n            System.out.println(\" Recall = \" + recall(truePos, falseNeg));\n            System.out.print(\"RCF Distance Mode Precision = \" + precision(truePosRCF, falsePosRCF));\n            System.out.println(\" RCF Distance Mode Recall = \" + recall(truePosRCF, falseNegRCF));\n\n        }\n        // attempting merge\n        long number = new Random().nextLong();\n        int size = reservoirSize;// - new Random().nextInt(100);\n        double newShrinkage = new Random().nextDouble();\n        int reps = new Random().nextInt(10) + 1; // cannot be 0\n        GlobalLocalAnomalyDetector.Builder builder = GlobalLocalAnomalyDetector.builder().capacity(size)\n                .shrinkage(newShrinkage).numberOfRepresentatives(reps).timeDecay(timedecay).randomSeed(number);\n        GlobalLocalAnomalyDetector<float[]> newDetector = new GlobalLocalAnomalyDetector<>(reservoir, reservoir,\n                builder, true, Summarizer::L1distance);\n        assertEquals(newDetector.getCapacity(), size);\n        List<ICluster<float[]>> clusters = newDetector.getClusters();\n        assertNotEquals(clusters, null);\n        double score = newDetector.score(clusters.get(0).getRepresentatives().get(0).index, null, true).get(0).weight;\n        assertEquals(0.0, score);\n        
assertEquals(newDetector.numberOfRepresentatives, reps);\n        assertEquals(newDetector.shrinkage, newShrinkage);\n        GlobalLocalAnomalyDetector<float[]> another = new GlobalLocalAnomalyDetector<>(reservoir, reservoir, builder,\n                false, Summarizer::L2distance);\n        assertNull(another.getClusters());\n        file.close();\n    }\n\n    double precision(int truePos, int falsePos) {\n        return (truePos + falsePos > 0) ? 1.0 * truePos / (truePos + falsePos) : 1.0;\n    }\n\n    double recall(int truePos, int falseNeg) {\n        return (truePos + falseNeg > 0) ? 1.0 * truePos / (truePos + falseNeg) : 1.0;\n    }\n\n    @Test\n    public void testOcclusion() {\n        GlobalLocalAnomalyDetector<float[]> reservoir = GlobalLocalAnomalyDetector.builder().randomSeed(42)\n                .numberOfRepresentatives(3).initialAcceptFraction(1.0).timeDecay(0).capacity(100).maxAllowed(20)\n                .build();\n        reservoir.setGlobalDistance(Summarizer::L2distance);\n        for (int i = 0; i < 10; i++) {\n            reservoir.process(new float[] { 1.0f, 0 }, 1.0f, null, false);\n            reservoir.process(new float[] { (float) cos(2 * PI / 6), (float) sin(2 * PI / 6) }, 1.0f, null, false);\n            reservoir.process(new float[] { (float) cos(2 * 2 * PI / 6), (float) sin(2 * 2 * PI / 6) }, 1.0f, null,\n                    false);\n            reservoir.process(new float[] { (float) cos(3 * 2 * PI / 6), (float) sin(3 * 2 * PI / 6) }, 1.0f, null,\n                    false);\n            reservoir.process(new float[] { (float) cos(4 * 2 * PI / 6), (float) sin(4 * 2 * PI / 6) }, 1.0f, null,\n                    false);\n            reservoir.process(new float[] { (float) cos(5 * 2 * PI / 6), (float) sin(5 * 2 * PI / 6) }, 1.0f, null,\n                    false);\n        }\n        assertTrue(reservoir.getClusters().size() == 6);\n        assert (reservoir.score(new float[] { 1.5f, 0 }, Summarizer::L2distance, true).size() < 6);\n  
  }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/ThresholdedRandomCutForestTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static com.amazon.randomcutforest.config.ImputationMethod.FIXED_VALUES;\nimport static com.amazon.randomcutforest.config.ImputationMethod.LINEAR;\nimport static com.amazon.randomcutforest.config.ImputationMethod.NEXT;\nimport static com.amazon.randomcutforest.config.ImputationMethod.PREVIOUS;\nimport static com.amazon.randomcutforest.config.ImputationMethod.RCF;\nimport static com.amazon.randomcutforest.config.ImputationMethod.ZERO;\nimport static com.amazon.randomcutforest.config.TransformMethod.DIFFERENCE;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE;\nimport static com.amazon.randomcutforest.config.TransformMethod.NORMALIZE_DIFFERENCE;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.List;\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport 
org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.EnumSource;\nimport org.junit.jupiter.params.provider.MethodSource;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper;\nimport com.amazon.randomcutforest.preprocessor.Preprocessor;\n\npublic class ThresholdedRandomCutForestTest {\n\n    @Test\n    public void testConfigAugmentOne() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        assertThrows(IllegalArgumentException.class,\n                () -> ThresholdedRandomCutForest.builder().compact(true).sampleSize(sampleSize).dimensions(dimensions)\n                        .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED)\n                        .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        // have to enable internal shingling or keep it unspecified\n        assertDoesNotThrow(\n                () -> ThresholdedRandomCutForest.builder().compact(true).sampleSize(sampleSize).dimensions(dimensions)\n                        .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED)\n                        .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        // imputefraction not allowed\n        assertThrows(IllegalArgumentException.class,\n                () -> new 
ThresholdedRandomCutForest.Builder<>().compact(true).sampleSize(sampleSize)\n                        .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                        .forestMode(ForestMode.TIME_AUGMENTED).useImputedFraction(0.5).internalShinglingEnabled(true)\n                        .shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).sampleSize(sampleSize)\n                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .build();\n        assertNotNull(((Preprocessor) forest.getPreprocessor()).getInitialTimeStamps());\n    }\n\n    @Test\n    public void testConfigAugmentTwo() {\n        int baseDimensions = 2;\n        int shingleSize = 1; // passes due to this\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        ThresholdedRandomCutForest.Builder b = new ThresholdedRandomCutForest.Builder().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(false)\n                .shingleSize(shingleSize).anomalyRate(0.01);\n        ThresholdedRandomCutForest f = b.build();\n        assertEquals(f.getForest().getDimensions(), dimensions + 1);\n\n        assertThrows(IllegalArgumentException.class, () -> f.process(new double[1], 0L, new int[] { -1 }));\n        assertThrows(IllegalArgumentException.class, () -> f.process(new double[1], 0L, new int[] { 1 }));\n        assertThrows(IllegalArgumentException.class, () -> f.process(new double[1], 0L, new int[2]));\n        assertThrows(IllegalArgumentException.class, () -> f.extrapolate(10));\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new 
ThresholdedRandomCutForest.Builder<>().dimensions(dimensions).randomSeed(seed)\n                        .weights(new double[] { -1 }).forestMode(ForestMode.TIME_AUGMENTED)\n                        .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions).randomSeed(seed)\n                        .transformMethod(NORMALIZE).forestMode(ForestMode.TIME_AUGMENTED)\n                        .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        ThresholdedRandomCutForest forest = new ThresholdedRandomCutForest.Builder<>().compact(true)\n                .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                .forestMode(ForestMode.TIME_AUGMENTED).shingleSize(shingleSize).anomalyRate(0.01).build();\n        assertTrue(forest.getForest().isInternalShinglingEnabled()); // default on\n\n    }\n\n    @Test\n    public void testConfigImpute() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        // have to enable internal shingling or keep it unfixed\n        assertThrows(IllegalArgumentException.class,\n                () -> new ThresholdedRandomCutForest.Builder<>().compact(true).dimensions(dimensions)\n                        .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)\n                        .internalShinglingEnabled(false).shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        assertDoesNotThrow(() -> new ThresholdedRandomCutForest.Builder<>().compact(true).dimensions(dimensions)\n                .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)\n                
.shingleSize(shingleSize).anomalyRate(0.01).build());\n\n        assertThrows(IllegalArgumentException.class,\n                () -> new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions).randomSeed(seed)\n                        .forestMode(ForestMode.STREAMING_IMPUTE).outputAfter(1).startNormalization(1)\n                        .shingleSize(shingleSize).anomalyRate(0.01).build());\n    }\n\n    @Test\n    public void testConfigStandard() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        // have to enable internal shingling or keep it unfixed\n        assertThrows(IllegalArgumentException.class,\n                () -> ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                        .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STANDARD)\n                        .useImputedFraction(0.5).internalShinglingEnabled(false).shingleSize(shingleSize)\n                        .anomalyRate(0.01).build());\n\n        assertDoesNotThrow(() -> {\n            ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions).precision(Precision.FLOAT_32)\n                    .randomSeed(seed).forestMode(ForestMode.STANDARD).internalShinglingEnabled(false)\n                    .shingleSize(shingleSize).anomalyRate(0.01).build();\n        });\n\n        assertThrows(IllegalArgumentException.class, () -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)\n                    .transformMethod(NORMALIZE).startNormalization(111).stopNormalization(100).build();\n        });\n        // change if baseDimension != 2\n      
  double[] testOne = new double[] { 0 };\n        double[] testTwo = new double[] { 0, -1 };\n        double[] testThree = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        double[] testFour = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        double[] testFive = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        double[] testSix = new double[] { new Random().nextDouble(), new Random().nextDouble() };\n        assertThrows(IllegalArgumentException.class, () -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)\n                    .transformMethod(NORMALIZE).ignoreNearExpectedFromAbove(testOne).build();\n        });\n        assertThrows(IllegalArgumentException.class, () -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)\n                    .transformMethod(NORMALIZE).ignoreNearExpectedFromAbove(testTwo).build();\n        });\n        assertDoesNotThrow(() -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)\n                    .transformMethod(NORMALIZE).ignoreNearExpectedFromAbove(testThree)\n                    .ignoreNearExpectedFromBelow(testFour).ignoreNearExpectedFromAboveByRatio(testFive)\n                    
.ignoreNearExpectedFromBelowByRatio(testSix).build();\n            double[] array = forest.getPredictorCorrector().getIgnoreNearExpected();\n            assert (array.length == 4 * baseDimensions);\n            assert (array[0] == testThree[0]);\n            assert (array[1] == testThree[1]);\n            assert (array[2] == testFour[0]);\n            assert (array[3] == testFour[1]);\n            assert (array[4] == testFive[0]);\n            assert (array[5] == testFive[1]);\n            assert (array[6] == testSix[0]);\n            assert (array[7] == testSix[1]);\n            double random = new Random().nextDouble();\n            assertThrows(IllegalArgumentException.class, () -> forest.predictorCorrector.setSamplingRate(-1));\n            assertThrows(IllegalArgumentException.class, () -> forest.predictorCorrector.setSamplingRate(2));\n            assertDoesNotThrow(() -> forest.predictorCorrector.setSamplingRate(random));\n            assertEquals(forest.predictorCorrector.getSamplingRate(), random, 1e-10);\n            long newSeed = forest.predictorCorrector.getRandomSeed();\n            assertEquals(seed, newSeed);\n            assertFalse(forest.predictorCorrector.autoAdjust);\n            assertNull(forest.predictorCorrector.getDeviations());\n        });\n        assertDoesNotThrow(() -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STANDARD).shingleSize(shingleSize).anomalyRate(0.01)\n                    .transformMethod(NORMALIZE).autoAdjust(true).build();\n            assertTrue(forest.predictorCorrector.autoAdjust);\n            assert (forest.predictorCorrector.getDeviations().length == 2 * baseDimensions);\n        });\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                
.precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STANDARD).shingleSize(shingleSize)\n                .anomalyRate(0.01).transformMethod(NORMALIZE).startNormalization(111).stopNormalization(111).build();\n\n        assertTrue(forest.getForest().isInternalShinglingEnabled()); // left to false\n        assertEquals(((Preprocessor) forest.getPreprocessor()).getInitialValues().length, 111);\n        assertEquals(((Preprocessor) forest.getPreprocessor()).getInitialTimeStamps().length, 111);\n        assertEquals(((Preprocessor) forest.getPreprocessor()).getStopNormalization(), 111);\n        assertEquals(((Preprocessor) forest.getPreprocessor()).getStartNormalization(), 111);\n    }\n\n    @Test\n    void testImputeConfig() {\n        int baseDimensions = 1;\n        int shingleSize = 2;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        // not providing values\n        assertThrows(IllegalArgumentException.class, () -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(ImputationMethod.FIXED_VALUES)\n                    .normalizeTime(true).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                    .build();\n        });\n\n        // incorrect number of values to fill\n        assertThrows(IllegalArgumentException.class, () -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(ImputationMethod.FIXED_VALUES)\n                    .fillValues(new double[] { 0.0, 17.0 
}).normalizeTime(true).internalShinglingEnabled(true)\n                    .shingleSize(shingleSize).anomalyRate(0.01).build();\n        });\n\n        assertDoesNotThrow(() -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(ImputationMethod.FIXED_VALUES)\n                    .fillValues(new double[] { 2.0 }).internalShinglingEnabled(true).shingleSize(shingleSize)\n                    .anomalyRate(0.01).build();\n        });\n    }\n\n    @ParameterizedTest\n    @EnumSource(ImputationMethod.class)\n    void testImpute(ImputationMethod method) {\n        int baseDimensions = 1;\n        int shingleSize = 1;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        // shingle size 1 ie not useful for impute\n        assertThrows(IllegalArgumentException.class, () -> {\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).precision(Precision.FLOAT_32).randomSeed(seed)\n                    .forestMode(ForestMode.STREAMING_IMPUTE).imputationMethod(method).normalizeTime(true)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01).build();\n        });\n\n        int newShingleSize = 4;\n        int newDimensions = baseDimensions * newShingleSize;\n\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(newDimensions)\n                .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)\n                .imputationMethod(method).internalShinglingEnabled(true).shingleSize(newShingleSize).anomalyRate(0.01)\n                .useImputedFraction(0.76).fillValues(new double[] { 0 
}).build();\n\n        double[] fixedData = new double[] { 1.0 };\n        double[] newData = new double[] { 10.0 };\n        Random random = new Random(0);\n        int count = 0;\n        for (int i = 0; i < 200 + new Random().nextInt(100); i++) {\n            forest.process(fixedData, (long) count * 113 + random.nextInt(10));\n            ++count;\n        }\n\n        AnomalyDescriptor result = forest.process(newData, (long) count * 113 + 1000);\n        assert (result.getAnomalyGrade() > 0);\n        assert (result.isExpectedValuesPresent());\n        if (method != NEXT && method != ZERO && method != FIXED_VALUES) {\n            assert (result.getRelativeIndex() == 0);\n            assertArrayEquals(result.getExpectedValuesList()[0], fixedData, 1e-6);\n        }\n        // the gap is 1000 + 113 which is about 9 times 113\n        // but only the first three entries are allowed in with shinglesize 4,\n        // after which the imputation is 100% and\n        // only at most 76% imputed tuples are allowed in the forest\n        // an additional one arise from the actual input\n        assertEquals(forest.getForest().getTotalUpdates(), count + 9 + 1);\n        // triggerring consecutive anomalies (no differencing)\n        if (method == PREVIOUS && method == RCF) {\n            assertEquals(forest.process(newData, (long) count * 113 + 1113).getAnomalyGrade(), 1.0);\n        }\n        assert (forest.process(new double[] { 20 }, (long) count * 113 + 1226).getAnomalyGrade() > 0);\n\n        long stamp = (long) count * 113 + 1226;\n        // time has to increase\n        assertThrows(IllegalArgumentException.class, () -> {\n            forest.process(new double[] { 20 }, stamp);\n        });\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"args\")\n    void testImpute(TransformMethod transformMethod, ImputationMethod method) {\n        int baseDimensions = 1;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long 
seed = new Random().nextLong();\n\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .precision(Precision.FLOAT_32).randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE)\n                .imputationMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .useImputedFraction(0.76).fillValues(new double[] { 1.0 }).transformMethod(transformMethod).build();\n\n        double[] fixedData = new double[] { 1.0 };\n        double[] newData = new double[] { 10.0 };\n        Random random = new Random();\n        int count = 0;\n        for (int i = 0; i < 2000 + new Random().nextInt(100); i++) {\n            forest.process(fixedData, (long) count * 113 + random.nextInt(10));\n            ++count;\n        }\n\n        // note every will have an update\n        assertEquals(forest.getForest().getTotalUpdates(), count);\n        AnomalyDescriptor result = forest.process(newData, (long) count * 113 + 1000);\n        if (method != NEXT && method != LINEAR) {\n            assert (result.getAnomalyGrade() > 0);\n            assert (result.isExpectedValuesPresent());\n        }\n        // the other impute methods generate too much noise\n        if (method == RCF || method == PREVIOUS) {\n            assert (Math.abs(result.getExpectedValuesList()[0][0] - fixedData[0]) < 0.05);\n        }\n\n        // the gap is 1000 + 113 which is about 9 times 113\n        // but only the first three entries are allowed in with shinglesize 4,\n        // after which the imputation is 100% and\n        // only at most 76% imputed tuples are allowed in the forest\n        // an additional one does not arise from the actual input because all the\n        // initial\n        // entries are imputed and the method involves differencing\n        if (transformMethod != DIFFERENCE && transformMethod != NORMALIZE_DIFFERENCE) {\n            
assertEquals(forest.getForest().getTotalUpdates(), count + 9 + 1);\n        } else {\n            assertEquals(forest.getForest().getTotalUpdates(), count + 9 + 1);\n        }\n    }\n\n    static Stream<Arguments> args() {\n        return transformMethodStream().flatMap(\n                classParameter -> imputationMethod().map(testParameter -> Arguments.of(classParameter, testParameter)));\n    }\n\n    static Stream<ImputationMethod> imputationMethod() {\n        return Stream.of(ImputationMethod.values());\n    }\n\n    static Stream<TransformMethod> transformMethodStream() {\n        return Stream.of(TransformMethod.values());\n    }\n\n    @Test\n    void testMapper() {\n        double[] initialData = new double[] { 25.0, 25.0, 25.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0, 23.0,\n                23.0, 23.0, 23.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0,\n                20.0, 20.0, 20.0, 20.0, 20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0,\n                17.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 15.0, 15.0, 15.0, 15.0, 15.0,\n                15.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 18.0, 18.0, 18.0,\n                18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,\n                21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 23.0, 23.0, 23.0,\n                23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0, 23.0, 23.0, 23.0, 23.0, 23.0,\n                23.0, 23.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0,\n                21.0, 21.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,\n                19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0,\n     
           17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0,\n                16.0, 16.0, 16.0, 16.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0,\n                15.0, 15.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0,\n                13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 14.0, 14.0, 14.0, 14.0, 15.0, 15.0, 15.0, 15.0,\n                16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0,\n                20.0, 20.0, 21.0, 21.0, 21.0, 22.0, 22.0, 22.0, 22.0, 23.0, 23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0,\n                25.0, 25.0, 25.0, 26.0, 26.0, 26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 28.0, 28.0, 28.0, 28.0, 29.0, 29.0,\n                29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 29.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0,\n                28.0, 28.0, 28.0, 28.0, 28.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0,\n                27.0, 27.0, 27.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0, 25.0,\n                25.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0,\n                27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0,\n                28.0, 28.0, 28.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 25.0,\n                25.0, 25.0, 25.0, 25.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0, 23.0, 23.0, 23.0, 23.0, 22.0,\n                22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0, 20.0, 
20.0, 20.0, 20.0,\n                20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0,\n                18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0,\n                18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,\n                19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,\n                19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,\n                20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 21.0, 21.0,\n                21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0,\n                22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0, 22.0,\n                22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 21.0,\n                21.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0,\n                20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 20.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,\n                19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 17.0, 17.0,\n                17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 15.0,\n                15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 13.0,\n                13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 13.0, 13.0, 13.0,\n                14.0, 14.0, 14.0, 14.0, 14.0, 15.0, 15.0, 15.0, 16.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 18.0,\n                18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 20.0, 20.0, 20.0, 
20.0, 20.0, 21.0, 21.0, 21.0, 22.0, 22.0,\n                22.0, 22.0, 22.0, 23.0, 23.0, 23.0, 24.0, 24.0, 24.0, 24.0, 24.0, 25.0, 25.0, 25.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0,\n                27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 27.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0,\n                26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 26.0, 25.0, 25.0, 25.0, 25.0, 24.0, 24.0, 24.0, 24.0, 24.0, 23.0,\n                23.0, 23.0, 23.0, 23.0, 22.0, 22.0, 22.0, 22.0, 21.0, 21.0, 21.0, 21.0, 21.0, 20.0, 20.0, 20.0, 20.0,\n                20.0, 19.0, 19.0, 19.0, 19.0, 18.0, 18.0, 18.0, 18.0, 18.0, 17.0, 17.0, 17.0, 17.0, 16.0, 16.0, 16.0,\n                16.0, 16.0, 15.0, 15.0, 15.0, 15.0, 15.0, 14.0, 14.0, 14.0, 14.0, 13.0, 13.0, 13.0, 13.0, 13.0, 13.0,\n                13.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 14.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0, 15.0,\n                15.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 16.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0, 17.0,\n                17.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 18.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0, 19.0,\n                19.0, 20.0, 20.0, 20.0, 20.0, 20.0 };\n\n        double[] data = new double[] { 13.0, 20.0, 26.0, 18.0 };\n\n        int shingleSize = 8;\n        int 
numberOfTrees = 30;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n\n        int baseDimensions = 1;\n        long seed = -3095522926185205814L;\n\n        int dimensions = baseDimensions * shingleSize;\n        ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true).dimensions(dimensions)\n                .randomSeed(seed).numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n                .precision(precision).parallelExecutionEnabled(false).outputAfter(32).internalShinglingEnabled(true)\n                .anomalyRate(0.005).initialAcceptFraction(0.125).timeDecay(0.0001).boundingBoxCacheFraction(0)\n                .forestMode(ForestMode.STANDARD).build();\n\n        double scoreSum = 0;\n\n        for (double dataPoint : initialData) {\n            AnomalyDescriptor result = forest.process(new double[] { dataPoint }, 0L);\n            scoreSum += result.getRCFScore();\n        }\n\n        // checking average score < 1\n        assert (scoreSum < initialData.length);\n\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForest second = mapper.toModel(mapper.toState(forest));\n\n        for (double dataPoint : data) {\n            AnomalyDescriptor result = second.process(new double[] { dataPoint }, 0L);\n            // average score jumps due to discontinuity, checking > 1\n            assert (result.getRCFScore() > 1.0);\n        }\n    }\n\n    @ParameterizedTest\n    @ValueSource(ints = { 1, 2, 3, 4, 5, 6 })\n    void smallGap(int gap) {\n        int shingleSize = 4;\n        int numberOfTrees = 50;\n        int sampleSize = 256;\n        Precision precision = Precision.FLOAT_32;\n        int dataSize = 4 * sampleSize;\n\n        // change this to try different number of attributes,\n        // this parameter is not expected to be larger than 5 for this example\n        int baseDimensions = 1;\n        
// 10 trials each\n        int numTrials = 10;\n\n        int correct = 0;\n        for (int z = 0; z < numTrials; z++) {\n            int dimensions = baseDimensions * shingleSize;\n            TransformMethod transformMethod = TransformMethod.NORMALIZE;\n            ThresholdedRandomCutForest forest = ThresholdedRandomCutForest.builder().compact(true)\n                    .dimensions(dimensions).randomSeed(0).numberOfTrees(numberOfTrees).shingleSize(shingleSize)\n                    .sampleSize(sampleSize).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n                    .transformMethod(transformMethod).build();\n\n            long seed = new Random().nextLong();\n            System.out.println(\"seed = \" + seed);\n            Random rng = new Random(seed);\n            for (int i = 0; i < dataSize; i++) {\n                double[] point = new double[] { 0.6 + 0.2 * (2 * rng.nextDouble() - 1) };\n                AnomalyDescriptor result = forest.process(point, 0L);\n            }\n            AnomalyDescriptor result = forest.process(new double[] { 11.2 }, 0L);\n            for (int y = 0; y < gap; y++) {\n                forest.process(new double[] { 0.6 + 0.2 * (2 * rng.nextDouble() - 1) }, 0L);\n            }\n            assert (forest.extrapolate(1, true, 1.0).rangeVector.values[0] < 1.0);\n            assert (forest.extrapolate(1, true, 1.0).rangeVector.values[0] < 1.0);\n\n            result = forest.process(new double[] { 10.0 }, 0L);\n            if (result.getAnomalyGrade() > 0) {\n                ++correct;\n            }\n        }\n        assert (correct > 0.9 * numTrials);\n    }\n\n    @Test\n    public void testAutoAdjustDoesNotSetAbsoluteThreshold() {\n        int dimensions = 4; // any small positive value is fine\n        long seed = new Random().nextLong();\n\n        // ── autoAdjust = false → setAbsoluteThreshold *is* invoked\n        ThresholdedRandomCutForest manual = 
ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)\n                .shingleSize(1).autoAdjust(false).build();\n\n        // ── autoAdjust = true → setAbsoluteThreshold is *skipped*\n        ThresholdedRandomCutForest auto = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)\n                .shingleSize(1).autoAdjust(true).build();\n\n        boolean autoThresholdOff = !manual.getPredictorCorrector().getThresholders()[0].isAutoThreshold();\n\n        boolean autoThresholdOn = auto.getPredictorCorrector().getThresholders()[0].isAutoThreshold();\n\n        // the manual lower thresholder should turn off auto-adjusting\n        assertTrue(autoThresholdOff);\n\n        // … whereas the auto-adjusting build should *not* turn off auto-adjusting\n        assertTrue(autoThresholdOn);\n    }\n\n    @Test\n    void psqPrecondition_nullTimestamps() {\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1).build();\n\n        double[][] data = { { 1.0 } };\n        long[] stamps = null;\n\n        assertThrows(IllegalArgumentException.class, () -> f.processSequentially(data, stamps, d -> true));\n    }\n\n    @Test\n    void psqPrecondition_lengthMismatch() {\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1).build();\n\n        double[][] data = { { 1.0 }, { 2.0 } };\n        long[] stamps = { 0L }; // shorter than data.length\n\n        assertThrows(IllegalArgumentException.class, () -> f.processSequentially(data, stamps, d -> true));\n    }\n\n    @Test\n    void psqPrecondition_notAscending() {\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1).build();\n\n        double[][] data = { { 1.0 }, { 2.0 } };\n        long[] stamps = { 1L, 0L }; // 2nd stamp ≤ 1st\n\n        assertThrows(IllegalArgumentException.class, () -> f.processSequentially(data, stamps, d -> 
true));\n    }\n\n    @Test\n    void psqLoop_nonUniformLengths() {\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1).build();\n\n        // second row has the wrong length\n        double[][] data = { { 1.0 }, { 2.0, 3.0 } };\n        long[] stamps = { 0L, 1L };\n\n        assertThrows(IllegalArgumentException.class, () -> f.processSequentially(data, stamps, d -> true));\n    }\n\n    /**\n     * Filter rejects everything → returned list must be empty even after warm-up.\n     */\n    @Test\n    void psqValidButFilterRejectsAll() {\n        int warmup = 450; // > 400 to guarantee outputReady\n        int total = warmup + 3;\n\n        double[][] data = new double[total][1];\n        long[] stamps = new long[total];\n\n        // 450 normal points (value 1.0) + 3 more normal points\n        for (int i = 0; i < total; i++) {\n            data[i][0] = 1.0;\n            stamps[i] = i; // strictly ascending\n        }\n\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1).build();\n\n        // filter ALWAYS returns false\n        List<AnomalyDescriptor> out = f.processSequentially(data, stamps, d -> false);\n\n        assertTrue(out.isEmpty(), \"filter rejects all so list must be empty\");\n    }\n\n    @Test\n    void psqCacheToggle_path() {\n        int warmup = 500;\n        int total = warmup + 3;\n\n        double[][] data = new double[total][1];\n        long[] stamps = new long[total];\n\n        // 500 benign points (≈1.0), then a spike (20.0), then two benign points\n        for (int i = 0; i < warmup; i++) {\n            data[i][0] = 1.0 + 0.05 * ((i % 5) - 2); // tiny noise\n            stamps[i] = i;\n        }\n        data[warmup][0] = 20.0; // clear outlier\n        data[warmup + 1][0] = 1.0;\n        data[warmup + 2][0] = 1.0;\n        stamps[warmup] = warmup;\n        stamps[warmup + 1] = warmup + 1;\n        stamps[warmup + 2] = warmup + 
2;\n\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1)\n                .boundingBoxCacheFraction(0) // ensures cacheDisabled == true\n                .build();\n\n        assertEquals(0.0, f.getForest().getBoundingBoxCacheFraction(), 1e-10);\n\n        List<AnomalyDescriptor> out = f.processSequentially(data, stamps, d -> d.getAnomalyGrade() > 0);\n\n        /*\n         * Expectations ───────────── 1. The spike should raise at least one descriptor\n         * with grade>0. 2. The finally-block must restore the cache fraction to 0.\n         */\n        assertFalse(out.isEmpty(), \"spike should be detected as anomaly\");\n        assertEquals(0.0, f.getForest().getBoundingBoxCacheFraction(), 1e-10);\n    }\n\n    @Test\n    void psqEmptyDataReturnsEmpty() {\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(1).shingleSize(1).build();\n\n        List<AnomalyDescriptor> out1 = f.processSequentially(new double[0][0], new long[0], d -> true);\n        assertTrue(out1.isEmpty());\n\n        List<AnomalyDescriptor> out2 = f.processSequentially(null, null, d -> true);\n        assertTrue(out2.isEmpty());\n    }\n\n    @Test\n    void testProcessSequentiallyWithMissingValues() {\n        ThresholdedRandomCutForest f = ThresholdedRandomCutForest.builder().dimensions(2).shingleSize(1).build();\n\n        double[][] data = { { 1.0, Double.NaN }, { 3.0, 4.0 }, { Double.NaN, 5.0 }, { Double.NaN, Double.NaN } };\n        long[] stamps = { 10L, 20L, 30L, 40L };\n\n        List<AnomalyDescriptor> descriptors = f.processSequentially(data, stamps, d -> true);\n\n        assertEquals(4, descriptors.size());\n\n        AnomalyDescriptor first = descriptors.get(0);\n        assertEquals(10L, first.getInputTimestamp());\n        assertArrayEquals(new double[] { 1.0, Double.NaN }, first.getCurrentInput());\n        assertNotNull(first.getMissingValues());\n        assertArrayEquals(new int[] { 1 
}, first.getMissingValues());\n\n        AnomalyDescriptor second = descriptors.get(1);\n        assertEquals(20L, second.getInputTimestamp());\n        assertArrayEquals(new double[] { 3.0, 4.0 }, second.getCurrentInput());\n        assertNull(second.getMissingValues());\n\n        AnomalyDescriptor third = descriptors.get(2);\n        assertEquals(30L, third.getInputTimestamp());\n        assertArrayEquals(new double[] { Double.NaN, 5.0 }, third.getCurrentInput());\n        assertNotNull(third.getMissingValues());\n        assertArrayEquals(new int[] { 0 }, third.getMissingValues());\n\n        AnomalyDescriptor fourth = descriptors.get(3);\n        assertEquals(40L, fourth.getInputTimestamp());\n        assertArrayEquals(new double[] { Double.NaN, Double.NaN }, fourth.getCurrentInput());\n        assertNotNull(fourth.getMissingValues());\n        assertArrayEquals(new int[] { 0, 1 }, fourth.getMissingValues());\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/TransformTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\n\npublic class TransformTest {\n\n    @ParameterizedTest\n    @EnumSource(TransformMethod.class)\n    public void AnomalyTest(TransformMethod method) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 10;\n        int length = 40 * sampleSize;\n        int totalcount = 0;\n        for (int i = 0; i < numTrials; i++) {\n            int numberOfTrees = 30 + rng.nextInt(20);\n            int outputAfter = 32 + rng.nextInt(50);\n            // shingleSize 1 is not recommended for complicated input\n            // The test sets alertOnce(true) to suppress cascades after a single injected spike.\n            // Current suppression only triggers for overlapping shingles: gap < shingleSize\n            // (in PredictorCorrector.detect), so “late” follow-on spikes (gap ≥ 
shingleSize)\n            // still raise grades.\n            // If shingleSize is small (e.g., 2), gap is small, anomaly would not be suppressed,\n            // may cause totalcount > numTrials.\n            int shingleSize = 3 + rng.nextInt(15);\n            int baseDimensions = 1 + rng.nextInt(5);\n            int dimensions = baseDimensions * shingleSize;\n            long forestSeed = rng.nextLong();\n            System.out.println(\" forestSeed \" + forestSeed + \" method \" + method + \" seed \" + seed + \" outputAfter \"\n                    + outputAfter + \" shingleSize \" + shingleSize + \" baseDimensions \" + baseDimensions + \" dimensions \"\n                    + dimensions + \" numberOfTrees \" + numberOfTrees + \" rng \" + rng + \" i \" + i + \" shingleSize \"\n                    + shingleSize + \" rng.nextLong() \");\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                    .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter).alertOnce(true)\n                    .transformMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).build();\n\n            int count = 0;\n            double[] point = new double[baseDimensions];\n            double[] anomalyPoint = new double[baseDimensions];\n            for (int j = 0; j < baseDimensions; j++) {\n                point[j] = 50 - rng.nextInt(100);\n                int sign = (rng.nextDouble() < 0.5) ? 
-1 : 1;\n                anomalyPoint[j] = point[j] + sign * (10 - rng.nextInt(5));\n            }\n            int anomalyAt = outputAfter + rng.nextInt(length / 2);\n            for (int j = 0; j < anomalyAt; j++) {\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    ++count;\n                }\n            }\n            assertEquals(0, count);\n            assertTrue(first.process(anomalyPoint, 0L).getAnomalyGrade() > 0);\n            for (int j = anomalyAt + 1; j < length; j++) {\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    ++count;\n                }\n            }\n            // differencing introduces cascades\n            totalcount += count;\n        }\n        System.out.println(totalcount);\n        int finalTotalcount = totalcount;\n        assertTrue(\n                totalcount < numTrials || method == TransformMethod.DIFFERENCE\n                        || method == TransformMethod.NORMALIZE_DIFFERENCE,\n                () -> String.format(\n                        \"Assertion failed: totalcount=%d, numTrials=%d, method=%s, sampleSize=%d, length=%d, seed=%d\",\n                        finalTotalcount, numTrials, method, sampleSize, length, seed));\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class, names = { \"NONE\", \"NORMALIZE\" })\n    public void AnomalyTestSine1D(TransformMethod method) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 50;\n        int length = 4 * sampleSize;\n        int found = 0;\n        int count = 0;\n        double grade = 0;\n\n        for (int i = 0; i < numTrials; i++) {\n            int numberOfTrees = 50 + rng.nextInt(20);\n            int 
outputAfter = 64 + rng.nextInt(50);\n            int shingleSize = 8;\n            int baseDimensions = 1; // multiple dimensions would have anti-correlations induced by\n                                    // differring periods\n            int dimensions = baseDimensions * shingleSize;\n            long forestSeed = rng.nextLong();\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                    .numberOfTrees(numberOfTrees).randomSeed(forestSeed).outputAfter(outputAfter)\n                    .transformMethod(method).internalShinglingEnabled(true).shingleSize(shingleSize).build();\n            double[][] data = ShingledMultiDimDataWithKeys.getMultiDimData(length, 50, 100, 0, rng.nextLong(),\n                    baseDimensions, 0, false).data;\n\n            int anomalyAt = outputAfter + rng.nextInt(length / 2);\n            for (int j = 0; j < baseDimensions; j++) {\n                int sign = (rng.nextDouble() < 0.5) ? 
-1 : 1;\n                // large obvious spike\n                data[anomalyAt][j] += sign * 100;\n            }\n\n            for (int j = 0; j < length; j++) {\n                AnomalyDescriptor firstResult = first.process(data[j], 0L);\n                if (firstResult.getAnomalyGrade() > 0) {\n                    // detection can be late\n                    if (j + firstResult.getRelativeIndex() == anomalyAt) {\n                        ++found;\n                    }\n                    ++count;\n                    grade += firstResult.getAnomalyGrade();\n                }\n\n            }\n        }\n        System.out.println(found);\n        // catch anomalies 80% of the time\n        assertTrue(found > 0.8 * numTrials);\n\n        // precision is not terrible\n        assertTrue(count < 2 * numTrials);\n\n        // average grade is closer to found\n        assertTrue(grade < 1.5 * numTrials);\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class, names = { \"NORMALIZE\", \"NORMALIZE_DIFFERENCE\", \"DIFFERENCE\" })\n    public void RCFCastTest(TransformMethod method) {\n        int sampleSize = 256;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        Random rng = new Random(seed);\n        int numTrials = 1;\n        int length = sampleSize / 2;\n        int forecastHorizon = 2;\n        for (int i = 0; i < numTrials; i++) {\n            int numberOfTrees = 30 + rng.nextInt(20);\n            int outputAfter = 32 + rng.nextInt(50);\n            // shingleSize 1 is not recommended for complicated input\n            int shingleSize = 4 + rng.nextInt(5);\n            int baseDimensions = 1;\n            int offset = rng.nextInt(10);\n            int dimensions = baseDimensions * shingleSize;\n            RCFCaster first = new RCFCaster.Builder().dimensions(dimensions).numberOfTrees(numberOfTrees).randomSeed(0)\n                    
.outputAfter(outputAfter).alertOnce(true).forecastHorizon(forecastHorizon).transformMethod(method)\n                    .internalShinglingEnabled(true).shingleSize(shingleSize).build();\n\n            for (int j = 0; j < length; j++) {\n                ForecastDescriptor firstResult = first.process(new double[] { j + offset }, 0L);\n                if (j >= outputAfter - 1) {\n                    for (int y = 0; y < forecastHorizon; y++) {\n                        assertTrue(Math.abs(\n                                firstResult.getTimedForecast().rangeVector.values[y] - (j + offset + 1 + y)) < 0.3);\n                    }\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/calibration/ErrorHandlerTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.calibration;\n\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.api.Test;\n\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\npublic class ErrorHandlerTest {\n\n    @Test\n    public void errorHandlerConstructorTest() {\n        ErrorHandler.Builder builder = new ErrorHandler.Builder();\n        // builder().compact(true).dimensions(dimensions).randomSeed(seed + 1)\n        // .numberOfTrees(numberOfTrees).shingleSize(shingleSize).sampleSize(sampleSize)\n        // .internalShinglingEnabled(true).precision(precision).anomalyRate(0.01).forestMode(ForestMode.STANDARD)\n        // .transformMethod(transformMethod).outputAfter(outputAfter).forecastHorizon(forecastHorizon)\n        // .calibration(calibration).errorHorizon(errorHorizon).initialAcceptFraction(0.125);\n        assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));\n        builder.errorHorizon(1).forecastHorizon(2);\n        assertThrows(IllegalArgumentException.class, () -> new 
ErrorHandler(builder));\n        builder.errorHorizon(2).forecastHorizon(2);\n        assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));\n        builder.dimensions(1);\n        assertDoesNotThrow(() -> new ErrorHandler(builder));\n        builder.errorHorizon(10000);\n        assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(builder));\n    }\n\n    @Test\n    public void testCalibrate() {\n        ErrorHandler e = ErrorHandler.builder().errorHorizon(2).forecastHorizon(2).dimensions(2).build();\n        assertThrows(IllegalArgumentException.class,\n                () -> e.calibrate(new double[2], Calibration.SIMPLE, new RangeVector(5)));\n        RangeVector r = new RangeVector(4);\n        e.sequenceIndex = 5;\n        e.lastDataDeviations = new float[] { 1.0f, 1.3f };\n        float v = new Random().nextFloat();\n        r.shift(0, v);\n        assertThrows(IllegalArgumentException.class,\n                () -> e.calibrate(new double[1], Calibration.SIMPLE, new RangeVector(r)));\n        e.calibrate(new double[2], Calibration.SIMPLE, new RangeVector(r));\n        assertEquals(r.values[0], v);\n        e.calibrate(new double[2], Calibration.NONE, r);\n        assertEquals(r.values[0], v);\n        assertEquals(r.upper[0], v);\n        assertEquals(r.values[1], 0);\n        e.lastDataDeviations = new float[] { v + 1.0f, 1.3f };\n        e.calibrate(new double[2], Calibration.MINIMAL, r);\n        assertEquals(r.values[0], v);\n        assertEquals(r.values[1], 0);\n    }\n\n    @Test\n    public void testSerializedConstructor() {\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(0, 0, 0, 0, 0, null, null, null, null, null, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(0, 1, 0, 0, 0, null, null, null, null, null, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(1, 1, 0, 
0, 0, null, null, null, null, null, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(1, 1, -1, 0, 1, null, null, null, null, null, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(1, 1, 0, 0, 1, null, null, null, null, null, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(1, 1, 0, 0.1, 1, null, null, null, new Deviation[2], null, null));\n        assertThrows(IllegalArgumentException.class,\n                () -> new ErrorHandler(1, 1, 0, 0.1, 1, null, null, new double[1], new Deviation[3], null, null));\n        assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(1, 1, 0, 0.1, 1, null, new float[0],\n                new double[2], new Deviation[3], null, null));\n        Deviation[] deviations = new Deviation[3];\n        deviations[0] = deviations[1] = deviations[2] = new Deviation(0);\n        assertThrows(IllegalArgumentException.class, () -> new ErrorHandler(1, 1, 0, 0.1, 1, new float[2], new float[1],\n                new double[2], deviations, null, null));\n        assertDoesNotThrow(() -> new ErrorHandler(1, 1, 0, 0.1, 1, new float[0], new float[1], new double[2],\n                deviations, null, null));\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/RCFCasterMapperTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\n\nimport java.util.Random;\n\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.CsvSource;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.ForecastDescriptor;\nimport com.amazon.randomcutforest.parkservices.RCFCaster;\nimport com.amazon.randomcutforest.parkservices.config.Calibration;\nimport com.amazon.randomcutforest.returntypes.DiVector;\nimport com.amazon.randomcutforest.returntypes.RangeVector;\n\npublic class RCFCasterMapperTest {\n\n    @ParameterizedTest\n    @CsvSource({ \"SIMPLE,1\", \"MINIMAL,1\", \"NONE,1\", \"SIMPLE,2\", \"MINIMAL,2\", \"NONE,2\" })\n    public void testRoundTripStandardShingleSizeEight(String calibrationString, int inputLength) {\n        int shingleSize = 8;\n        int dimensions = inputLength * shingleSize;\n        int forecastHorizon = shingleSize * 3;\n        for (int trials = 0; trials < 1; trials++) {\n\n            long seed = new Random().nextLong();\n            System.out.println(\" seed \" + seed);\n            // note shingleSize == 8\n            RCFCaster first = 
RCFCaster.builder().dimensions(dimensions).randomSeed(seed).internalShinglingEnabled(true)\n                    .anomalyRate(0.01).shingleSize(shingleSize).calibration(Calibration.MINIMAL)\n                    .forecastHorizon(forecastHorizon).calibration(Calibration.valueOf(calibrationString))\n                    .transformMethod(TransformMethod.NORMALIZE).build();\n\n            Random r = new Random(seed);\n            for (int i = 0; i < 2000 + r.nextInt(1000); i++) {\n                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();\n                first.process(point, 0L);\n            }\n\n            // serialize + deserialize\n            RCFCasterMapper mapper = new RCFCasterMapper();\n            RCFCaster second = mapper.toModel(mapper.toState(first));\n            assertArrayEquals(first.getErrorHandler().getIntervalPrecision(),\n                    second.getErrorHandler().getIntervalPrecision(), 1e-6f);\n            assertArrayEquals(first.getErrorHandler().getErrorRMSE().high, second.getErrorHandler().getErrorRMSE().high,\n                    1e-6f);\n            assertArrayEquals(first.getErrorHandler().getErrorRMSE().low, second.getErrorHandler().getErrorRMSE().low,\n                    1e-6f);\n            assertArrayEquals(first.getErrorHandler().getErrorDistribution().values,\n                    second.getErrorHandler().getErrorDistribution().values, 1e-6f);\n            assertArrayEquals(first.getErrorHandler().getErrorDistribution().upper,\n                    second.getErrorHandler().getErrorDistribution().upper, 1e-6f);\n            assertArrayEquals(first.getErrorHandler().getErrorDistribution().lower,\n                    second.getErrorHandler().getErrorDistribution().lower, 1e-6f);\n            // update re-instantiated forest\n            for (int i = 0; i < 100; i++) {\n                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();\n                ForecastDescriptor firstResult = 
first.process(point, 0L);\n                ForecastDescriptor secondResult = second.process(point, 0L);\n                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);\n                verifyForecast(firstResult, secondResult, inputLength);\n            }\n        }\n    }\n\n    void verifyForecast(ForecastDescriptor firstResult, ForecastDescriptor secondResult, int inputLength) {\n        RangeVector firstForecast = firstResult.getTimedForecast().rangeVector;\n        RangeVector secondForecast = secondResult.getTimedForecast().rangeVector;\n        assertArrayEquals(firstForecast.values, secondForecast.values, 1e-6f);\n        assertArrayEquals(firstForecast.upper, secondForecast.upper, 1e-6f);\n        assertArrayEquals(firstForecast.lower, secondForecast.lower, 1e-6f);\n\n        float[] firstErrorP50 = firstResult.getObservedErrorDistribution().values;\n        float[] secondErrorP50 = secondResult.getObservedErrorDistribution().values;\n        assertArrayEquals(firstErrorP50, secondErrorP50, 1e-6f);\n\n        float[] firstUpperError = firstResult.getObservedErrorDistribution().upper;\n        float[] secondUpperError = secondResult.getObservedErrorDistribution().upper;\n        assertArrayEquals(firstUpperError, secondUpperError, 1e-6f);\n\n        float[] firstLowerError = firstResult.getObservedErrorDistribution().lower;\n        float[] secondLowerError = secondResult.getObservedErrorDistribution().lower;\n        assertArrayEquals(firstLowerError, secondLowerError, 1e-6f);\n\n        DiVector firstRmse = firstResult.getErrorRMSE();\n        DiVector secondRmse = secondResult.getErrorRMSE();\n        assertArrayEquals(firstRmse.high, secondRmse.high, 1e-6);\n        assertArrayEquals(firstRmse.low, secondRmse.low, 1e-6);\n\n        assertArrayEquals(firstResult.getErrorMean(), secondResult.getErrorMean(), 1e-6f);\n        assertArrayEquals(firstResult.getIntervalPrecision(), secondResult.getIntervalPrecision(), 
1e-6f);\n    }\n\n    @ParameterizedTest\n    @CsvSource({ \"SIMPLE,1\", \"MINIMAL,1\", \"NONE,1\", \"SIMPLE,2\", \"MINIMAL,2\", \"NONE,2\" })\n    public void testNotFullyInitialized(String calibrationString, int inputLength) {\n        int shingleSize = 8;\n        int dimensions = inputLength * shingleSize;\n        int forecastHorizon = shingleSize * 3;\n        int outputAfter = 32;\n        for (int trials = 0; trials < 10; trials++) {\n\n            long seed = new Random().nextLong();\n            System.out.println(\" seed \" + seed);\n\n            // note shingleSize == 8\n            RCFCaster first = RCFCaster.builder().dimensions(dimensions).randomSeed(seed).internalShinglingEnabled(true)\n                    .anomalyRate(0.01).shingleSize(shingleSize).calibration(Calibration.valueOf(calibrationString))\n                    .forecastHorizon(forecastHorizon).transformMethod(TransformMethod.NORMALIZE)\n                    .outputAfter(outputAfter).build();\n\n            Random r = new Random();\n            for (int i = 0; i < new Random().nextInt(outputAfter); i++) {\n                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();\n                RCFCasterMapper mapper = new RCFCasterMapper();\n                RCFCaster shadow = mapper.toModel(mapper.toState(first));\n                ForecastDescriptor a = first.process(point, 0L);\n                ForecastDescriptor b = shadow.process(point, 0L);\n                assertEquals(a.getRCFScore(), b.getRCFScore(), 1e-6);\n                first.process(point, 0L);\n            }\n\n            // serialize + deserialize\n            RCFCasterMapper mapper = new RCFCasterMapper();\n            RCFCaster second = mapper.toModel(mapper.toState(first));\n\n            // update re-instantiated forest\n            for (int i = 0; i < 100; i++) {\n                double[] point = r.ints(inputLength, 0, 50).asDoubleStream().toArray();\n                ForecastDescriptor firstResult = 
first.process(point, 0L);\n                ForecastDescriptor secondResult = second.process(point, 0L);\n                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);\n                verifyForecast(firstResult, secondResult, 1);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/ThresholdedRandomCutForestMapperTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.copyAtEnd;\nimport static com.amazon.randomcutforest.preprocessor.Preprocessor.shiftLeft;\nimport static org.junit.jupiter.api.Assertions.assertArrayEquals;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertNull;\n\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.EnumSource;\nimport org.junit.jupiter.params.provider.MethodSource;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.ForestMode;\nimport com.amazon.randomcutforest.config.ImputationMethod;\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.parkservices.AnomalyDescriptor;\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.amazon.randomcutforest.parkservices.returntypes.RCFComputeDescriptor;\nimport com.amazon.randomcutforest.returntypes.TimedRangeVector;\nimport 
com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.testutils.MultiDimDataWithKey;\nimport com.amazon.randomcutforest.testutils.ShingledMultiDimDataWithKeys;\nimport com.fasterxml.jackson.core.JsonProcessingException;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\npublic class ThresholdedRandomCutForestMapperTest {\n\n    @Test\n    public void testRoundTripStandardShingleSizeOne() {\n        int dimensions = 10;\n        for (int trials = 0; trials < 1; trials++) {\n\n            long seed = new Random().nextLong();\n            RandomCutForest.Builder<?> builder = RandomCutForest.builder().dimensions(dimensions).randomSeed(seed);\n\n            // note shingleSize == 1\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                    .randomSeed(seed).internalShinglingEnabled(true).anomalyRate(0.01).build();\n            ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                    .randomSeed(seed).anomalyRate(0.01).forestMode(ForestMode.STANDARD).internalShinglingEnabled(false)\n                    .build();\n            RandomCutForest forest = builder.build();\n\n            Random r = new Random();\n            for (int i = 0; i < 2000 + new Random().nextInt(1000); i++) {\n                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                AnomalyDescriptor secondResult = second.process(point, 0L);\n\n                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-10);\n                forest.update(point);\n            }\n\n            // serialize + 
deserialize\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n            // update re-instantiated forest\n            for (int i = 0; i < 100; i++) {\n                double[] point = r.ints(dimensions, 0, 50).asDoubleStream().toArray();\n                AnomalyDescriptor firstResult = first.process(point, 0L);\n                AnomalyDescriptor secondResult = second.process(point, 0L);\n                AnomalyDescriptor thirdResult = third.process(point, 0L);\n                double score = forest.getAnomalyScore(point);\n                assertEquals(score, firstResult.getRCFScore(), 1e-10);\n                assertEquals(score, secondResult.getRCFScore(), 1e-10);\n                assertEquals(score, thirdResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getDataConfidence(), secondResult.getDataConfidence(), 1e-10);\n                forest.update(point);\n            }\n        }\n    }\n\n    @ParameterizedTest\n    @ValueSource(booleans = { true, false })\n    public void testConversions(boolean internal) {\n        int dimensions = 10;\n        int shingleSize = 2;\n        for (int trials = 0; trials < 5; trials++) {\n\n            long seed = new Random().nextLong();\n            System.out.println(\"Seed \" + seed);\n            RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).internalShinglingEnabled(internal)\n                    .shingleSize(shingleSize).randomSeed(seed).build();\n\n            ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                    .randomSeed(seed).internalShinglingEnabled(internal).shingleSize(shingleSize).anomalyRate(0.01)\n                    .build();\n\n            double[] shingle = new double[dimensions];\n            Random r = new Random(seed + 1);\n            for (int i = 0; i < 
new Random(seed + 2).nextInt(1000); i++) {\n                int length = dimensions / shingleSize;\n                double[] point = r.ints(length, 0, 50).asDoubleStream().toArray();\n                shiftLeft(shingle, length);\n                copyAtEnd(shingle, point);\n                first.process((internal) ? point : shingle, 0L);\n                forest.update((internal) ? point : shingle);\n            }\n\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setSaveExecutorContextEnabled(true);\n            mapper.setSaveTreeStateEnabled(true);\n            mapper.setPartialTreeStateEnabled(true);\n            RandomCutForest copyForest = mapper.toModel(mapper.toState(forest));\n\n            ThresholdedRandomCutForest second = new ThresholdedRandomCutForest(copyForest, 0.01, null,\n                    first.getPreprocessor().getShingledInput());\n\n            for (int i = 0; i < new Random(seed + 3).nextInt(1000); i++) {\n                int length = dimensions / shingleSize;\n                double[] point = r.ints(length, 0, 50).asDoubleStream().toArray();\n                shiftLeft(shingle, length);\n                copyAtEnd(shingle, point);\n                AnomalyDescriptor firstResult = first.process((internal) ? point : shingle, 0L);\n                // second only accepts the shorter input\n                // -- but note transformation is NONE as default\n                AnomalyDescriptor secondResult = second.process(point, 0L);\n\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore((internal) ? point : shingle), 1e-10);\n                forest.update((internal) ? 
point : shingle);\n            }\n\n            // serialize + deserialize\n            ThresholdedRandomCutForestMapper newMapper = new ThresholdedRandomCutForestMapper();\n            ThresholdedRandomCutForest third = newMapper.toModel(newMapper.toState(second));\n\n            // update re-instantiated forest\n            for (int i = 0; i < 100; i++) {\n                int length = dimensions / shingleSize;\n                double[] point = r.ints(length, 0, 50).asDoubleStream().toArray();\n                shiftLeft(shingle, length);\n                copyAtEnd(shingle, point);\n                AnomalyDescriptor firstResult = first.process((internal) ? point : shingle, 0L);\n                AnomalyDescriptor secondResult = second.process(point, 0L);\n                AnomalyDescriptor thirdResult = third.process(point, 0L);\n                double score = forest.getAnomalyScore((internal) ? point : shingle);\n                assertEquals(score, firstResult.getRCFScore(), 1e-10);\n                assertEquals(score, secondResult.getRCFScore(), 1e-10);\n                assertEquals(score, thirdResult.getRCFScore(), 1e-10);\n                assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);\n                forest.update((internal) ? 
point : shingle);\n            }\n        }\n    }\n\n    @Test\n    public void testRoundTripStandardShingled() throws JsonProcessingException {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        RandomCutForest.Builder<?> builder = RandomCutForest.builder().dimensions(dimensions).randomSeed(seed);\n\n        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).shingleSize(shingleSize).internalShinglingEnabled(false).anomalyRate(0.01).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).shingleSize(shingleSize).internalShinglingEnabled(false).anomalyRate(0.01).build();\n        RandomCutForest forest = builder.build();\n\n        // thresholds should not affect scores\n        double value = 0.75 + 0.5 * new Random().nextDouble();\n        first.setLowerThreshold(value);\n        second.setLowerThreshold(value);\n\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.generateShingledDataWithKey(10 * sampleSize, 50,\n                shingleSize, baseDimensions, seed);\n\n        for (double[] point : dataWithKeys.data) {\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-4);\n            forest.update(point);\n        }\n\n        ObjectMapper jsonMapper = new ObjectMapper();\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        String json = jsonMapper.writeValueAsString(mapper.toState(second));\n      
  ThresholdedRandomCutForest third = mapper\n                .toModel(jsonMapper.readValue(json, ThresholdedRandomCutForestState.class));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.generateShingledDataWithKey(100, 50, shingleSize,\n                baseDimensions, seed);\n        // update re-instantiated forest\n        for (double[] point : testData.data) {\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n            AnomalyDescriptor thirdResult = third.process(point, 0L);\n            double score = forest.getAnomalyScore(point);\n            assertEquals(score, firstResult.getRCFScore(), 1e-4);\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);\n            forest.update(point);\n        }\n    }\n\n    @Test\n    public void testRoundTripStandardShingledInternal() throws JsonProcessingException {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        System.out.println(\" seed \" + seed);\n        RandomCutForest forest = RandomCutForest.builder().dimensions(dimensions).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).randomSeed(seed).build();\n\n        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .autoAdjust(true).boundingBoxCacheFraction(0).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n     
           .randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .autoAdjust(true).build();\n\n        double value = 0.75 + 0.5 * new Random().nextDouble();\n        first.setLowerThreshold(value);\n        second.setLowerThreshold(value);\n\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,\n                seed, baseDimensions);\n\n        long count = 0;\n\n        for (double[] point : dataWithKeys.data) {\n            AnomalyDescriptor firstResult = first.process(point, count);\n            AnomalyDescriptor secondResult = second.process(point, count);\n            ++count;\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), forest.getAnomalyScore(point), 1e-4);\n            if (firstResult.getAnomalyGrade() > 0) {\n                assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);\n            }\n            forest.update(point);\n        }\n\n        ObjectMapper jsonMapper = new ObjectMapper();\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        String json = jsonMapper.writeValueAsString(mapper.toState(second));\n        ThresholdedRandomCutForest third = mapper\n                .toModel(jsonMapper.readValue(json, ThresholdedRandomCutForestState.class));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,\n                baseDimensions);\n\n        // update re-instantiated forest\n        for (double[] point : testData.data) {\n            AnomalyDescriptor firstResult = first.process(point, count);\n            AnomalyDescriptor secondResult = second.process(point, count);\n            AnomalyDescriptor thirdResult = third.process(point, count);\n            ++count;\n            double score = 
forest.getAnomalyScore(point);\n            assertEquals(score, firstResult.getRCFScore(), 1e-4);\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getDataConfidence(), thirdResult.getDataConfidence(), 1e-10);\n            forest.update(point);\n        }\n        TimedRangeVector one = first.extrapolate(10);\n        TimedRangeVector two = second.extrapolate(10);\n        assertArrayEquals(one.upperTimeStamps, two.upperTimeStamps);\n        assertArrayEquals(one.lowerTimeStamps, two.lowerTimeStamps);\n        assertArrayEquals(one.timeStamps, two.timeStamps);\n        assertArrayEquals(one.rangeVector.values, two.rangeVector.values, 1e-6f);\n        assertArrayEquals(one.rangeVector.upper, two.rangeVector.upper, 1e-6f);\n        assertArrayEquals(one.rangeVector.lower, two.rangeVector.lower, 1e-6f);\n        for (int j = 0; j < 10; j++) {\n            assert (one.lowerTimeStamps[j] <= one.timeStamps[j]);\n            assert (one.upperTimeStamps[j] >= one.timeStamps[j]);\n            assert (one.timeStamps[j] == count + j);\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class)\n    public void testRoundTripStandardInitial(TransformMethod method) {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .autoAdjust(true).transformMethod(method).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                
.randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .autoAdjust(true).transformMethod(method).build();\n\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,\n                baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);\n\n            // serialize + deserialize\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            second = mapper.toModel(mapper.toState(second));\n        }\n\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class)\n    public void testRoundTripStandard(TransformMethod method) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = 0;\n        new Random().nextLong();\n\n        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .transformMethod(method).autoAdjust(true).boundingBoxCacheFraction(0).weights(new double[] { 1.0 })\n                .build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .transformMethod(method).autoAdjust(true).weights(new double[] { 1.0 }).build();\n\n        MultiDimDataWithKey dataWithKeys = 
ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,\n                seed, baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            if (firstResult.getAnomalyGrade() > 0) {\n                assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);\n            }\n\n        }\n\n        // serialize + deserialize\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,\n                baseDimensions);\n\n        // update re-instantiated forest\n        for (double[] point : testData.data) {\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n            AnomalyDescriptor thirdResult = third.process(point, 0L);\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class, names = { \"WEIGHTED\", \"NORMALIZE\", \"NORMALIZE_DIFFERENCE\", \"DIFFERENCE\",\n            \"SUBTRACT_MA\" })\n    public void testRoundTripAugmentedInitial(TransformMethod method) {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        double value = 0.75 + 0.25 * new Random().nextDouble();\n\n        
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(method).anomalyRate(0.01).autoAdjust(true)\n                .weights(new double[] { 1.0, 2.0 }).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(method).anomalyRate(0.01).autoAdjust(true)\n                .weights(new double[] { 1.0, 2.0 }).build();\n\n        first.setLowerThreshold(value);\n        second.setLowerThreshold(value);\n\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,\n                baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);\n\n            // serialize + deserialize\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            second = mapper.toModel(mapper.toState(second));\n        }\n\n    }\n\n    @Test\n    public void testRoundTripAugmentedInitialNone() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        double value = 0.75 + 0.25 * new Random().nextDouble();\n\n        ThresholdedRandomCutForest first = new 
ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(TransformMethod.NONE).anomalyRate(0.01).autoAdjust(true)\n                .weights(new double[] { 1.0, 1.0 }).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(TransformMethod.NONE).anomalyRate(0.01).autoAdjust(true)\n                .weights(new double[] { 1.0, 1.0 }).build();\n\n        first.setLowerThreshold(value);\n        second.setLowerThreshold(value);\n\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,\n                baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);\n\n            // serialize + deserialize\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            second = mapper.toModel(mapper.toState(second));\n        }\n\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class)\n    public void testRoundTripTimeAugmented(TransformMethod method) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        double value = 0.75 + 0.25 * new Random().nextDouble();\n\n        
ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(method).anomalyRate(0.01).autoAdjust(true)\n                .weights(new double[] { 1.0 }).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(method).anomalyRate(0.01).autoAdjust(true)\n                .weights(new double[] { 1.0 }).build();\n\n        first.setLowerThreshold(value);\n        second.setLowerThreshold(value);\n        Random r = new Random();\n        long count = 0;\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,\n                seed, baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            long stamp = 100 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, stamp);\n            AnomalyDescriptor secondResult = second.process(point, stamp);\n            ++count;\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            if (firstResult.getAnomalyGrade() > 0) {\n                assertEquals(secondResult.getAnomalyGrade(), firstResult.getAnomalyGrade(), 1e-10);\n            }\n        }\n\n        // serialize + deserialize\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,\n                baseDimensions);\n\n        // update re-instantiated forest\n   
     for (double[] point : testData.data) {\n            long stamp = 100 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, 0L);\n            AnomalyDescriptor secondResult = second.process(point, 0L);\n            AnomalyDescriptor thirdResult = third.process(point, 0L);\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getAnomalyGrade(), thirdResult.getAnomalyGrade(), 1e-10);\n            ++count;\n        }\n    }\n\n    @ParameterizedTest\n    @EnumSource(value = TransformMethod.class, names = { \"WEIGHTED\", \"NORMALIZE\", \"NORMALIZE_DIFFERENCE\", \"DIFFERENCE\",\n            \"SUBTRACT_MA\" })\n    public void testRoundTripTimeAugmentedNormalize(TransformMethod method) {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)\n                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).transformMethod(method)\n                .internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .weights(new double[] { 1.0, 2.0 }).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true)\n                .internalShinglingEnabled(true).transformMethod(method).shingleSize(shingleSize).anomalyRate(0.01)\n                .weights(new double[] { 1.0, 2.0 }).build();\n\n        Random r = new Random();\n        long count = 0;\n        MultiDimDataWithKey dataWithKeys = 
ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,\n                seed, baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            long stamp = 1000 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, stamp);\n            AnomalyDescriptor secondResult = second.process(point, stamp);\n            ++count;\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n        }\n\n        // serialize + deserialize\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,\n                baseDimensions);\n\n        // update re-instantiated forest\n        for (double[] point : testData.data) {\n            long stamp = 100 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, stamp);\n            AnomalyDescriptor secondResult = second.process(point, stamp);\n            AnomalyDescriptor thirdResult = third.process(point, stamp);\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n            ++count;\n        }\n    }\n\n    @Test\n    public void testRoundTripTimeAugmentedNone() {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)\n                .forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true).transformMethod(TransformMethod.NONE)\n                
.internalShinglingEnabled(true).shingleSize(shingleSize).anomalyRate(0.01)\n                .weights(new double[] { 1.0, 1.0 }).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.TIME_AUGMENTED).normalizeTime(true)\n                .internalShinglingEnabled(true).transformMethod(TransformMethod.NONE).shingleSize(shingleSize)\n                .anomalyRate(0.01).weights(new double[] { 1.0, 1.0 }).build();\n\n        Random r = new Random();\n        long count = 0;\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,\n                seed, baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            long stamp = 1000 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, stamp);\n            AnomalyDescriptor secondResult = second.process(point, stamp);\n            ++count;\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n        }\n\n        // serialize + deserialize\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,\n                baseDimensions);\n\n        // update re-instantiated forest\n        for (double[] point : testData.data) {\n            long stamp = 100 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, stamp);\n            AnomalyDescriptor secondResult = second.process(point, stamp);\n            AnomalyDescriptor thirdResult = third.process(point, stamp);\n            assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            
assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n            ++count;\n        }\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"args\")\n    public void testRoundTripImputeInitial(TransformMethod transformMethod, ImputationMethod imputationMethod) {\n        int sampleSize = 256;\n        int baseDimensions = 2;\n        int shingleSize = 4;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n        System.out.println(seed);\n\n        ThresholdedRandomCutForest first = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(transformMethod).imputationMethod(imputationMethod)\n                .fillValues(new double[] { 1.0, 2.0 }).anomalyRate(0.01).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(transformMethod).imputationMethod(imputationMethod)\n                .fillValues(new double[] { 1.0, 2.0 }).anomalyRate(0.01).build();\n\n        Random r = new Random(0);\n        long count = 0;\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(sampleSize, 50, 100, 5, seed,\n                baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            if (r.nextDouble() > 0.1) {\n                long stamp = 1000 * count + r.nextInt(10) - 5;\n                AnomalyDescriptor firstResult = first.process(point, stamp);\n                AnomalyDescriptor secondResult = second.process(point, stamp);\n                assertEquals(firstResult.getRCFScore(), secondResult.getRCFScore(), 1e-10);\n            }\n            
++count;\n\n            // serialize + deserialize\n            ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n            second = mapper.toModel(mapper.toState(second));\n        }\n\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"args\")\n    public void testRoundTripImpute(TransformMethod transformMethod, ImputationMethod imputationMethod) {\n        int sampleSize = 256;\n        int baseDimensions = 1;\n        int shingleSize = 8;\n        int dimensions = baseDimensions * shingleSize;\n        long seed = new Random().nextLong();\n\n        ThresholdedRandomCutForest first = ThresholdedRandomCutForest.builder().dimensions(dimensions).randomSeed(seed)\n                .forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true).shingleSize(shingleSize)\n                .transformMethod(transformMethod).imputationMethod(imputationMethod).fillValues(new double[] { 1.0 })\n                .anomalyRate(0.01).build();\n        ThresholdedRandomCutForest second = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).forestMode(ForestMode.STREAMING_IMPUTE).internalShinglingEnabled(true)\n                .shingleSize(shingleSize).transformMethod(transformMethod).imputationMethod(imputationMethod)\n                .fillValues(new double[] { 1.0 }).anomalyRate(0.01).build();\n\n        Random r = new Random();\n        long count = 0;\n        MultiDimDataWithKey dataWithKeys = ShingledMultiDimDataWithKeys.getMultiDimData(10 * sampleSize, 50, 100, 5,\n                seed, baseDimensions);\n\n        for (double[] point : dataWithKeys.data) {\n            if (r.nextDouble() > 0.1) {\n                long stamp = 1000 * count + r.nextInt(10) - 5;\n                AnomalyDescriptor firstResult = first.process(point, stamp);\n                AnomalyDescriptor secondResult = second.process(point, stamp);\n                assertEquals(firstResult.getRCFScore(), 
secondResult.getRCFScore(), 1e-10);\n            }\n            ++count;\n        }\n\n        // serialize + deserialize\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForest third = mapper.toModel(mapper.toState(second));\n\n        MultiDimDataWithKey testData = ShingledMultiDimDataWithKeys.getMultiDimData(100, 50, 100, 5, seed,\n                baseDimensions);\n\n        // update re-instantiated forest\n        for (double[] point : testData.data) {\n            long stamp = 1000 * count + r.nextInt(10) - 5;\n            AnomalyDescriptor firstResult = first.process(point, stamp);\n            // AnomalyDescriptor secondResult = second.process(point, stamp);\n            AnomalyDescriptor thirdResult = third.process(point, stamp);\n            // assertEquals(firstResult.getRcfScore(), secondResult.getRcfScore(), 1e-10);\n            assertEquals(firstResult.getRCFScore(), thirdResult.getRCFScore(), 1e-10);\n            ++count;\n        }\n    }\n\n    @Test\n    public void testRoundTripLastDescriptor() {\n        int dimensions = 2;\n        long seed = new Random().nextLong();\n\n        ThresholdedRandomCutForest trcf = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).build();\n\n        double[] point = { 1.0, Double.NaN };\n        long timestamp = 123L;\n\n        trcf.process(point, timestamp, new int[] { 1 });\n\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n        ThresholdedRandomCutForestState state = mapper.toState(trcf);\n        ThresholdedRandomCutForest deserializedTrcf = mapper.toModel(state);\n\n        RCFComputeDescriptor deserializedDescriptor = deserializedTrcf.getPredictorCorrector().getLastDescriptor();\n        assertNotNull(deserializedDescriptor);\n        assertArrayEquals(point, deserializedDescriptor.getCurrentInput());\n        assertEquals(timestamp, 
deserializedDescriptor.getInputTimestamp());\n    }\n\n    @Test\n    public void testRoundTripProcessSequentially() {\n        int dimensions = 2;\n        long seed = new Random().nextLong();\n        ThresholdedRandomCutForestMapper mapper = new ThresholdedRandomCutForestMapper();\n\n        // Case 1: Zero input\n        ThresholdedRandomCutForest emptyTrcf = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).build();\n        ThresholdedRandomCutForestState emptyState = mapper.toState(emptyTrcf);\n        ThresholdedRandomCutForest deserializedEmptyTrcf = mapper.toModel(emptyState);\n        assertNull(deserializedEmptyTrcf.getPredictorCorrector().getLastDescriptor());\n\n        // Case 2: 100 inputs\n        ThresholdedRandomCutForest trcf = new ThresholdedRandomCutForest.Builder<>().dimensions(dimensions)\n                .randomSeed(seed).build();\n\n        int numPoints = 100;\n        double[][] data = new double[numPoints][dimensions];\n        long[] timestamps = new long[numPoints];\n        Random random = new Random(seed);\n\n        for (int i = 0; i < numPoints; i++) {\n            for (int j = 0; j < dimensions; j++) {\n                data[i][j] = random.nextDouble();\n            }\n            timestamps[i] = i * 100L;\n        }\n        // Set the last point to have a missing value\n        data[numPoints - 1][dimensions - 1] = Double.NaN;\n\n        trcf.processSequentially(data, timestamps, d -> true);\n\n        ThresholdedRandomCutForestState state = mapper.toState(trcf);\n        ThresholdedRandomCutForest deserializedTrcf = mapper.toModel(state);\n\n        RCFComputeDescriptor deserializedDescriptor = deserializedTrcf.getPredictorCorrector().getLastDescriptor();\n        assertNotNull(deserializedDescriptor);\n\n        double[] lastPoint = data[numPoints - 1];\n        long lastTimestamp = timestamps[numPoints - 1];\n\n        assertArrayEquals(lastPoint, 
deserializedDescriptor.getCurrentInput());\n        assertEquals(lastTimestamp, deserializedDescriptor.getInputTimestamp());\n    }\n\n    static Stream<Arguments> args() {\n        return transformMethodStream().flatMap(\n                classParameter -> imputationMethod().map(testParameter -> Arguments.of(classParameter, testParameter)));\n    }\n\n    static Stream<ImputationMethod> imputationMethod() {\n        return Stream.of(ImputationMethod.values());\n    }\n\n    static Stream<TransformMethod> transformMethodStream() {\n        return Stream.of(TransformMethod.values());\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/V2TRCFByteBase64Resource.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport lombok.Getter;\n\n@Getter\npublic enum V2TRCFByteBase64Resource {\n\n    TRCF_STATE_1(\"byte_base64_1.txt\"), TRCF_STATE_2(\"byte_base64_2.txt\");\n\n    private final String resource;\n\n    V2TRCFByteBase64Resource(String resource) {\n        this.resource = resource;\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/V2TRCFJsonResource.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport lombok.Getter;\n\n@Getter\npublic enum V2TRCFJsonResource {\n\n    TRCF_1(\"state_1.json\"), TRCF_2(\"state_2.json\");\n\n    private final String resource;\n\n    V2TRCFJsonResource(String resource) {\n        this.resource = resource;\n    }\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/state/V2TRCFToV3StateConverterTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.state;\n\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.fail;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.util.Base64;\nimport java.util.Random;\n\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.EnumSource;\n\nimport com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;\nimport com.fasterxml.jackson.core.JsonProcessingException;\nimport com.fasterxml.jackson.databind.MapperFeature;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport io.protostuff.ProtostuffIOUtil;\nimport io.protostuff.Schema;\nimport io.protostuff.runtime.RuntimeSchema;\n\npublic class V2TRCFToV3StateConverterTest {\n\n    private ThresholdedRandomCutForestMapper trcfMapper = new ThresholdedRandomCutForestMapper();\n\n    @ParameterizedTest\n    @EnumSource(V2TRCFJsonResource.class)\n    public void testJson(V2TRCFJsonResource jsonResource) throws JsonProcessingException {\n        String json = getStateFromFile(jsonResource.getResource());\n        assertNotNull(json);\n        ObjectMapper mapper = new ObjectMapper();\n        
mapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);\n        ThresholdedRandomCutForestState state = mapper.readValue(json, ThresholdedRandomCutForestState.class);\n        ThresholdedRandomCutForest forest = trcfMapper.toModel(state);\n        Random r = new Random(0);\n        for (int i = 0; i < 20000; i++) {\n            double[] point = r.ints(forest.getForest().getDimensions(), 0, 50).asDoubleStream().toArray();\n            forest.process(point, 0L);\n        }\n        assertNotNull(forest);\n    }\n\n    @ParameterizedTest\n    @EnumSource(V2TRCFByteBase64Resource.class)\n    public void testByteBase64(V2TRCFByteBase64Resource byteBase64Resource) {\n        String byteBase64 = getStateFromFile(byteBase64Resource.getResource());\n        assertNotNull(byteBase64);\n        Schema<ThresholdedRandomCutForestState> trcfSchema = RuntimeSchema\n                .getSchema(ThresholdedRandomCutForestState.class);\n        byte[] bytes = Base64.getDecoder().decode(byteBase64);\n        ThresholdedRandomCutForestState state = trcfSchema.newMessage();\n        ProtostuffIOUtil.mergeFrom(bytes, state, trcfSchema);\n        ThresholdedRandomCutForest forest = trcfMapper.toModel(state);\n        assertNotNull(forest);\n    }\n\n    private String getStateFromFile(String resourceFile) {\n        try (InputStream is = V2TRCFToV3StateConverterTest.class.getResourceAsStream(resourceFile);\n                BufferedReader rr = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {\n            StringBuilder b = new StringBuilder();\n            String line;\n            while ((line = rr.readLine()) != null) {\n                b.append(line);\n            }\n            return b.toString();\n        } catch (IOException e) {\n            fail(\"Unable to load resource\");\n        }\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/java/com/amazon/randomcutforest/parkservices/threshold/BasicThresholderTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.parkservices.threshold;\n\nimport static org.junit.jupiter.api.Assertions.assertDoesNotThrow;\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertFalse;\nimport static org.junit.jupiter.api.Assertions.assertNotEquals;\nimport static org.junit.jupiter.api.Assertions.assertNotNull;\nimport static org.junit.jupiter.api.Assertions.assertThrows;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\n\nimport java.util.Random;\nimport java.util.stream.Collectors;\nimport java.util.stream.DoubleStream;\n\nimport org.junit.jupiter.api.Test;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.ValueSource;\n\nimport com.amazon.randomcutforest.config.TransformMethod;\nimport com.amazon.randomcutforest.statistics.Deviation;\n\npublic class BasicThresholderTest {\n\n    @Test\n    void scoreDifferencingTest() {\n        BasicThresholder basicThresholder = new BasicThresholder(0.01);\n        assertThrows(IllegalArgumentException.class, () -> {\n            basicThresholder.setScoreDifferencing(-new Random().nextDouble());\n        });\n        assertThrows(IllegalArgumentException.class, () -> {\n            basicThresholder.setScoreDifferencing(1 + 1e-10 + new Random().nextDouble());\n        });\n        
assertDoesNotThrow(() -> basicThresholder.setScoreDifferencing(new Random().nextDouble()));\n    }\n\n    @Test\n    void constructorTest() {\n        BasicThresholder thresholder = new BasicThresholder(null);\n        assertEquals(thresholder.getDeviations().length, 3);\n\n        BasicThresholder thresholder2 = new BasicThresholder(new Deviation[] { new Deviation(0) });\n        assertNotNull(thresholder2.getSecondaryDeviation());\n\n        double[] list = new double[] { 1.0, 2.0, 3.0 };\n        BasicThresholder basicThresholder = new BasicThresholder(\n                DoubleStream.of(list).boxed().collect(Collectors.toList()), 0.01);\n        assertEquals(basicThresholder.getPrimaryDeviation().getCount(), 3);\n        assertEquals(basicThresholder.getSecondaryDeviation().getCount(), 3);\n        assertEquals(basicThresholder.getPrimaryDeviation().getMean(), 2, 1e-10);\n        assertEquals(basicThresholder.getSecondaryDeviation().getMean(), 2, 1e-10);\n        assertEquals(basicThresholder.getPrimaryDeviation().getDiscount(), 0.01, 1e-10);\n\n        System.out.println(basicThresholder.count);\n        assertFalse(basicThresholder.isDeviationReady());\n        basicThresholder.updatePrimary(0.0);\n        basicThresholder.updatePrimary(0.0);\n        System.out.println(basicThresholder.count);\n        assertFalse(basicThresholder.isDeviationReady());\n        basicThresholder.setScoreDifferencing(0);\n        assertFalse(basicThresholder.isDeviationReady());\n        basicThresholder.setMinimumScores(5);\n        assertTrue(basicThresholder.isDeviationReady());\n        basicThresholder.setScoreDifferencing(1.0);\n        assertFalse(basicThresholder.isDeviationReady());\n\n        basicThresholder.update(0.0, 0.0);\n        basicThresholder.update(0.0, 0.0);\n        assertTrue(basicThresholder.isDeviationReady());\n        basicThresholder.setScoreDifferencing(0.5);\n        assertTrue(basicThresholder.isDeviationReady());\n        
assertEquals(basicThresholder.intermediateTermFraction(), 0.4, 1e-10);\n        basicThresholder.updatePrimary(0.0);\n        assertNotEquals(1, basicThresholder.intermediateTermFraction(), 0.0);\n        basicThresholder.setMinimumScores(4);\n        assertEquals(1, basicThresholder.intermediateTermFraction());\n\n    }\n\n    @ParameterizedTest\n    @ValueSource(booleans = { true, false })\n    void gradeTest(boolean flag) {\n        BasicThresholder thresholder = new BasicThresholder(null);\n        thresholder.setScoreDifferencing(0.0);\n        if (flag) {\n            thresholder.setInitialThreshold(0.0);\n            thresholder.setAbsoluteThreshold(0.0);\n        }\n        assertEquals(0, thresholder.threshold());\n        assertEquals(0, thresholder.getPrimaryThreshold());\n        assertEquals(0, thresholder.getPrimaryGrade(0));\n        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(0.0).weight);\n        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(1.0).weight);\n\n        assertEquals(thresholder.initialThreshold,\n                thresholder.getThresholdAndGrade(0, TransformMethod.NONE, 1, 1).index);\n        assertEquals(thresholder.initialThreshold,\n                thresholder.getThresholdAndGrade(1.0, TransformMethod.NONE, 1, 1).index);\n        thresholder.setCount(12);\n        assertTrue(thresholder.isDeviationReady());\n        assertEquals(thresholder.getSurpriseIndex(1.0, 0, 2.5, 0), 2);\n        assertEquals(thresholder.getPrimaryGrade(0), 0);\n        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(0.0).weight);\n        assertEquals(0, thresholder.getPrimaryThresholdAndGrade(1.0).weight); // threshold 0\n        thresholder.updatePrimary(1.0);\n        assertEquals(1.0, thresholder.getPrimaryThresholdAndGrade(2.0).weight);\n        thresholder.update(1.0, 1.0);\n        thresholder.update(1.0, 0.5);\n        assertEquals(0, thresholder.longTermDeviation(TransformMethod.NONE, 1));\n        
assertEquals(thresholder.getThresholdAndGrade(0, TransformMethod.NONE, 1, 1).weight, 0);\n        assertTrue(thresholder.longTermDeviation(TransformMethod.DIFFERENCE, 1) > 0);\n        assertTrue(thresholder.longTermDeviation(TransformMethod.NORMALIZE_DIFFERENCE, 1) > 0);\n        assertTrue(thresholder.longTermDeviation(TransformMethod.NONE, 2) > 0);\n        assertTrue(thresholder.longTermDeviation(TransformMethod.DIFFERENCE, 2) > 0);\n        assertTrue(thresholder.longTermDeviation(TransformMethod.NORMALIZE_DIFFERENCE, 2) > 0);\n    }\n\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/resources/com/amazon/randomcutforest/parkservices/state/byte_base64_1.txt",
    "content": "CgMyLjETCgMyLjAQ+QMZLUMc6+I2Gj8gHiiAAjAIOCBAIEgBUAFZAAAAAAAAAABgAGgBcAB4AIIBCEZMT0FUXzMyiwEKAzIuMBAgGIE8IAgqCEZMT0FUXzMyMIAQOoBAAAAAAEKcmZpCxgAAQmwAAAAAAABCgkAAQr4AAEJEAAAAAAAAQp1F0ULGAABCSAAAAAAAAEKJF0ZCrgAAQkwAAAAAAABChVVVQsIAAEJEAAAAAAAAQovMzUK6AABCTAAAAAAAAEKYXRdCvgAAQmgAAAAAAABCmszNQsAAAEJsAAAAAAAAQpEcckKwAABCggAAAAAAAEKYccdCxgAAQkwAAAAAAABCoszNQsYAAEJ0AAAAAAAAQo2OOUK4AABCRAAAAAAAAEKZmZpCvgAAQkgAAAAAAABCoGZmQsYAAEJkAAAAAAAAQo1F0UK6AABCWAAAAAAAAEKRAABCwgAAQlQAAAAAAABClcAAQroAAEJwAAAAAAAAQpHMzUK8AABCRAAAAAAAAEKaMzNCxAAAQkQAAAAAAABClmZmQsIAAEJIAAAAAAAAQoxxx0K2AABCUAAAAAAAAEKPAABCuAAAQkwAAAAAAABCmGZmQsQAAEJEAAAAAAAAQpEAAEK8AABCUAAAAAAAAEKEmZpCsgAAQkgAAAAAAABCkgAAQrgAAEJEAAAAAAAAQp9F0ULGAABCWAAAAAAAAEKWMzNCrgAAQogAAAAAAABCnUXRQsIAAEJMAAAAAAAAQpRmZkLCAABCSAAAAAAAAEKoAABCwgAAQoYAAAAAAABCm6LpQsgAAEJkAAAAAAAAQpEXRkK6AABCVAAAAAAAAEKTgABCxgAAQlQAAAAAAABCkZmaQsAAAEJgAAAAAAAAQo+OOULCAABCUAAAAAAAAEKOccdCugAAQkwAAAAAAABCjRdGQrQAAEJEAAAAAAAAQoiAAEK0AABCZAAAAAAAAEKO445CvgAAQlQAAAAAAABCm4AAQr4AAEJ4AAAAAAAAQpZxx0K8AABCZAAAAAAAAEKBZmZCsgAAQlAAAAAAAABCi6LpQrgAAEJYAAAAAAAAQppmZkLAAABCVAAAAAAAAEKbjjlCwAAAQkwAAAAAAABCoqqrQsYAAEJ8AAAAAAAAQpIAAEK4AABCaAAAAAAAAEKMccdCrAAAQlgAAAAAAABCi8ccQq4AAEJcAAAAAAAAQo8cckLAAABCSAAAAAAAAEKgZmZCxAAAQlQAAAAAAABChjjkQsIAAEJEAAAAAAAAQpiLo0K6AABCRAAAAAAAAEP75mZFh7gAQkQAAAAAAABCiWZmQrQAAEJUAAAAAAAAQpnMzULEAABCZAAAAAAAAEKjmZpCwgAAQkgAAAAAAABCiui6QsYAAEJIAAAAAAAAQoGqq0LAAABCSAAAAAAAAEKfF0ZCwgAAQkgAAAAAAABCowAAQsAAAEKKAAAAAAAAQpiqq0LGAABCSAAAAAAAAEKW445CxgAAQkgAAAAAAABCnaLpQsQAAEJEAAAAAAAAQoSZmkKwAABCTAAAAAAAAEKWqqtCxgAAQkwAAAAAAABCl445QsQAAEJYAAAAAAAAQopxx0LAAABCRAAAAAAAAEKWLoxCvgAAQkgAAAAAAABCoQAAQsIAAEJ4AAAAAAAAQpgAAEK4AABCRAAAAAAAAEKZxxxCwgAAQnAAAAAAAABCkmZmQsQAAEJUAAAAAAAAQpt0XULGAABCVAAAAAAAAEKyAABCxgAAQoIAAAAAAABCmHHHQroAAEJQAAAAAAAAQo7oukLCAABCYAAAAAAAAEKjQABCxAAAQkQAAAAAAABCkUAAQsQAAEJYAAAAAAAAQqXMzULCAABCdAAAAAAAAEKNRdFCxgAAQlAAAAAAAABCkOOOQsAAAEJgAAAAAAAAQnhmZkKYAABCSAAAAAAAAEKT0XRCvAAAQmQAAAAAAABCmui6QrwAAEJ0AAAAAAAAQoIAAEKqAABCRAAA
AAAAAEKaZmZCwAAAQlwAAAAAAABCmszNQsYAAEJMAAAAAAAAQpRdF0K+AABCTAAAAAAAAEKXzM1CwgAAQnAAAAAAAABCji6MQrwAAEJQAAAAAAAAQoLjjkLGAABCSAAAAAAAAEKSKqtCugAAQlQAAAAAAABCiKqrQrAAAEJEAAAAAAAAQoFmZkK4AABCRAAAAAAAAEKLoulCuAAAQlAAAAAAAABCk8zNQrwAAEJcAAAAAAAAQprMzULGAABCWAAAAAAAAEJ/jjlCtAAAQkgAAAAAAABChRdGQroAAEJEAAAAAAAAQqDMzUK2AABCeAAAAAAAAEKoOORCwgAAQooAAAAAAABCpgAAQsQAAEJIAAAAAAAAQoPRdEK2AABCVAAAAAAAAEKOOORCxgAAQkwAAAAAAABCgzMzQqQAAEJMAAAAAAAAQptmZkK2AABCdAAAQSAAAEICzM1CvgAAAAAAAAAAAABCljMzQrgAAEJQAAAAAAAAQphmZkLAAABCWAAAAAAAAEKAccdCmgAAQlQAAAAAAABCloAAQsQAAEJMAAAAAAAAQpEzM0KuAABCRAAAAAAAAEKUKqtCyAAAQkgAAAAAAABCnczNQsAAAEJoAAAAAAAAQqiZmkK8AABClgAAAAAAAEKdZmZCxgAAQlgAAAAAAABCkAAAQrYAAEJYAAAAAAAAQoI45EK2AABCRAAAAAAAAEKOAABCxAAAQlQAAAAAAABCqHHHQsQAAEKUAAAAAAAAQp1mZkLGAABCeAAAAAAAAEKDZmZCugAAQkwAAAAAAABCjsAAQqQAAEJYAAAAAAAAQpwAAELCAABCUAAAAAAAAEKbzM1CwAAAQlgAAAAAAABCji6MQrYAAEJYAAAAAAAAQpboukK+AABCRAAAAAAAAEKRoulCwAAAQlgAAAAAAABCmTMzQsIAAEJQAAAAAAAAQpuZmkLGAABCYAAAAAAAAEKgmZpCwAAAQmQAAAAAAABCoHHHQsIAAEJoAAAAAAAAQpSZmkK8AABCggAAAAAAAEKkAABCxgAAQkwAAAAAAABCpAAAQsAAAEKCAAAAAAAAQpLMzULCAABCWAAAAAAAAEKoMzNCwAAAQngAAAAAAABChpmaQrwAAEJIAAAAAAAAQozMzUK6AABCRAAAAAAAAEKRmZpCxAAAQkQAAAAAAABCi6qrQsQAAEJMAAAAAAAAQpBxx0LCAABCTAAAAAAAAEKUZmZCxAAAQlQAAAAAAABCioujQq4AAEJIAAAAAAAAQpEzM0K+AABCRAAAAAAAAEKLMzNCvAAAQkgAAAAAAABCigAAQsAAAEJMAAAAAAAAQp7MzULGAABCXAAAAAAAAEKci6NCvAAAQmwAAAAAAABCnxdGQsYAAEJsAAAAAAAAQqLjjkLGAABCWAAAAAAAAEKMgABCrgAAQkQAAAAAAABCnmZmQsAAAEJYAAAAAAAAQpAAAEK+AABCRAAAAAAAAEKEui9CwgAAQkwAAAAAAABCmGZmQsYAAEJYAAAAAAAAQneZmkKuAABCTAAAAAAAAEKP1VVCxAAAQlwAAAAAAABCkC6MQrQAAEJMAAAAAAAAQpoAAEK+AABCRAAAAAAAAEKci6NCwAAAQkgAAAAAAABCnZmaQsYAAEJYAAAAAAAAQpyZmkLCAABCVAAAAAAAAEJ+LoxCwgAAQkgAAAAAAABCk445QsQAAEJIAAAAAAAAQpPMzUKsAABCaAAAAAAAAEKaOORCxgAAQlwAAAAAAABCh8zNQsYAAEJEAAAAAAAAQpXMzULGAABCRAAAAAAAAEKdMzNCwAAAQoAAAAAAAABCgIujQrYAAEJQAAAAAAAAQo+i6ULGAABCRAAAAAAAAEKoXRdCxgAAQngAAAAAAABCirovQsYAAEJIAAAAAAAAQpFVVULEAABCUAAAAAAAAEKhjjlCxAAAQlQAAAAAAABCimZmQsAAAEJUAAAAAAAAQp2ZmkK+AABCTAAAAAAAAEKS6LpCxAAA
QkQAAAAAAABCkRxyQsIAAEJkAAAAAAAAQooAAEK8AABCRAAAAAAAAEKQui9CtgAAQkQAAAAAAABCi5maQqwAAEJMAAAAAAAAQpUAAELGAABCTAAAAAAAAEKeOORCxgAAQlQAAAAAAABCli6MQsIAAEJMAAAAAAAAQoMAAEKkAABCUAAAAAAAAEKcqqtCwgAAQmwAAAAAAABCnxdGQsYAAEJcAAAAAAAAQo4AAEKyAABCTAAAAAAAAEKVRdFCuAAAQlAAAAAAAABClui6QsYAAEJkAAAAAAAAQpYAAELCAABCWAAAAAAAAEKIAABCwAAAQlAAAAAAAABClIujQsYAAEJUAABBYAAAQZKqq0KyAAAAAAAAAAAAAEKDKqtCmAAAQkgAAAAAAABCkQAAQsAAAEJEAAAAAAAAQqIAAELAAABCeAAAAAAAAEKiXRdCxgAAQmwAAAAAAABCnaLpQsYAAEJUAAAAAAAAQpCZmkLEAABCUAAAAAAAAEKjzM1CwgAAQlAAAAAAAABClVVVQroAAEJQAAAAAAAAQp7jjkLEAABCTAAAAAAAAEKNZmZCtgAAQmAAAAAAAABCkgAAQsYAAEJIAAAAAAAAQpIujELAAABCRAAAAAAAAEKSccdCugAAQmgAAAAAAABCi8ccQsQAAEJIAAAAAAAAQpq6L0LEAABCYAAAAAAAAEKOmZpCwAAAQlgAAAAAAABCmY45QrwAAEJQAAAAAAAAQqDsT0LIAABCRAAAAAAAAEKVzM1CxAAAQlQAAAAAAABCg445QsIAAEJEAAAAAAAAQprjjkK4AABCeAAAAAAAAEKWLoxCugAAQkQAAAAAAABCkWZmQsQAAEJIAAAAAAAAQqIzM0LCAABCYAAAAAAAAEKjRdFCxAAAQmQAAAAAAABCkC6MQsYAAEJIAAAAAAAAQpmOOUK2AABCWAAAAAAAAEKEZmZCwAAAQkQAAAAAAABCnui6QsQAAEJMAAAAAAAAQphxx0LEAABCTAAAAAAAAEKVMzNCvgAAQkQAAAAAAABCoWZmQroAAEJwAAAAAAAAQpEzM0K4AABCRAAAAAAAAEKRAABCugAAQkQAAAAAAABCnszNQsQAAEJUAAAAAAAAQotmZkLAAABCRAAAAAAAAEKFAABCvgAAQkQAAAAAAABCnY45QsIAAEJkAAAAAAAAQo9VVUK0AABCVAAAAAAAAEKWccdCwAAAQmQAAAAAAABCnzMzQr4AAEJEAAAAAAAAQo+ZmkLEAABCUAAAAAAAAEKeccdCxgAAQkQAAAAAAABCiAAAQqQAAEJEAAAAAAAAQp5mZkLEAABCUAAAAAAAAEKQ6LpCxgAAQlQAAAAAAABCnDjkQsAAAEJoAAAAAAAAQo4AAEK+AABCSAAAAAAAAEKczM1CxgAAQlAAAAAAAABCiAAAQsQAAEJIAAAAAAAAQprMzULEAABCUAAAAAAAAEKCAABCqgAAQkQAAAAAAABCiqqrQsIAAEJIAAAAAAAAQqiZmkK8AABChAAAAAAAAEKGAABCtAAAQkQAAAAAAABCk6LpQroAAEJEAAAAAAAAQpTMzUK+AABCUAAAAAAAAEKMqqtCqgAAQkQAAAAAAABCk8zNQr4AAEJcAAAAAAAAQpwAAELGAABCXAAAAAAAAEKLHHJCvgAAQkQAAAAAAABCoczNQsAAAEKAAAAAAAAAQorMzUKwAABCRAAAAAAAAEKDAABCoAAAQkQAAAAAAABCkjjkQsAAAEJoAAAAAAAAQpRmZkLGAABCRAAAAAAAAEKroulCxgAAQoQAAAAAAABCn8ccQsIAAEJEAAAAAAAAQopdF0LEAABCSAAAAAAAAEKNxxxCtgAAQnAAAAAAAABCoMzNQsYAAEJEAAAAAAAAQqK6L0LGAABCZAAAAAAAAEKSQABCvAAAQmgAAAAAAABCjLovQsYAAEJYAAAAAAAAQpYAAELGAABCYAAAAAAAAEKQAABCuAAAQkwAAAAAAABClwAA
QrwAAEJYAAAAAAAAQpyAAELGAABCRAAAAAAAAEKCMzNCwAAAQkQAAAAAAABCpXRdQsQAAEJYAAAAAAAAQqBxx0LGAABCdAAAAAAAAEKUui9CvAAAQlQAAAAAAABCmF0XQsIAAEJEAAAAAAAAQofHHEK6AABCRAAAAAAAAEKVKqtCugAAQlQAAAAAAABCkF0XQrAAAEJMAAAAAAAAQpzjjkLAAABCVAAAAAAAAEKOui9CtAAAQkwAAAAAAABCkIujQsIAAEJQAAAAAAAAQpA45EKwAABCSAAAAAAAAEJ6VVVCnAAAQlAAAAAAAABCigAAQq4AAEJkAAAAAAAAQpCqq0LAAABCTAAAAAAAAEKZzM1CvAAAQnQAAAAAAABCnZmaQr4AAEJEAAAAAAAAQqHMzULGAABCcAAAAAAAAEKh0XRCxAAAQoAAAAAAAABCnOi6QsAAAEJEAAAAAAAAQpK6L0LAAABCSAAAAAAAAEKKccdCsgAAQlQAAAAAAABClyqrQsQAAEJoAABBIAAAQhoAAEK6AAAAAAAAAAAAAEKY445CuAAAQkQAAAAAAABClLovQsAAAEJYAAAAAAAAQpVVVUK6AABCZAAAAAAAAEKdVVVCwgAAQkQAAAAAAABCkLovQsIAAEJEAAAAAAAAQpGOOUK+AABCTAAAAAAAAEKTRdFCuAAAQlQAAAAAAABCnjjkQsIAAEJEAAAAAAAAQplmZkLAAABCaAAAAAAAAEKOAABCxgAAQkgAAAAAAABCoDMzQsIAAEKAAAAAAAAAQpcAAEK0AABCWAAAAAAAAEKRVVVCxAAAQlwAAAAAAABCkWZmQsIAAEJcAAAAAAAAQpccckLGAABCbAAAAAAAAEKVMzNCxgAAQkwAAAAAAABCkui6QsYAAEJMAAAAAAAAQp7jjkK8AABCWAAAAAAAAEKWui9CvAAAQoYAAAAAAABCk2ZmQsYAAEJEAAAAAAAAQppdF0LGAABCUAAAAAAAAEKgmZpCxgAAQkQAAAAAAABCm8ccQr4AAEJMAAAAAAAAQpkcckLGAABCTAAAAAAAAEKWOORCwAAAQkgAAAAAAABClUAAQrIAAEJwAAAAAAAAQp/MzULGAABCaAAAAAAAAEKNVVVCxAAAQlQAAAAAAABCh5maQrwAAEJIAAAAAAAAQpNAAELCAABCWAAAAAAAAEKQAABCugAAQlAAAAAAAABCnTMzQsQAAEJUAAAAAAAAQno45EKgAABCRAAAAAAAAEKZjjlCwgAAQlQAAAAAAABCi445Qp4AAEJsAAAAAAAAQpfMzUK4AABCRAAAAAAAAEKCccdCtAAAQkQAAAAAAABClCqrQrwAAEJEAAAAAAAAQozMzUK8AABCWAAAAAAAAEKOMzNCwgAAQlQAAAAAAABCiNtuQr4AAEJMAAAAAAAAQpSLo0LGAABCSAAAAAAAAEKVRdFCwAAAQkQAAAAAAABCni6MQsYAAEJMAAAAAAAAQpNmZkLGAABCVAAAAAAAAEKfxxxCxgAAQnQAAAAAAABCnDMzQrgAAEJMAAAAAAAAQpszM0LGAABCcAAAAAAAAEKQzM1CsgAAQmAAAAAAAABClwAAQsIAAEJEAAAAAAAAQp0zM0LGAABCdAAAAAAAAEKKqqtCpgAAQkQAAAAAAABClpmaQsAAAEJMAAAAAAAAQpIAAEK2AABCbAAAAAAAAEKUAABCxgAAQkQAAAAAAABCkXRdQrwAAEJQAAAAAAAAQqBxx0LCAABCUAAAAAAAAEKbmZpCxAAAQkwAAAAAAABCmDjkQsIAAEJIAAAAAAAAQquZmkLGAABCggAAAAAAAEKXMzNCvgAAQnAAAAAAAABCmQAAQrgAAEJgAAAAAAAAQpNF0ULEAABCVAAAAAAAAEKQzM1CvAAAQlgAAAAAAABClY45QsAAAEJMAAAAAAAAQpNF0UK6AABCWAAAAAAAAEKRVVVCxAAAQkgAAAAAAABCmQAAQroAAEJYAAAAAAAA
QonHHEK0AABCSAAAAAAAAEKVxxxCwAAAQkQAAAAAAABCmMzNQsIAAEJcAAAAAAAAQpQAAELGAABCUAAAAAAAAEKY6LpCxgAAQkQAAAAAAABClwAAQrQAAEJIAAAAAAAAQnbMzULCAABCRAAAAAAAAEKemZpCxgAAQmgAAAAAAABCiaqrQrIAAEJEAAAAAAAAQqAAAEK+AABCcAAAAAAAAEKIzM1CxAAAQlQAAAAAAABCl0XRQsQAAEJEAAAAAAAAQpiLo0LCAABCRAAAAAAAAEKWAABCuAAAQnAAAAAAAABCnZmaQsIAAEJUAAAAAAAAQpszM0K8AABCfAAAAAAAAEKMzM1CwAAAQkQAAAAAAABCjgAAQsQAAEJEAAAAAAAAQo5dF0LCAABCTAAAAAAAAEKT0XRCxgAAQlAAAAAAAABChwAAQrwAAEJEAAAAAAAAQo/MzULAAABCWAAAAAAAAEKYAABCuAAAQkQAAAAAAABCl0XRQrwAAEJUAAAAAAAAQqBxx0LCAABCcAAAAAAAAEKGzM1CvAAAQlQAAAAAAABCjzMzQsIAAEJcAAAAAAAAQplmZkK+AABCVAAAAAAAAEKOZmZCogAAQmwAAAAAAABCfqqrQqAAAEJEAAAAAAAAQrEXRkLGAABCgAAAAAAAAEKGLoxCrAAAQlgAAAAAAABCiAAAQsIAAEJMAAAAAAAAQpdmZkLEAABCSAAAAAAAAEKRzM1CvAAAQlAAAAAAAABCrKqrQsIAAEKCAAAAAAAAQo0qq0KwAABCTAAAAAAAAEKDZmZCpgAAQkwAAAAAAABCi5maQsIAAEJIAAAAAAAAQo/MzULCAABCTAAAAAAAAEKEccdCugAAQkQAAAAAAABCki6MQsYAAEJUAAAAAAAAQqJmZkLEAABCTAAAAAAAAEKeAABCvgAAQlAAAAAAAABCipmaQqwAAEJMAAAAAAAAQo3MzUK4AABCUAAAAAAAAEKIwABCxAAAQlQAAAAAAABCmKqrQsYAAEJYAAAAAAAAQoQ45EKqAABCXAAAAAAAAEKKAABCtAAAQkQAAAAAAABCiszNQqwAAEJEAAAAAAAAQpRmZkK6AABCUAAAAAAAAEKXdF1CvAAAQkQAAAAAAABChxxyQroAAEJkAAAAAAAAQpei6ULEAABCXAAAAAAAAEKT0XRCugAAQkgAAAAAAABClNVVQsIAAEJEAAAAAAAAQo10XULGAABCTAAAAAAAAEKQAABCwAAAQlQAAAAAAABCnpmaQsgAAEJQAAAAAAAAQpkAAELGAABCSAAAAAAAAEKeZmZCxAAAQmwAAAAAAABCjcAAQsYAAEJEAAAAAAAAQpRmZkLEAABCSAAAAAAAAEJ9MzNCqgAAQlAAAAAAAABCjIujQrwAAEJEAAAAAAAAQpPHHEK6AABCSAAAAAAAAEKEOORCsgAAQkQAAAAAAABCiHHHQsYAAEJEAAAAAAAAQoMXRkK0AABCTAAAAAAAAEKFxxxCsAAAQkwAAAAAAABCi2ZmQsQAAEJMAAAAAAAAQoyqq0KuAABCVAAAAAAAAEKUi6NCxgAAQlQAAAAAAABClgAAQsYAAEJMAAAAAAAAQnVVVUKkAABCSAAAAAAAAEKa6LpCvAAAQmAAAAAAAABCh3RdQqQAAEJkAAAAAAAAQoa6L0K2AABCUAAAAAAAAEKFHHJCtAAAQlAAAAAAAABCjcccQsYAAEJIAAAAAAAAQoxdF0LCAABCRAAAAAAAAEKO1VVCxgAAQlAAAAAAAABClDjkQsIAAEJcAAAAAAAAQoo45EKsAABCRAAAAAAAAEKWqqtCuAAAQkwAAAAAAABCjIujQrYAAEJEAAAAAAAAQqcAAELCAABCggAAAAAAAEJ7gABCrAAAQlAAAAAAAABCk8zNQsYAAEJQAAAAAAAAQpQAAELAAABCTAAAAAAAAEKjHHJCvgAAQowAAAAAAABCioujQq4AAEJEAAAAAAAAQpoqq0LGAABCYAAA
AAAAAEKMAABCwAAAQkwAAAAAAABChszNQsQAAEJEAAAAAAAAQpNF0ULAAABCYAAAAAAAAEKkAABCxgAAQlAAAAAAAABCkgAAQr4AAEJEAAAAAAAAQo1mZkK6AABCaAAAAAAAAEKJZmZCqAAAQkwAAAAAAABCkszNQrQAAEJQAAAAAAAAQpRxx0LGAABCSAAAAAAAAEKdzM1CvgAAQlQAAAAAAABCnoujQsYAAEJIAAAAAAAAQpI45EK8AABCXAAAAAAAAEKZMzNCxgAAQmwAAAAAAABCi1VVQqYAAEJIAAAAAAAAQofMzULAAABCWAAAAAAAAEKSmZpCvgAAQlQAAAAAAABCljjkQrgAAEJgAAAAAAAAQpUzM0LAAABCVAAAAAAAAEKVdF1CxgAAQmAAAAAAAABCki6MQsAAAEJQAAAAAAAAQpqLo0LIAABCZAAAAAAAAEJ8AABCugAAQkQAAAAAAABCgTMzQqYAAEJMAAAAAAAAQoJmZkKsAABCRAAAAAAAAEKeAABCvAAAQnwAAAAAAABCldF0QsQAAEJYAAAAAAAAQp10XULCAABCaAAAAAAAAEKei6NCwgAAQnQAAAAAAABCixdGQrAAAEJMAAAAAAAAQpIzM0K4AABCZAAAAAAAAEKQZmZCvAAAQkQAAAAAAABCjZmaQsAAAEJsAAAAAAAAQpmOOUK8AABCfAAAAAAAAEKaAABCwgAAQlgAAAAAAABCkIAAQsIAAEJsAAAAAAAAQpjMzULCAABCVAAAAAAAAEKkgABCxgAAQlgAAAAAAABCl2ZmQrwAAEJYAAAAAAAAQo/HHELAAABCYAAAAAAAAEKGZmZCsgAAQkQAAAAAAABCigAAQsAAAEJIAAAAAAAAQpd0XUK6AABCVAAAAAAAAEKiMzNCxAAAQlgAAAAAAABCkcAAQrgAAEJUAAAAAAAAQoCZmkKoAABCSAAAAAAAAEKpzM1CwgAAQlwAAAAAAABCkDjkQrgAAEJMAAAAAAAAQoy6L0KoAABCTAAAAAAAAEKrVVVCwAAAQoIAAAAAAABCl8AAQsIAAEJUAABAAUsITBAEEBYQ+QMQ38OA6QEQ+qGQqwIQ5J3tlQIQ4tjQvgEQrOm2uwEQ/bqe6wEQ4fuQpgEQkNbdvgEQ0cK8YRCYwO7sARCzgYvFAhCmpuiAAhD/qcTwARCyi4DtARD22/DtARDT3tyaAhDE2MvXARDtt+HzARCi0Z3AARCct7/qARDZi9OGAhCJ89TXARCYwdrdAhD+0rHyARDfjf/7ARCSucfWARCi/IqnARCmn5XWARDUuc7eARDH+PSsARDWmrjRARD1ptV2EP/uy8wBEM2W+akCEMHqhK0BEMmOhtcCEOTCuWgQiufwfRDVur2rAhC/x/7tARDh05rmARDqgrTwARC9i6vvARCVmMesAhCLobj+ARCWvZCJAhD6l6CfAhCnjP7tARDw+NGTAhC8xMqkARCChp+nARC1ksSYAhCwzOvXARClpZvcARCkrv3uARDFi+bXARCEprN5EMD+t/4BEP7dnMIBEN/+w8QBEID6kvABENy74tgBEIKx7+wBELKtx9YBENqL34ACEIzyldkBEM/fotwBEPeS5NcBEIPB2JEBEJ/ahKwBEObc1f8BEJSW0tkBEApMUABbCIACEAAQ4A8Q+QMQhD8QlL0BEKS7AhC0uQMQxLcEENS1BRDkswYQ9LEHEISwCBCUrgkQpKwKELSqCxDEqAwQ1KYNEOSkDhD0og8QhKEQEJSfERCknRIQtJsTEMSZFBDUlxUQ5JUWEPSTFxCEkhgQlJAZEKSOGhC0jBsQxIocENSIHRDkhh4Q9IQfEISDIBCUgSEQpP8hELT9IhDE+yMQ1PkkEOT3JRD09SYQhPQnEJTyKBCk8CkQtO4qEMTsKxDU6iwQ5OgtEPTmLhCE5S8QlOMwEKThMRC03zIQxN0zENTbNBDk2TUQ9Nc2EITWNxCU1DgQpNI5ELTQOhDEzjsQ
1Mw8EOTKPRD0yD4QhMc/EJTFQBCkw0EQtMFCEMS/QxDUvUQQ5LtFEPS5RhCEuEcQlLZIEKS0SRC0skoQxLBLENSuTBDkrE0Q9KpOEISpTxCUp1AQpKVRELSjUhDEoVMQ1J9UEOSdVRD0m1YQhJpXEJSYWBCkllkQtJRaEMSSWxDUkFwQ5I5dEPSMXhCEi18QlIlgEKSHYRC0hWIQxINjENSBZBDk/2QQ9P1lEIT8ZhCU+mcQpPhoELT2aRDE9GoQ1PJrEOTwbBD07m0QhO1uEJTrbxCk6XAQtOdxEMTlchDU43MQ5OF0EPTfdRCE3nYQlNx3EKTaeBC02HkQxNZ6ENTUexDk0nwQ9NB9EITPfhCUzX8QpMuAARC0yYEBEMTHggEQ1MWDARDkw4QBEPTBhQEQhMCGARCUvocBEKS8iAEQtLqJARDEuIoBENS2iwEQ5LSMARD0so0BEISxjgEQlK+PARCkrZABELSrkQEQxKmSARDUp5MBEOSllAEQ9KOVARCEopYBEJSglwEQpJ6YARC0nJkBEMSamgEQ1JibARDklpwBEPSUnQEQhJOeARCUkZ8BEKSPoAEQtI2hARDEi6IBENSJowEQ5IekARD0haUBEISEpgEQlIKnARCkgKgBELT+qAEQxPypARDU+qoBEOT4qwEQ9PasARCE9a0BEJTzrgEQpPGvARC077ABEMTtsQEQ1OuyARDk6bMBEPTntAEQhOa1ARCU5LYBEKTitwEQtOC4ARDE3rkBENTcugEQ5Nq7ARD02LwBEITXvQEQlNW+ARCk078BELTRwAEQxM/BARDUzcIBEOTLwwEQ9MnEARCEyMUBEJTGxgEQpMTHARC0wsgBEMTAyQEQ1L7KARDkvMsBEPS6zAEQhLnNARCUt84BEKS1zwEQtLPQARDEsdEBENSv0gEQ5K3TARD0q9QBEISq1QEQlKjWARCkptcBELSk2AEQxKLZARDUoNoBEOSe2wEQ9JzcARCEm90BEJSZ3gEQpJffARC0leABEMST4QEQ1JHiARDkj+MBEPSN5AEQhIzlARCUiuYBEKSI5wEQtIboARDEhOkBENSC6gEQ5IDrARD0/usBEIT97AEQlPvtARCk+e4BELT37wEQxPXwARDU8/EBEOTx8gEQ9O/zARCE7vQBEJTs9QEQpOr2ARC06PcBEOAPXGgAcACAAfkDiAEAkAEBmAGABKABgASMAZMBCgMyLjATCOIBFXFMEb8VhQEUvxUAEBe/FRO9Hb8Vjec/vxVoex2/FZ7gHb8V8GosvxWTBh+/FYvYRL8VgAFMvxVe9iK/FZ44Hr8V38pJvxWt3im/FbzXcL8VtTmJvxUgySG/FeS6IL8VQ4pWvxXm3nW/Ffx7Xb8VJH9MvxX9Iii/FRacLr8ViTFxvxWgwWG/FUNhSr8V0eBhvxXPpF+/FWN4Wr8VIPdzvxVSP8O/FSlCmb8VS7anvxUXYTy/FV9cUL8VEqYzvxV+gYW/FbOfi78VvI5kvxWPn6e/FaHOe78VBt9fvxX5yHe/Fb0Tmr8ViFyIvxVtoje/FftbMr8VfWhzvxUn+kW/FTLDkr8VVqqsvxW7TZW/FWXCib8VDBVPvxWVm/m/Fc7+lL8VDVAPwBXg6qi/FThSgL8VXj+GvxXuWpy/FXV3H8AVsASJvxXDzfW/FTXD0r8VC+nLvxXqgQfAFfA1ur8Vz0MYwBWq2Zu/FTKzv78V9ax5vxVESnq/FWwnNb8VO1T8vxWOO/+/FSwLtb8Vy5HkvxUIbu2/FegBsL8VqUyXvxVF5RTAFbbFX8AVnE78vxWqs6W/FftAxr8VXiKavxVp/bu/FRKHu78VNVkQwBUP+Ka/FSK5l78VGXXdvxW6856/FRSMkL8V5g42vxUJg0y/FWRrpL8VD32NvxUlTJ6/FcXgbb8VvjWVvxVV/SrAFXU6378Vjyr/vxWv95y/FV834r8Vx1cWwBXoTYy/Fd8rZb8V+CyDvxWdHJ3AFUfnhMAV
EL4JwBWV7eG/FV09HMAVoeIvwBXUUkTAFYmKR8AV+IHpvxXLSfK/FZ90TcAVivzzvxUl5p6/FVJBob8VlX+GwBV/Oy7AFUYyE8AVtaGrvxWD9eDAFVX1mcAVrtpUwBVizQDAFZmwTMAVvzM1wBXhIyLAFY2wFcAVNG88wBWoT/K/FWQJGcAVG04gwBVgRI3AFdlio78Vi+l5wBXtSk/AFZ6Jwr8VCDSjvxWZNuu/FdZ8asAVCnfJvxVDRpi/FbCLA8AV3ShcwBW6m2jAFVuGh8AV1V2KwBXo6tS/FUWmQ8AVAIUFwBWJXUrAFZFzesAVrYMCwBWjRU7AFa5b5L8Vt7v9vxXLuSrAFdjNN8AVwlGUwBWLkX7AFQNpEcAVaSdAwBUddEDAFV0WrL8VMIWiwBV1ugnAFUWl7L8VRKT2vxV47gvAFbtqhsAVmT/SvxX+2MG/FdLaoMAVm34kwBXrcjfAFU/MAMAVlOCZvxURRVvAFc7b5L8V31dZwBXrPYjAFV9aEMAV0yEmwBVd5EPAFRBG8L8VeRFqvxXmTIW/FUEO278V9bewvxW9OsC/FXUrtr8VEfoHwBULGLq/FYv/qb8V8IM8wBWIz8S/FR6CJ8AVrWisvxUuITvAFTu6Z8AVMfIYwBVvipjAFeMTEMAVC8hfwBWTxRTAFZ35n78VA1ogwBVdaBfAFfyRg8AVw8MYwBUNGifAFb1rw8AV/K+QwBW8DB3AFRVUu78UGwhPEAQQ+AMQ4gEQi8T4EBDJxuw1EM6WyxcQhJ7cKRD82PcGEPyWpxYQwsjOHBDi3ZYzEKzFly8QmrrXChDu3dIHEJWmsC0QmZLWGRDW1Z40EPnZjiIQreSvJhD20bcrEKLb+TUQ2KmcBhCN1MoSEP/Y4DgQruwxEIrezhQQw86zCBDLg4YBENbIsQkQ9YjBJRC43r00EKf7/C4Qi7ymCxCEm4MkEOvWtAwQ68elKBCX9pMqEIeL2y0Q0ayHMhDo8qQ3ELCxrg4Qx/qIBBDC1IUQEM2YggYQkqrgERDmnaIDEO3ZlxQQ6PKfGBDoqfEUEMXn1wsQsOLJARCpzrAjEOb94ioQ7L3kChDcicIMELTnozUQk5LwCRDZm9AbEOit6TsQgtbNBBDQ4YMgELj27R8QnKDtBBD7+7cLEMPq/zkQ9OH8NBDq8nQQorrXJhCTgYM0ELKvuDcQ+ecNEJ/DzioQ7v+aLRDY64wBEIy87C8Q7d/sMhCgvvU5EMWnizkQkgMcIAAw4gE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaPfs/5Sx3ey8VpQBkwEKAzIuMBMI4QEVDS0kvxWylCy/FXj1J78VcQUuvxUEmj+/FfVbKr8VWPQrvxUIAzm/FeHuL78VOkZGvxUIrUS/FQOoMb8VqUA5vxXXBEa/FTUFLL8VvCxNvxWx0lO/FaWwNr8VzsBFvxWXXmu/FVkWS78VrBZivxV2fVG/FRjeNr8VExFyvxUhHkm/FcR2Vr8VruNKvxU7moK/FbNmeb8Vh71OvxVUgXi/FQf8Zr8VJT1mvxVdH8G/FaRgib8V+cZYvxX1/VS/FRF9or8VsD15vxX73Ia/FU/xWL8VgkJ/vxXUTm2/FS+9bb8VW/SBvxVhnnC/FX7BWL8V3nE6vxVFSI+/FZBek78VBOJ8vxVfWIe/FT7JcL8VGfh1vxWxhG+/FTimAsAVGUunvxVeVbO/FYrhir8VNO2VvxXSdmG/FS6j0L8VZ9LyvxW1zpK/FVOOub8VmpagvxVKapG/FfhNqb8V8R8owBWebMS/FUjEmr8V9I+5vxWTMGy/FV5ixb8V/5y6vxVWuue/FcV95L8VVe61vxU1MQHAFa13lL8VpH2fvxWkk+y/FRDOz78Vw6+KvxXMN4m/Feafxr8Vb0OFvxUJQf6/FTP8gb8V8giFvxV+nZC/FQ+4lL8VLZ7xvxWgxMG/FVPih78Vq6GGvxWlKIC/FcUreL8VaKe/vxXl
dMe/FQlKqb8V90P6vxXSipG/Fd0Qlr8VeSKkvxXgHLC/Fcq3er8VLiGNvxU/hoe/FfRVsL8VYWdzvxXjfWHAFWVvCcAVPT9mwBV3XUzAFf0+rb8V01M0wBXethTAFUBCN8AV5gm8vxVPoay/FXcW8b8VvodxwBVnlbS/FUGJAsAVCrbRvxXP5H/AFRmNGcAVQfSavxVgjae/FRTuwL8VjDwGwBVBT4HAFYkZD8AVc9vhvxV43RbAFUUqzb8VXMytvxVgcmHAFT0VTcAV4pLHvxV4jirAFQ8a4L8VhbHBvxXabJ/AFfPY378VaSvWvxUPAkPAFXQdzr8V17J1wBWv8QbAFXD9z78Vu3ACwBWDqXPAFWV//78Vb8KvwBV7gh7AFdE0K8AVpkoYwBXWxiXAFVnlr78Vls+XvxUquSHAFc8t8b8VdvmhwBX6wi7AFae02r8VHCYMwBVt7w3AFaj/CcAVl0zavxVstJ6/FWe7M8AVJkGbwBXjn96/FY24j78VxrlawBUljRbAFSs2778VoSC3vxUA9Lu/FXFB0b8VG9vpvxUl+RjAFYTmMcAVImeJwBVOV3DAFdFJGMAVhsvKvxWehj/AFRH7GcAVyQfBvxU8DQrAFdxis78VhxTHvxVqKoO/FdjPJcAVAXOLvxUvdD3AFYk4IsAVeyjcvxW1MBfAFZlEGcAVsAo8wBUGXoLAFdXaZcAVH4A4wBWcf5vAFTtmAsAVY8NAwBU50kDAFUtkWcAVmcBZwBVeASPAFdCPg78VVnqAvxX515nAFT75u78V24fsvxUgstG/FRQpG8AVkkCyvxX5ghvAFbrT9L8UGwhOEAIQ+AMQ4QEQ3/KfNRCDoq44EP7/yhYQ15yyJxDWp60HEMnVhAkQ0M/zHRCe294CEMiOuDEQ48O5KhDXu8oUENXh2ysQxMa8IxDD9/sQEKafiQUQzq33IhCqob0sENeBwQkQ0OmUEBDOxow5EMnwtAMQ5o+ACBD4i+UpEL+SxTcQ4sTlIhDYwQYQgvD+GxC7lpECENqrswsQsJPZHxDMmuEgEKibjg0QwcrrJRDKnNErELq99TEQsIjfBhDr64YmEI3q5AYQtL/GLBCFsJwHEMu33hEQypv4KxDo4YEwEI6j/xMQ9IDMGBCB/+sJEPPQ2CQQ2NpqELut3Q8Qzf2PCRDh3PskEJbZuA0QlvLSGxCD/Z8KEJ/n6CoQoIakLRC6o5weEJCi7SMQ8ceqDRDFpeI3ELn2wyoQ8JSZDBD7xZsOEIGztCIQydr/IhD/4sInEOjf1jsQj6X5KhDcvf8tEO31vzMQuZ2UMBCV6aYBEJ/6xTUQ6MHZOBCPocg8HCAAMOEBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWihj626r73n8l+UAZMBCgMyLjATCNsBFWXZIr8Vy40kvxUKWCa/FU4jNb8VOeUqvxWbTCq/FcZGLb8VZw49vxUfml2/FbdtVL8VbXw0vxUTUDK/FQp1M78V2pdWvxULPoW/FesXPr8VIPxxvxXc22K/FWiojL8VaaBcvxVSgWa/FZrhVr8VdZhCvxWGx2C/FSBXNL8VC+c+vxV3sVC/FTvki78Vyb2DvxW8ZrC/FXEEuL8VIhGovxV68oO/FXnRdr8Vz+OOvxU5vWW/FX/Wi78Vnty2vxVCmZ2/FbI5gb8VLFNnvxXPSne/FUeCfL8VjlVgvxXvYJC/Ffgbr78VlExQvxX1lqe/FWI3bb8V+8Q9vxVfYmW/FTrxYb8VoFBMvxVfqWa/FYyger8V4E67vxUapsW/FTPVhL8VeFOGvxUVzwnAFTvZwb8VppG/vxW7C9K/FbYJ/r8V/vzsvxVTqaG/FbK3GcAVxvTqvxWGjtK/FXmTrb8Vpq+jvxV9crS/Fa2vj78VfvP5vxUo89O/FTPnAMAVU5YvwBUVtbq/FZrOx78VX2YVwBUw5YS/FfN7tr8VqEaLvxVgFKC/FafLEMAVFuaJvxWUQIi/
Ffo5fb8VFhN1vxValSHAFRg2mL8VgJa+vxXJjQ7AFf3fwL8VEKCYvxUo4uO/FW6N7L8VWvGavxXFdIu/FfWIWL8VJg4+vxVfPY2/FeuRdL8V1SW3vxVl2bS/FWAPm78Vwd5TvxUTDW2/FRYtar8VxWmBvxXiym7AFRggIsAVLQzrvxUn1STAFbpz+b8V84idvxXQ4Ia/FQEs/r8VL3kPwBW8IFTAFerWdcAVv+A1wBX9/hnAFZtg3b8VtbcJwBW1y+W/FbpuAsAV1uWCwBX1qDHAFWX2AsAVI6oxwBWo8HTAFYBUUcAVJrJbwBWCkiLAFd6h8b8VfThbwBUnC/G/FZEg878VyLPOvxVvb63AFe85qL8V45yvvxUaGCzAFbN9JcAVnX0QwBXs05O/Fflko8AVsTL6vxXoutjAFR7tcMAVCO5JwBWHc5zAFfFIl8AV/sOSwBVRstu/Ffg8acAVkLzdvxUSBwzAFST2k8AVRx40wBW+05+/Fdw8CsAVMiLivxU6s76/FVgnqb8VCOOEwBUi4rS/Ffgwm8AVcJkywBVS9ljAFZH1E8AV+i+UvxU12de/FeFqScAVFBMmwBU1yZi/Ffs6pcAVHlWIvxVLrE3AFa/AK8AVIyRQwBWU0KC/FS8RF8AVz0w3wBXJUj/AFe7ngMAVI170vxVmCAfAFQs51r8VY+e8vxXjJ/6/FWNqO8AVgxlBwBWvGQLAFcoMbsAVuWzLvxX3vmLAFZLuLsAVI0SrvxXU6HC/FYT3KMAVT3ZpvxUbmwHAFSqzSsAVoA+MvxWcGkXAFcF1f8AVggMxwBVga/2/FXsBwb8VuSdfwBWN1RjAFWksRsAVf+QzwBUsyAnAFQhD578VxaLBvxUXVY+/FBsITBABEPcDENsBEJO1rAMQwLyoKRCylac5EJOE3ysQrpKhCBD6zOwtEKWjozIQqI+aERCQsdI8EJXZ1hwQ862kFBCn9oMoEMDsuBoQouX9BRCe9voJEPip4CYQxKq1NRD9lNYPEP7m4RAQjaDnLhDl6togEJzv4wMQxrKXFhDq93IQ4rK6NRD+gaU7EO6ayywQpNerHhDNjOYREL28wyIQh9/UJBDRsJMmEK7gkDQQzYmtMBCzyo43EMnd3zoQ+dSTEBDUjZwCEPrjjhEQhbH3ERC+mK0SEPDcojEQgqOYExCQ2JYIEPqvhR4QoOf8AxCv5qcgENyCkBgQ4bzgLRCpspIZEN3euygQ7o/IDRDfp8McEMCiriAQxZW5HRDqmPgEELyjvR8Q4OuTIBDL2cIrEIuSizQQ79+8IxD4xIcnEN3YnSUQmaHLOhDsnNkyELTs0CsQn/vcLBC1qfowELz76zEQm96xNBC1/N83EO3PnTsQ0oXIDxwgADDbATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAForv2mzuTzgu2OAZQBkwEKAzIuMBMI5gEVAIf6vhX6lf6+FT7x+r4VbY4fvxX41xq/FXR6AL8VZYv8vhU8Vy6/FeEVIL8VpIwcvxXFIB6/FfvRF78VJ9kTvxUC5ga/FfDrKr8V1384vxVTXTa/FULSLb8Vu+0+vxWxMk2/FQYHXb8VY34ovxUtdyW/FWEaIb8VDIUlvxUJiSW/Fc6kIb8VJoYHvxWwLoS/Fd4VS78V8PiJvxVGMEe/FVZNgL8V5+M9vxW2Y3u/Fbvtf78VkBV0vxUkTm2/FR3GRr8VhKVivxWUWoq/FS5Zab8VhUGOvxWR4KS/FVm7Mr8VvEGGvxXS61K/FTDQRL8V6OlivxVoyS2/Fbp+SL8Vsg89vxVlFEG/FeZ/L78VIwQvvxXcXJS/FVTrHL8VOSirvxXJ/Za/FXm7ab8VfuTlvxVx442/FV1bAsAViYiMvxXmpMe/FW3ZF8AVuJnTvxVYaEe/FQU3f78VOxSpvxUIs56/FZUSjL8V1KHtvxW4ppq/FWmgAcAV1EqTvxXd/Yq/Fbzheb8V+xrWvxXyiq+/Fcmllr8V
0fGlvxVKebC/FU/pib8V2TehvxUKTMC/FVdW4L8VtL4DwBWeGK6/FcPdmb8VuAhEvxWbYSPAFZccjr8V3hx6vxUxu1u/FW+GiL8V1ad3vxXEwYu/FZelB8AVKiA0vxV/BIq/Fa1Gpr8V4TjivxXf6K6/FV39jL8VxXdcvxW7k4u/FfQhU78VPeWEvxWDkDO/FfjOkr8VsX3KvxWW9wTAFRdMVb8VHuk9vxX2LgDAFU5lw78VMbwXwBUbWtK/FafTC8AVSFkNwBVL7kTAFV+HksAVp7j/vxVnnwPAFa8TZ8AVRSFMwBWdrNm/FR0wn78V7JLdvxU61FbAFbpFK8AVeAc4wBWJfxLAFcjRBcAVFTaovxUQXve/FXSWhMAVBqjrvxW6gNe/FYZHvb8VcJnGvxWsjMW/FVOnX8AVzn48wBX0YT7AFf7LdMAVbMuWwBUzWjjAFberCcAVoq0QwBV3Kfq/FUVX3r8VYsU2wBXNyAHAFSSotL8VfMHWvxUybmjAFa44PcAV8U7KvxXY3AXAFVo98L8VuKW4vxUuKNS/FZk6tsAVT4oCwBXe0Pe/FR7XPsAVsUCuvxVlgOC/FZn8psAVeY+swBUDfWzAFc8e6cAVLv0CwBVvlJTAFcBkZcAVEaYVwBXs4tS/FUXQL8AVVRDMvxVH7Z2/FVTaw78Vye5NwBWS4yvAFQ1Axb8VMXQOwBWYxZm/FfYZpb8VNUeawBVlvqHAFRFW6L8V8mauvxX1G1vAFS9MA8AVG+GVvxXf0va/FTJAEsAVpyoVwBVESxzAFR9T/b8Vhc/bvxWUhRPAFQuzs78VyTE0wBWFvkXAFfAdZcAV0CgQwBUYmPe/Fcn+3cAVkGI+wBXCHg3AFUdndL8VMxghwBWv/b2/FWBrDsAVLgyzvxVFPSDAFWm2SsAV5KlOvxVUhL6/FQyc4r8VBJoRwBX9vvq/FXVvI8AVQPQewBXWb2TAFYz0zb8V1bmRvxVo0EO/FBsIUBACEPYDEOYBEJzOizUQl7jpKBCs7+cbEJH0vikQiPr5ERC7q44EEJaElh8QhLnQIBDj4vQ2EPXolAsQ3sL6BxCOvcw2EIeIihkQoJn3HRDui8cKEKiw2gUQr8HkCxCEx90wEOXdoTsQuau5HhCj3ucREL6CswUQt7yyCBCg2JccEMCv3RkQ0OOnGRCC5dMOEK3LrigQ9qChHxC78c4hEKDp1gsQkankIBDm+vUOEJ7N8ysQ7vWcARDRzakGEKKeszcQz8OfOBDP24gQEP7iuQEQ7s7EJxCKgJIJEMT32iIQypP8EhCErLEUENb9ky4Qz9HvAxDO7PQTEPSzvSUQypvmFxDCrLYYEMLZtx8QwuG2HBC7m4MKEN3+nzIQuL/hHBDZwqAeEJXh3BUQ8tyZJxDW6J8FELmdgwEQ/KCGIxCCqrQ2EPmNpAwQitH7JRDgrss7EKTIkA0QjcKRGxC5/p0rEKDf7CwQsMeBLxC/naowEOOtyDMQhqzzIBDA55U6EOvYrDkQ3/0OHCAAMOYBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWiRhNfF1KqXkjiUAZMBCgMyLjATCOUBFX20774VRMz1vhWf7P6+FVbSAb8VTAsKvxWqX/++FXtwBL8VQzANvxVCBhG/FVnTDL8VqWEOvxXzFg6/FXHAEL8VOB4NvxVZ8R6/FdiKGb8VplA2vxV4Vha/Ff31Eb8VR9YZvxXjhxe/FUuWE78V5ngZvxVl2Fe/FSxIOL8Vf6UcvxWokBu/FSC9Hb8VHQphvxWeqmG/FVT0nL8VcPwgvxXD2We/FS6mPb8VAIVJvxWWLi+/FcTEGL8VnlJ6vxXMoXa/Fby2lr8VXmkjvxUVwKe/FY47HL8VAt01vxXX3Gu/FQEZUb8VdlcovxUUj1q/FZ7Zgb8VEiGOvxXRWjy/Fbo7KL8VrXpAvxUEySi/FRrzI78VYsspvxW5roy/FeX7jr8V2LDIvxXEZq6/FTTz
ir8VaVbFvxXRlcW/FXy9dL8VFmyUvxWPq46/Fa38f78VeQt9vxWs/ri/FYvml78VMPlxvxUvLJe/Feon078VDQepvxUUm1+/FaMlnr8Vw8ufvxWM8Ym/FcAO6b8VvYUAwBWrgwnAFWht6b8VRsqZvxX3qa2/FaOC2r8VpyusvxX21ZW/Fd5Ztr8VzuamvxUBQoy/FTXIbL8V6q+tvxUqKlm/FYfgLL8VpHpYvxWfto6/Ff6g0r8VSwOcvxXRcby/FRAC6r8Vg0fFvxWS61i/FeIpbb8V9CVcvxVNrYy/FbmWVr8VbgS4vxV1vGC/FSnRKr8V9uCMvxXCVkC/FQgMPL8VRykqvxUWmbG/FS4kH8AVjNQ/wBX2mxPAFU8eK8AVOM5UwBXuEAjAFWM+DMAV9lnvvxU3OuC/FR8rz78VOtrPvxWQgxHAFYZmrMAVSfSawBXbO0nAFVtHWsAV3CnyvxUd/by/FeLZkb8VDdePwBUX2oO/FaNciMAVfOmmvxUSutW/FXEvFcAVwnL9vxW/QSDAFatjeL8VfIDPvxVk4xnAFcHdKcAVjljrvxXAt3nAFROq+b8VXfREwBWuZZu/FV3VhL8VwksrwBVaOgLAFQOxAcAVSfayvxUPa5S/FQcKAsAVG3HzvxVSK/S/FX7iRcAVuUSZwBW4+RrAFZ9xJsAV0p9TwBWn6VPAFaKZxb8VPGaNwBVDWdy/FawOCsAV2YM3wBVCsOK/FRhgTsAVeEv5vxXvZhrAFZSDDMAVpWAmwBWD+r2/FasEIsAVS50pwBWube2/FUG1JMAVlMK9vxV26iTAFZAHEMAVKBWuvxXvLiHAFWRhh78VThR4wBXKKi7AFc/kdb8VCOTBvxUUsTfAFX+I7L8VyZ1VwBWRIvy/FZwC9L8VSa/CvxVHtnDAFaK90L8VXAACwBUO14TAFczIz78VQnb5vxWl1c3AFRUVkL8VtRiovxUsYiXAFUBqi8AVawWvvxXfqJu/FT3WOMAVmBCqvxU+OHC/FbXfUcAVr56PwBVPeHS/FcetnL8Vgt+BvxV4zwrAFbzVwb8Vax2NvxXZnRrAFVIwIsAVpCC8vxVa0Ve/Feca+r8VnZOPvxWEgGTAFRoI7r8UGwhQEAAQ9wMQ5QEQ5qGzLhCMwZYrEMrG7hwQlZ6PDBCi/McJEJOFtRUQ0qCTHBCY4PkoELqIzTYQ57C7DxDXlbwSEODEwhUQlMzGCxCxz7YbENST/CkQh5jqJhCwpbEtEJKzqTQQyOO0DhD+x6IPENzjyhAQvrbrERCJ3MEcELDmuCUQ+ovGLBCIjZcXEO7F3gkQ1Y+BPBDUw5klEPCEoywQxO6EKxDY5P0lEKTR2CkQs9/jLBC5vKk3EPGV9zEQha2FNRDSlfMGEOmPphIQ5c3CAxDTgckVEM741w0QsYv/NxDWvMwREOiy9Q0Q5dSTCBDyt/wJEMC0vgIQg9nqOxCPuaQrEP3nmwIQm+CbDhCZtrsKELfOjyIQ1YuWGhDK3bMwEND2qioQ2rbKHhCoqOIeEIGPqzYQrOyWNxC2iO0iEKCXzSMQr+reJRC4j4goELq0uSMQjouYKRCUjucMEIi9/y0QkuXFARDOxYsvEOTkwjIQlqrlNBCJidY4EIXA3joQmOfmPBD3AxwgADDlATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFo8uHpk9uPj/+dAZQBkwEKAzIuMBMI5AEV1XEIvxWzegm/FQJ0CL8VjkAWvxW65Qm/FYLFCL8VV78LvxUoTh2/FWaqKr8VqDsPvxWVkjm/FT2PD78VHBAMvxVekBK/FVVgD78VeOQdvxXUix2/FTwvUb8VKpAvvxUUwBO/FertHL8V+2xGvxWRNU6/FUWOJL8VaaosvxUhwS2/FWpFGL8VDGAfvxW6wT2/FRSWI78VswEivxXwE0e/FXz1Q78VWtc9vxUYFVS/FQDWVr8Ve6SFvxVvpDu/FftSf78VyI5BvxXQ6UK/
FeKKW78Vn00fvxVBxUe/FWplYr8V9MhyvxUoxmO/FR0dWL8VEwZDvxXkJ8u/FXYvfr8VPWRBvxVFREy/FcMsPL8VNxYfvxXxYUu/FbxOSr8VX6lVvxXzu7+/FZiZR78VisuEvxXefk+/FS/L778V0RhhvxUmjmq/FXjzk78VixSgvxVDI5O/FW67wr8V6EMQwBXzl5i/FUvTrL8VpNt7vxVPn4e/FUhbrL8VTfN5vxXgHom/FYOPu78VBIekvxWmWgXAFcnrcL8Vu2h2vxXaS1G/FRFMsb8VJO2WvxVsxiu/FZhnQ78VjKaWvxVwab+/Fffkdb8VEP2EvxVULZq/FWfe6r8V7iG7vxURcqa/FSObc78VkreAvxUAWAHAFbGxn78V6lAAwBWk1Oq/FZXtkr8VqAfuvxXFM3y/FV+Yfb8V571yvxXDt1q/FTNGdb8VTedwvxX6FC2/FX+Tc78VeHSMvxVUIGO/FTk0Tb8V2aWZvxVJO4m/FUmuXL8Vfx/QvxV1eXfAFWuvbr8V4v9hvxVDN7+/FWRFwb8VtQ8mwBWE81bAFSJLl8AVXJcJwBUKlGK/FfyZfL8Vx7KnwBV0UMu/FQxGl78V5kaZvxXQ9d+/FeMGgMAVWAK9vxWL67C/FSsT8L8VLG3SvxUetrbAFfpiRMAVN1kcwBUIc+O/FTYDO8AVxRvcvxUsk5m/FY5/o78VkbNKwBWAGru/FaUY5b8VkUlOwBUlRZG/FWKArr8VB1bsvxUaI5TAFZ7n178V+9DuvxWQlsu/FXd/DMAVWtREwBW7xyXAFY4mwL8VqpXWvxWreg/AFSNQoL8V2MUFwBUtIcq/Fd/Nu78VocC1vxXO76K/FX8F078V2YfFvxVxdm/AFdayisAVazJHwBUdpLG/FbBx578V7XQbwBVmwQLAFTJdxb8V5r/PvxXzIgLAFRhEyb8VzOs3wBUcKLu/FWoZIMAVwlF8wBVOT8O/FduK7L8VrWgPwBUnJ/O/FVcvor8VYkyOvxV6IWDAFfymCsAVGWQDwBXOG0LAFe3M778V0fb0vxU17bfAFYy20cAVFXbtvxWUOg7AFcbDT8AV5hoRwBW6v03AFfnM778Vw32WvxWJWp+/Ffcjo8AVHRqOvxVwoGfAFZSlk78V8O4GwBWeTnW/FRsygsAVcPmcvxVNB1XAFdWK0r8VFcaLvxWhBSXAFYLVXsAVMr6fvxVNokjAFTUgmb8VoPDQvxWRIQfAFX8iW78UGwhPEAAQ9wMQ5AEQquusLBDvhLkPENCL9QIQ57WPDhCa590XELrHgAkQwf7XBxDoh7ApEIul+zYQiZe/EBDdy7AcEI6C4AgQ1azFGxD00M4SEMvN4SEQ9+/VDBDZzvMsEIPmljQQrZ/6PBCLwpQHENyE7DQQzMH+KBCt3/ATEP/k1yIQ876dHBDOtq43EKiS6BsQ+4XXHRCY9tQeEPDO3DkQ8dDuCxD++4A1EMTxkCsQuva/LBDw8bs8EJq23TIQruWXOBDP4rc8EM2xlB8Q243dARCv5co3EMbjnCMQ/87ALRCxuOQ2EKyblwsQ1N/yMxD4uf4nEIf3MxDTmqodEJSyox0Q2/i3MRDgtuAnEKmjwCEQzNDxJBCn+csfEM/MvQIQocvKHhDIn9EBEIq31DwQq//OIRCQg5IEEOiyuzYQrc7PJRDgjacUENTllSgQ24eeKhCipXkQjduNDxCC8rMZENm68y4Qp4GHOBCm7KADEN3ktjQQurn4FBDB07M5EO248wYcIAAw5AE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaJvN2dK8kseydpQBkwEKAzIuMBMI4QEVrIoHvxVWwA6/FRCCCr8VcZ8QvxUgohS/FVdGGb8Vu30XvxXpxRu/FUO0Nb8VEyQwvxUs8BW/FU2cG78VM0EivxVftBi/FVdNGr8VgfQcvxX+HB2/FdrjS78Vgt1evxUop0a/FXXDhL8VYqUovxV5MBy/FbGs
OL8VabA1vxXcRSq/FSFHKb8VeZAZvxWW/X+/FTh1dL8V5zUdvxWgAlm/FYTaNb8VvOmKvxWyXna/Fdb8aL8V1myAvxWxnZe/FT5jsb8V5VhbvxUvk0u/FVPGiL8VnW20vxW9RCq/FbWKhr8VsQ6IvxWN/Ey/FZpePb8Vt8pHvxWlz7e/FX0SP78VYf4tvxUaRD6/FUKhk78V2fxXvxVHkxu/Ff26fL8VpfO3vxVf3p+/FUU42b8VnEvPvxWVuqi/FVyPj78V+HKHvxWSWni/Fep4db8VTTdTvxVss6i/Fbplmr8VKyOuvxVsjnq/FYK5pL8VKNXQvxV/toO/FaPLgL8VPovnvxX7SfW/FewO4L8VSRoswBVOgWy/FXV+uL8VIOnsvxX/Toe/FfcHmL8VtAKWvxUp5vi/FVCcB8AVfKO/vxW7cY2/FepwyL8Vc3anvxXOOau/FSsx8L8VdbaLvxWHBVq/FQmkT78VFiWQvxWFE0u/Fdoewb8VBhzYvxXZ0dC/FStHpL8VudC/vxXurD2/FYIeZL8VcRVEvxXSe3C/FbP0y78VadbHvxXGAam/Ff2Fyr8VzkccvxUBB3W/FcsMEcAVCbKQwBW4Cry/FfsBIMAVY1PRvxWHQtq/FVdah8AVEcUKwBWB7SDAFewrEMAV/dVSwBVJawjAFWHp6b8V4euGwBXD5ca/FVz/p78V0V/xvxVIRo6/FfUXcsAVx5WyvxUw9yXAFV+mp78VjlHSvxUTdkDAFcwPCsAVJz5ZwBXbVg3AFU0Gur8VA6aivxWxfYm/FTJIz78VL6UPwBWjYx7AFc2gBMAV/AnavxXqc66/FVxWFcAVHjwEwBWCmGXAFYWRDcAVXrgRwBVcsELAFSOr5b8VUUImwBXWvHjAFV+WVsAVnawIwBUCLXC/FcOVAMAVLMcwwBWWxlvAFRFmI8AVVqPtvxXpgs6/FWd09L8VushNwBU/dQvAFYFkOMAVdpH6vxVsOjXAFY1bQcAVeK8MwBXJRhbAFbze2r8VYckowBXOzt+/FfwtJsAV2m8wwBVJlVfAFekT678V/FqfwBW76zHAFS8rBcAV2EyHwBXso0zAFfVGI8AVUOiVwBXjJPS/FT7NosAV5waKvxUxU+S/FX90XcAVk0lpvxXhSvq/FQScEcAV9NkzwBUf5pjAFSikjsAVNf86wBX95B7AFRdFKcAVA2PGvxWq/+G/FaeS1b8V3eP5vxXd6jTAFYtlK8AV6VJzvxWN6nq/FafsHsAV/Wd+wBXf0u+/FQ2HL8AVrAXSvxV8nxHAFVCgAMAVIVe9vxX1zd6/FRZ5+78VTYkOwBVEIlTAFffTiL8UGwhOEAAQ+AMQ4QEQsIiyBhC9kpU1ENW7vjQQo+e3LRDUrKoEEJ+ejzIQ8ZHVIBDkof0rEIGpmDcQ36PFAxDej9gVEN6O5hkQ5pPwHBDspIUhEJ7SjTAQ4dK6DRCf0t8wEIm0gREQwujmERCux+YBEJmHnRQQnKbkFRCri4cYEI6rqhgQ2OGzDBChi5swEJ382goQmbDUDhCc1YUlEOC7wyUQ9IW6HRC55Y0REMn1hywQ/NzkLhCHg4I1EPDyDRDj59gBEJzN7C8QvbKjEhCQyowzEJn70xMQpoSUNBCLiPAYEKq7qy4Qr97cGBDHsagKENW2+DsQtf7kExDy5toZEILu9BoQ95OJEBCC+7gRENPvvgkQspSECxDWo6opEJblrwsQwcb/IRCK9KojEMyFgCQQxK6ZLhDd1p4DEIeBuigQnuPjKhCH5PEhENv83isQ+NvAGRDH0IEOEPef2DIQmLPpCRCyiuE0EP6U9Q8QyoTYNxDwiaoBELeRvjoQxLG0PRwgADDhATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFotv2a47+b45kBlAGTAQoDMi4wEwjoARVgWgy/FS2QD78VaCMUvxWS+w+/FSr3FL8VJeMbvxUtPCy/FcYYEL8VOGAQvxWO8B6/
FalPGb8V7/o4vxW9wh6/FUkxLr8VjU9BvxWoVjy/FbHhI78VyBRZvxUJ/yW/Fd8JLb8V5KNUvxVS3im/FfBnJL8Vk5A9vxUBmDq/FR/kIL8VSS5EvxVcaky/FUQDZ78V2VqrvxWlGJO/Fc3PQr8VSZNyvxXe3UG/FZlTpL8Vd22NvxUqXJa/FbehML8V+cFhvxUmc1G/FZIiLr8VYTFdvxVcMJG/Ffz0Qb8VFmhRvxX7pzG/FW1fXr8VPXhSvxUQiVm/FfOjPL8VB8eOvxU/1Dq/FVjzQr8VOuCbvxV3P2q/FZUqdr8V+6ZYvxUdX5O/FcA1b78VJlLRvxV0/qu/FdHzv78V7++rvxVrRRPAFZzbUb8VAPSZvxUMb4+/FezpYL8VHHXJvxXA6A7AFeZOd8AVVVq/vxVdIr+/FVUfpL8VSr/cvxXa0XK/FUryub8VNKdvvxW/Cry/FXwZXL8VWRSpvxV25za/FSMqLr8V4oBovxXTTfW/FSfP278VWby2vxVm7LG/FeSrpr8V2YAAwBXwqfK/FRNyeb8VMQWwvxVSmJy/FSDbm78V3kFrvxW2Hmu/FfA2dL8V+txhvxVr6p6/FR6R3L8VyAjKvxVJyK2/FeX4kL8V2NqHvxWQCaO/FZdtnL8VPJDkvxXsqe+/FfL6q78VIbShvxX9x5G/FXA0kL8VKdxivxXJllu/FX7L1b8VInmKwBWR2Ky/FZsA378VwPIlwBVCESTAFaLWr78VpKGyvxXD4Mu/Fd8CisAVNlKIwBVNEyDAFbfFWMAVqqd5wBUD14XAFRBLacAVvo2yvxU55PW/FdHM+78VA7HEvxWH0FTAFTMTEMAVGGT+vxWG5N2/FWT9G8AVTyw6wBXL+nzAFUN8j8AV1Zn7vxUWaPC/FRrNRcAVUhqiwBXIeIDAFdQld8AVomXhvxXl3gXAFdI07r8Va1ULwBUSqGvAFa/kUcAVh8T2vxWvhSvAFZYJucAVD02+vxW5HEHAFdW7rL8Vr4swwBUpacO/FS4u58AVXUOBvxXvWvm/FWA1/L8VY6RUwBVh4rG/FcfQ9r8VjRoqwBX8ljfAFcltNsAVG1vWwBWNGM+/Fb9QzL8Vg7KzvxV+3ta/FdU/HsAVCe1IwBWbfAvAFSM5BMAVkSdKwBX8TUfAFRiTBMAVzDuRwBUXS2fAFTnP/r8VVj5swBWyu2jAFVnQxr8VlZu4wBXLZ5rAFVv6s78Vco84wBUntz7AFQcCjL8Vcl76vxVXLoW/FSI33L8VnBE+wBXTzUHAFcxOBcAVNssGwBVnLfy/Fe2NSsAVgdCkwBWPIZy/FRKmIMAVhsMGwBUeKCnAFdHpBsAVu+eOwBXOGmjAFeSko78Vhu87wBVTu/K/FUMqMMAV7zpywBWRShXAFYXAjsAV+pm9vxXOcqa/Ffd3rb8V1szkvxUBnGTAFaElmr8VeryPvxUO0xXAFQKL2r8VLubWvxV6YkDAFBsIURABEPMDEOgBEMbF/xAQqp7MLRD15/sIEPWd4yYQ443AEBDfjLUWEJX76x0Q2K7MLBDD7rcyEImKcBDPya8REPf7/BgQ19nqGBDujaIdEMLZxigQndvuExCH/PgqEI6k7w4Qo9b0NxCv47gzEK6tsw4Q19ndEhDh080hEPqkhA0Qo6HpFxDN+5cZELaIuBsQtrvAHRCe2eEfEN/T3hUQu/3NDBDvuMoXEI+qlycQiY+7LxCUiP0tELatlzAQtJjADhCvirocELnn/BEQvN/JARDXluMQEI6VhSAQgZfwJRCx3ZA3EOHmrRoQlJGHChC836Y2EKmaigQQ9KbSFhDVjIokEOzFuAQQor/GIxDQkaobEJLgzTYQ5dfAHBDxqOMEEOb/6DUQktS9JxC2xrk3EK/04TIQmIreNRCro9UaEMvK4jEQiJDSJBCo1K4mEJ2QhDUQj6abKBD3yLUqEPOhpysQn8aNLRDUzo8vELzk4gcQv6qRMRDPutQxEKfpljUQ18zW
NxD2otU6EPIDHCAAMOgBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWielMPco5j34laUAZMBCgMyLjATCOcBFdnL+L4VGwn/vhW6TwG/FUhNB78VpvH/vhVQMQK/FbNjEb8V3UsVvxWnUgu/FURiE78VP64CvxUAWQu/FVoxDb8VWigbvxUg/Sy/FdTbLr8VQPAsvxXjFBe/FRyjC78V0a01vxUSpx2/FavTBr8VUE4EvxXzaA+/FUJtGb8VRkYmvxV30SS/FaqUHL8ViRQovxXC3Dm/FcQfWr8VVHJUvxVPXjC/FaXvTb8VXa1QvxXF9x+/FbUPS78Vq08VvxUslxm/FUAROr8Vb9xlvxWSBCG/FTEMNL8VmP5ovxWcHDm/FS9BQL8VzlRRvxUWDYW/FVGXEr8Vk+98vxXMi0u/FcCoTr8VLhNTvxX8tEy/Fb9+Jb8VeJAkvxXT2x6/FQqkSb8VSq1zvxUtOrG/FUv2g78VlAe/vxVto1+/FeAFZ78Vio/xvxXwPz2/FfugN78VdiRmvxXD+KW/FTtAB8AV9FcBwBVT+my/FcrrKr8VojOnvxW5Hpy/FST1lr8VUCw0vxWP+hbAFeitnr8VnpqCvxUi4XW/FbQh4b8VEBCjvxWqrTG/FT5yqL8V8wN1vxWe+b2/FX07gr8VqCq2vxUdKXm/FRQNdL8VIp1gvxWcJ4+/Fb/7pr8Ve9IqwBVFLYm/FUsIs78VLnEevxW9Bma/FU0HpL8VNaHkvxVzsZi/Fe/MY78VeO1dvxWztE6/Ffu3lr8V6fGNvxXgnQPAFf1TBMAV5N9IvxV8f1C/FQs2Lb8VOnlnvxWBZDy/FYxgIL8VCopRvxXpuybAFRwz8b8V7rSFvxWxXtG/FR/w0b8V0HfDvxUvXhLAFWDf4L8V70nmvxX4HEDAFU7DqsAVDK3PvxWbz4K/FXRQk8AVPy+GwBXQtULAFefS9L8VB1FDvxW0HUzAFe3nLMAV0y2PvxU3SQrAFYdbAMAV9eYvwBUwx0rAFbecdcAV3z8FwBV8KYe/FYyulr8VX0OqvxWNxNe/FSVX2b8VQPghwBVKhjHAFXKCDcAV8vK/vxWm9aW/FZrLlMAVOnTtvxXNpiLAFaiKZMAVlbWevxWkljPAFc+tCcAVnYfqvxUmy5nAFWk1878VVC4GwBUgpRjAFTBnk8AVNBI6wBVt4WLAFVsagr8VFrYtwBXoKFfAFRRvwb8VAtt+vxVpETHAFZeiCsAVvd+/vxX9YKe/Fb03HcAVFSjSvxUnPw3AFRqc+78VCFZQwBUifOm/Ffowy78Vx6VCwBWv2CHAFTCej78VmHFWwBWirOS/FfeyQcAVmllKwBVLygLAFR7pz78Ve67VvxX6/ui/FXBIyMAV6pMovxXF3PO/FYr/g78VYGnTvxW3GifAFU7zEcAVdbcRwBWUhTrAFR5mWcAVvDjEvxVN+5jAFcIZ8L8Vi5IIwBWaPKW/FRXjY8AVjPm/vxWZmcm/Fa4Fv78VNK6TvxXiWJfAFcU4HMAVOiyHwBVSdxjAFdpLGcAVUTqIvxUjVdq/FXboo78VMPf3vxUbz6i/FbshkMAV3nYuwBWoOVO/FX3VjL8VcCiwvxUXMW2/FBsIUBABEPIDEOcBEJGxkDoQlYeyJxCP/8oZEK7M5icQrf71BBCshZIYEJyNxxwQ/vqFJRD/i+YuEIDR1ikQvqWNFBDK7YQEENqR9SQQzKS/EBCZ+PwFEM+hvgwQ1tbQKxD88fUjEPfq8DoQvJavBxDQlPwDEKP1lwoQ3f6iOBCttI4pEN3uoSoQ7La4DRCp2OorEPP04jYQ25rYHhCH+D0Qs5HEIRCdgt83EKCnmjAQ2fagKBDEqNkxEOnn1y4QivTNNBCfvuk5EJic5xAQ3t2fBxDM4rkSELrUgxIQl8uhAhDu+r4pEIicgBQQpZuQBRCojqoVEPKtpwoQi/udGRCAkcE2EKHBkwYQo4r+NxDR4ZA2ELzo
xBsQ2YH3GxDH/9M2EIbcviIQ9dWWCxDnycgMEOD/3SAQ/MK2IBDyr4EMEOKunyIQgobKKRD1r4YGEMupiwwQ36XDJxCBzI8oEN7ZrioQt9WBKxDflcEtENr19DcQ7r+mAxDt/b40ELjRjDcQp7TeBhCO3LgoHCAAMOcBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWjFnLPy2fSQyskBlAGTAQoDMi4wEwjfARWEDhm/Feb4Gb8VK44avxU/dxq/Ff0gHb8VunscvxVwxh+/FZYkIL8VoS8xvxWC/TO/FQ69IL8VYucgvxUMph+/FfcyPL8VeO88vxXN9UO/FUHgL78VY/46vxWaVTu/Fcd2Or8Vd5dnvxUgOCO/FTO7Zr8VQMEivxWdKye/FdmqI78VzckfvxX8kIi/FWYFVr8VwSdjvxW1lmi/FWEEVL8VWRyEvxUeRzK/FXU/h78V+cJBvxXLWVa/FV8zrb8VZi5LvxXfFz+/FYphiL8VVShuvxVSB4y/FQOkY78VeykpvxWoHYm/FXBPeb8VS+eLvxX2LFS/FX4LNL8VIUZNvxV89lm/FdnCMr8Va1QzvxUyUy+/FdWllL8VD9u1vxVzTlu/FReujb8VCo6IvxXSM4i/FZCYn78VpIuQvxX7N4y/FXAU9L8VGcu1vxWjOgLAFexGO78VUJ1dvxWSShrAFeSQ1r8VaziGvxU9LYu/FW6GZ78VDlyPvxWzOgrAFRvTtr8VkuyEvxUNiFi/FYFGH8AVhiZHvxWUaoy/FU7Cl78Vx3v6vxU1z37AFajnsL8VU77svxWr56a/Fdw/278VhxlavxWitam/FegcBMAVnoiKvxWlUoK/Fft+h78VtD7KvxUdRJa/FXzAdL8VPlyEvxXZYz+/FcEnnL8V8jpbvxX0OuG/FepNbr8V0fBtvxW1UzO/FVVger8Vm4ypvxUutWi/FQ8Ut78VzfdEvxVW33/AFaNMCMAVI0X4vxUizA7AFSLcj78VPphDwBW5z/+/FesSx78Vdf3uvxXZzPK/FYggw78VYMzLvxX75La/Feb/3r8VKMwHwBXT7tq/FdGqKcAVP7ynvxVoskPAFUD8AcAVfHG6vxXKOgLAFRCnEMAV3ew6wBUg8aK/Fd2vTMAVex9+vxVUANu/Feb8KcAVsGwewBV0AwzAFZcaBMAV9taNvxXnd0XAFUD9qcAVyDKpvxXYHhXAFShdSsAVSNubvxWw5LG/FUc3hsAVKQ0fwBUfpyXAFVyWIcAVQG7LvxUdclvAFQMdt78VZd35vxVj5GHAFbXnIcAVq/YEwBXIE4G/FXK6wL8VJhf1vxVC3s2/FWL0l78VsqUEwBWwbQnAFVcnjsAVQXOVwBUdVGPAFbNYzb8VZ8hJwBXa3JDAFWRVDMAVu6XnvxW2QDDAFXdRjsAVYw2avxWL9I6/FYopqr8V/QsBwBWtJA3AFWgaa8AV7oYGwBXoT7C/FSzL578VZ83dvxX9yIHAFQR7zcAV9mkNwBX0favAFdk8vb8VHygJwBXEQWnAFdZvpL8VJzBXwBVz8Yu/FVR0HMAVyhqUvxWFlLq/FcQpEsAVrGSbwBWyGN6/FQAcD8AV3ioEwBUnQ76/FXI+v78VrQzUvxXJv/G/FWEmP78Vcz0FwBXxnpa/FQPM5b8VHGvGvxXj6bG/FVWz0L8Vo0e/vxVQjjDAFeXgOMAVzTESwBVxpYu/FBsIThAEEPcDEN8BEJyhiigQ+5qTNBCIw70YELqwqyoQnMz4ARDB8IEaEI2D6SMQq564KRDI+osxELLlxSsQtvHFOBDp/LslEOCemB4QtLWEIBCqnAMQn6zEJxCoyfQsEMeO6DIQsMXOJRDl4uUfEPbZ2hMQw6quAxCSio4XENC7ky4QodXiERDCz5MnEOf4thwQwez6OBD3ypQiEKKsgyQQqeSKJRCc8p8WEJLx6A0QxK/THRCa2PcFELnspgYQzramNxDx25YZEIy/ux0Qt/Ui
EJjU3BIQ0fW1ARDPpaYPEKmo3RUQvbSIExDp6dgWEPuqzAEQmJb0OhC92ooZELaaywkQ/8DiGRDP+8UKEPuV3RsQhN3DNxDe6o8dEO+S7R8Q+t/7IBCF0rATEM+t9QoQ7bfcOhDj2/IPEJ7uiCUQsevrJhCOi/omEKaZ/CgQ17r2KRC41qoOEKDcpywQhdDYLRDoi8UvEMP+9DkQ8NzLHBCJxLU3EJ2TgjgQ8wMcIAAw3wE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaIuv45Wek/mFngGUAZMBCgMyLjATCOQBFRGdDL8V8dgOvxVz0A+/FXAXFL8V1YwPvxWzKRi/FdmAEL8VZOEhvxVE/SS/FWRwHb8VMpgSvxW0Sh2/Fav/GL8VBttpvxUD0hi/FR2oMr8VBKwpvxWNbyW/FeC9Ur8VhlYgvxWYSUi/FajVGb8VWV42vxXoDCK/FYtUML8VcKZfvxVesim/Fff0ir8VwFxsvxWzoIO/FSKlRb8V9ZNIvxU0WGS/FbKaRb8Vy2qHvxXr2Eq/FQlgMb8VcSxlvxWIFVq/FfTsJ78V2po7vxXWvJW/FR4Jdb8V0NlbvxVOYCW/FQE+O78VlZ5gvxUAO42/FeUjI78V16ZQvxX5YT6/FWw9YL8V/G6KvxVNoDe/FUolML8VL4ybvxV0h5O/Fb4OnL8VyVGGvxXoc8K/FXZ2yL8V5AnRvxXR1aO/FW7N4L8VP1OTvxUt6Za/FdPNbL8Ve7ZevxWtp7e/FaLZtb8VX0GTvxW3wFG/FSE0sL8VdXWUvxXOSEO/FW3Pn78VXXV2vxVxPYK/FY5Fqr8VJY62vxWGBzu/FVjeRr8VKYCJvxXSTZ6/FVK+v78VC9mwvxVKpsS/FYVtu78V8gGtvxWUljq/FRSqV78V0s6RvxWTAFG/FbLKnL8VRCuVvxWLE5S/FVlptL8Vy+BBvxUVfO2/FYw/bb8VEoajvxUgFFe/FQ/mt78VuM+VvxWSh4q/FZe09L8Vm/q3vxWJx5i/FezgOL8VzDB6vxUaqBPAFftVzr8VTS63vxUKws6/FbdudsAV3GEkwBV8vry/FS0hEsAVs1r5vxV7msq/FagLDcAV/UnrvxUnHTHAFWPrSsAVMw7XvxVaybO/FS1aXcAVyAexwBXGJgPAFbx8q78VJluivxXXNN2/FTlvmsAVqTiuvxWUsIG/FYXjFMAVqJDVvxVhQrnAFa1u7r8VTXJDwBXRnI3AFYlE078VwBcUwBU0uh7AFTGCWsAVfMQHwBVRjcC/FWCfs78VLmRIwBVPonO/FXq1B8AVlUZmwBWjt7+/FWcYir8VYdqnvxU41ULAFRkRlMAVb+8AwBVWiLG/FVbwFcAVDs/EvxVulKW/FXi6jsAV/q0EwBXfq5TAFdLvbcAVB4BDwBXNdATAFcZIo78VjsRAwBUYzNK/FTeFw78VH0MNwBXZzTXAFb/5y78V8SbmvxUZLwfAFTmwd8AVggkOwBW3tWfAFXJchL8VrELGvxUR7BHAFY/su78VWnOJwBXTwHi/FSLIlb8VvRO8vxXj8/W/FW8Yu78VBPcewBXldKW/Fa26o78V3O64vxUiPhXAFSesBMAVzJUYwBXOg/y/FQeV878VH210wBVXhVrAFTh1078VAvxJwBXCGZO/FZ9T7r8VMtE2wBUPiL2/FaBeB8AVadkvwBXVHt2/FU2yMMAVdNX8vxUvjAzAFTGD5L8V89ngvxUPGNm/FYOVrL8VeKzivxWDESTAFT8Y0b8VP+W3vxU/3GzAFfAVKMAVCyMEwBVZngrAFQsLvr8VGuhBwBXIyQvAFBsITxAAEPgDEOQBEL7BrC4QjOmtHBDrzJYfENH3jyYQ0tDFERDT17kYEMPk/SYQ3e2gJhD2q7M4ELPPtTQQ74esJhCp1/chEP2XlQoQl4ntCxDHxIkgEPfkzDkQmOmXDhD8vvw1EJuSghAQ87yODxD9/bk1EIbTpwYQ563IFRDF
uLIXEPGZrxgQw47tCRD038QbEMbDsx4QwqfQCxCQoMc0ELGVxCEQ+JD/JBDu3rIZEIGe5SsQ2+GWLhDlt6EDELqPojkQv5+7MhD9kfEzEO62mAkQsaf5OBCksI0VEMa7/gMQj5LuExC96r4gEMa/7zsQve/SFhCAvIwCEKbQ3RcQ67vPBRDTmccZEJC0qiQQhonnGhDR/+UOEMHZ/hsQv4fhChCH1qQdEOq83R0Q9t7NHhDUvL4fEMSzqikQ2ePEDBDljswNEMyu7wIQ5qi7PBCRqvAnEOHDkCsQv4v5DRCu26IvEPjijzkQxdXcFRDt6NMzEJuE2BwQ2t3jORDb6pA7EL67/SgcIAAw5AE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaOKuxL7m07e5FpQBkwEKAzIuMBMI5gEVo1L7vhXoufu+FVt7/b4VqKgkvxW/ABi/FR6QAL8VIO4SvxVEHym/FTxNLb8VCvkovxXAhCG/FYANDr8VgfsRvxUl4je/FUscG78V/4QzvxXplja/FUfGOr8VjXwyvxXgiTG/FVtCOb8VuS8ovxUGgj6/FRF7Fr8V+k4OvxXoZC2/FQp+FL8V071AvxV04E+/FbPAPb8VpPogvxU20p+/FZi7QL8VKFk5vxVHNDe/FaTtQ78V+vNovxXHWIK/FeCWNL8VK4wzvxV5oGm/FTEuT78VpM07vxXaukG/FS9URb8VzwVYvxUmgFG/FdMvLr8V7fUgvxUu4CS/FYm3Pb8VnKI4vxWRYTa/FdIcLL8VznoivxWXIUO/FVYlUr8VSoGSvxWLDHO/Fdd3XL8Vo5TavxWGpGy/FQ44+78VrETMvxVRZae/Fb3Tl78VP++UvxU2NQvAFXzOWL8VRY65vxXbYMS/FXsjd78V+EmsvxViorW/FetciL8VcYOkvxXYwZm/FS7sWL8VLfJkvxWaDE6/FQpllb8VVG+mvxUov4e/FSwsbb8VCwzZvxUYMUa/FeUOjL8VCl3OvxXurX+/FWRPpr8VHXytvxUA7Le/FeGhnb8VBFCQvxXnlvC/FYuhb78VVQSPvxWW1Zm/FXNfmr8VOE1BvxUFqq+/FZFcbL8Vmpl4vxUvgp2/FSf+hL8VwpaBvxUAGzy/FXLUL78VJteLvxVBsd6/FdnNQb8VxCJvvxX+foC/FYSEVb8Vho+LvxVt6NC/FbpkosAVhPuivxWS2SbAFWmrAMAVE2rkvxWtS1bAFdH5E8AVjyvGvxWRcnq/FYriL8AVpWASwBVRpQfAFe3xOcAV8XkHwBXy00PAFQDGrL8Vox0MwBXhPuy/FQsiB8AVWoogwBX9iw7AFUhY7L8V0LEywBX1JoPAFbfr778VLRWUwBVNazDAFd1ptb8VDjeDvxUQhyXAFT5ZEMAVA3OqwBXR9t6/FTZc5L8V5kG5vxWt2QjAFROk1r8Vu9ADwBUN0w3AFVfscL8Vu0QKwBV4ra6/FZXbmb8VysufvxUbYSvAFbGFJsAV584wwBUGhsC/FTvBG8AV7OOyvxWMpKbAFTMR+b8VxYT3vxVFKifAFXqNO8AV0nMWwBV5DqK/FV7BrL8VsyanvxXtDEfAFTLIc8AV3clVwBVrIfC/Fe5s578Vcr/WvxVsti7AFWO6+L8VkPcZwBXceLm/FaS5rr8V0oG7vxVXLOS/FUHFlr8VatD8vxVd/SHAFeB0JMAVPA3cwBXZ7GPAFUQC3r8VbbUPwBWqmN6/FSz/sL8VHl2wvxVPbae/FZZXnr8VVoyDwBVsiA3AFSM6HcAV29vcvxV6duG/FVCCfb8V/R0JwBW5+OC/Facng8AVKBHxvxWjNqe/FUVhob8VfeBTvxVcbk+/FX1JDMAVS6uzvxXFhqa/FeS/MsAVmJhVwBXmwjPAFVkvDcAVbC9ZvxXJ/j/AFZgrs78VlcMjwBX4ewHAFTvl9L8V/RhZvxWeCqu/FBsIUBAAEPgDEOYBEM+RvzQQ07yuMBDu5ogZELvt
pi0Q1tq1BxCy1MMJEN7gxR4Q65DyJRDt+YUzEI+1rCwQrpmNFBC8hs0XELDijjkQh+n9NBD977YMENmb5wUQyq2WAxDIhuQzEP6rqTwQwuzIIhDztqkUEOfG0ggQzYn6ARDvq28Q+ZG6BBC/koY0EOLj2QoQoMrFJxDs0dsrEOzR5iUQku/OOBDnndMlELet8gIQn5SmLhDY5I4xEK+hljMQ7v7QORCLzaoBELiYyRAQ76neARCw/+UREJXy+AsQwsT5IxCns48GEKGl0DIQ/MzuFBDS4Ig4EMeNphYQzp6jBBDDnKcYEMii0y0QyOi2LhDv75IcEPeftSkQ1pWaPBDLlOU6EODhsh8Qy5faHhCdqKMfEPWQuCIQh720OBDflLckELmbhSUQmsCLDRC774cnEK6xyw0Qw4n6DxCmtJsvEKHR9C4Qutf0LxC3/okyEKWW9zIQkuTHNxCanMUjENPlzToQ15TaOxCryA8cIAAw5gE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaL+378yDwruLhAGUAZMBCgMyLjATCOMBFep9Eb8VBksXvxV9LBO/FS6vGr8VmQ4avxXnABq/Fad2Fr8Vw5o3vxUUxR+/FXE2G78VqLUdvxXkpyG/FQPNG78VGd0XvxWtIDC/FQqBSL8VcC1avxWONyy/FZKpU78VNPQevxWfRRy/FQ2RPb8V/K00vxVK60u/FXbVXr8VyDM1vxW9Vke/Fc8QKb8Vz4xfvxXXlLO/FbRqS78VJPJfvxWFO1a/FUKwc78VDtN/vxXnoWW/FYmub78V8RVZvxVX4lW/FaCPSr8VdclMvxU4iyu/FVAHSr8VPY20vxUuwGO/FfW7gb8VAtNtvxUR/WK/FdD4TL8VWby+vxVshaG/FerJaL8Vwa9OvxXtsly/FcAJhr8VJbJFvxUE3aa/FQflX78VwUu6vxX88cq/FQ3g9L8V3vJwvxUgyIe/FSyraL8VnHW9vxV1Cn6/FTXZsL8V5uGfvxXqc4+/Fakt6L8V6XOFvxXr1rq/FchX0L8VlIudvxWX2oO/Fcnzkb8VJWG7vxU8omi/FXr/hr8VX+p8vxWeu5O/Fd1DU78VUB+KvxUmeHK/FXDcWb8VMPOdvxUw7BjAFRcHO8AVI3zFvxUOG8m/FVL7hb8Vi4vovxXtAay/FQgOob8VnpSfvxUSNtS/FR7A/r8Vk1jEvxVKc4a/FQJdzr8VbPnsvxVKcde/FZ1ror8V5ICFvxU2caa/FUQDYb8Vi9hxvxXEvrW/FfMgHcAVw6eZvxWYC52/FaAVkL8VFqZGvxXMIbq/FcBxj8AVKWWxvxVQ9bi/FVEl+78V/RHPvxVOE96/FZ3p9L8VLV5+wBXdFAjAFeFt4b8VMEnXvxW83jDAFT80QMAV/T+9wBWHMJG/FVxXOsAV8+/yvxXUpJC/FXnPFsAVJg0OwBUj6Lu/FX7vsL8VBSd2wBUgtBvAFXKQpr8VuTp5wBXjnR7AFW4Qr78VAxGhvxUPZBzAFecvUcAVZvsUwBUmCZnAFbV/678VOjUnwBXuQiDAFaW2/r8VpbyawBWdLC7AFdEbFsAVTchxwBWFSu2/Fbb8l78VzDWsvxWJ9I2/FZYwp8AVLUq4vxUOEZe/Fc2//L8VwDmBvxXR6ZK/Fdi8gcAVqj3FvxXXC4S/FWO/psAVEehHwBXHvoG/FU/5iMAVWxiuvxWvynfAFQBsHMAVcieJwBXXNLDAFUXwGsAVvAPTvxVSSQTAFdkObsAVdDwAwBVb3La/FQ1tXsAVbYZCwBV4wjLAFV8xtb8VhqxywBW73aDAFfGHAMAVtmnYvxW2MiHAFXju+b8VAn9xwBUJ+iHAFVJ0FcAVe7ASwBXZvjHAFXmlpr8VxbYvwBXrY3fAFQ3hhcAVQZD1vxX8xwrAFXg1CMAVCH16wBXu3+K/FeB/3L8VSK2NvxXkS8S/FevMEMAV27eKvxX3Bx3AFTdhs78V2EyRvxUm
Lda/FVNS1L8Vy75bwBW4DlzAFfYDasAVu5CuvxV3pg/AFbGco8AVTKtIwBXv0wvAFT+H4b8VtgTGvxQbCE8QABD3AxDjARCjnJsrENPuwwwQvO3GJhDT6OwtENrZ0AcQ67y3ChCkgwcQwNvIDRCq64s3EI+JghwQ+J+0MBDM6swVEMSAzwkQ8qm2HhCjwI8iEPeEnSYQ9Y45EIXB7DYQrMrPDxDe8c0QEK6V4QcQ87SbCRCMhZAtEI6M0gEQ49C8FxCDrosZEKH5+ikQ7NvKFhDXxYEiEO+sxQUQnM+kChCE/bI8EP6Xhw4Q7N7IKxDgzr0uEJmF0jUQj8zWOBC21PMGENbPrDEQkNjgLxDLoIo6ELLNYRDZ5dcREOL50hIQz/elExDlir8IEJ/rjxQQ0NPXFBCol68jEN3I3AoQisGJHRCokJUlEPDcjyQQ+7iUGhD5nsMiEPeL+xgQ2q+AIRDvlpgfEM7PgzAQxce1NxC1l7oLEN627yMQs+GIMhCYyOQgEMuUlicQ34z+AhD5u+MsEOburA0QuuzuNRDAtMwyEK3jozIQ8aeuFxCp1aw3ELa+lDkQjMaYPBDHvQ8cIAAw4wE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaI7Qw4zRgdPc7AGUAZMBCgMyLjATCOwBFbjWor4VSarevhW6r9u+Fcpq374VRXXpvhVYAe++FT1/7L4V4VQRvxXqoOm+FRXbHb8VjAABvxUb6B+/Fd0D8b4VXKH0vhW57fu+FYluJL8VAS8tvxVUgO6+FU5DA78VQycgvxWVlB6/FejyBb8V4K4hvxXGsiu/FVesI78VKBkHvxX/Lfq+Fejm/L4V5Pn4vhW0+4G/FZsoJb8VUoZHvxVe2zC/FTK5Qb8Vl5g3vxXZD+++FdpHXb8VzVcEvxVmQVe/FWwcLL8VgZHbvxUchiC/FQTCe78V9TEqvxVwgym/FZCjUr8VhcRSvxWADy6/FasliL8V4JklvxWV5Fi/FS2CEb8V/FQevxVVWme/FXZ1+74Vx2YjvxUHbwO/FQb5Yb8VKdQQvxX0sIu/FZQPxr8VbzdNvxX69kC/FZ7oor8VnBqMvxVaLz+/FVQ+Or8VXmBQvxUfhXC/FWr5cr8V/GLqvxVczrS/FeSHBL8VqddfvxVvqMu/FdmOdr8VYNUgvxVO8Gy/FXRhor8VC5K+vxUNh5S/FXxp9b8VjufyvxWys0G/FR7lIL8ViB2IvxV46cW/FTyVDcAVipN2vxW7+XO/FS7Tcr8VKnx0vxVqiY2/FSR2YL8VzZGmvxWnGjW/FUg1ur8VhSWavxUsj4y/FV/QdL8VCapAvxVRWYO/FW40dr8VZU0XvxXlb36/FRynfb8VPgIsvxWD/2y/FXwa8L8V6Z2svxVBxSa/FZkhOL8VB5lJvxUxpbu/FTu6Hb8VCQR5vxXzRWO/Ff5di78V3VVtwBVB+6+/FaqcGcAVAScFwBWTD3PAFdR8eL8VITimvxX8H5S/FZoinL8VKEfFvxVER+6/FXBH3b8VbBKQvxVATS3AFdZh9b8Vm22+vxVogHy/FXq1AMAVJw6EwBVYW+m/FQOlsb8V9bEXwBUo6NO/FSU49r8VRhHrvxXR89W/FeVbFcAV9nXEvxUqQIG/Fb2TNMAVT9ZAwBV52c+/FaOVLMAVXS8TwBUwH4DAFQzIvb8VGHsrwBXalom/Fbxt9L8VztvTvxWxEIfAFeFsRsAVvV3KvxVWkP+/Ff8zg8AV99pPwBUMdGPAFTumIMAVYc8IwBUTAIvAFeinB8AViGe9vxXzwvy/FanPjb8VZ2WUvxW5iBjAFfL4FsAVtOVgwBVFvQ/AFWzrUsAVWg3qvxVVTvi/FW4+zL8VyGhZwBVoAIO/FfGNEsAVaEdlwBX4XurAFXOMob8VSfzVvxWDFLu/FVzA4r8Vv5OCwBWE0sm/FdHzXr8VsHZIwBV21WHAFWFkqr8VCOrUvxWaYf+/FYd2rb8V
0e+2vxXB3aa/FeR6vL8VBt64vxVmmjXAFQSPesAVYdWQvxVPq2zAFUB4gb8VZwNpvxV5yATAFXS9C8AVNduPwBX6s7O/Fb271b8VI1VwvxVWsqK/Ffud6b8VlgcKwBUPG0PAFRlgv78Vbu4vwBV7RVfAFZpxpb8V+AjcvxW7lpe/FZtu6L8VgZDhvxWq+wrAFakXBcAVp3d1wBV5i4a/FV5MAcAV1CKAvxXRvdK/FU0TPcAV63NcwBQbCFIQABD4AxDsARD96+IsEMuvpRsQi9C6GBDNq4YOENCy3hEQkbbkCRCMgfoZEO6QiygQuKbVMhDHo2wQ2/iNExDvn44CEOL8uwEQnL3sKBDPq7YeEKaS/AUQ4fjmNhCbyfQuEK/EngcQv6OiERCUzoASEK3q6TgQ+YmSMhC1sIkWEN370B0QiJuAFBCl2e0YENSw/woQs4yiHBDO7HwQ4ez4IBDQm80iEL2ehScQ2pHoOhC29ectEI6g5w4Qvsu4MhCU8tccEIb89TwQhcSQARC8npk4EKLspxIQ0YD4IhDN/vILEIOVmR4Qxf3fMhCLqtcWENCvpAkQmb9yEOCH+hYQwpjAFhCwmv4zEP/FjBoQ5LauBRCfmq07EMbJuTAQgNzwGhCujsUbEPCljBwQ54qkMhC4mtMeEKuUmikQ3fPqDBCh5icQq+WONxDI/7UpEJyuwQ0QrfiENxCt/sArEIrQkwMQ2eWWLhDV1rYBEMvA1TcQg6GtDxC6yO01EMaihjgQo/uLOxCphPAIEMHVDBwgADDsATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFo2uzd493nlaXkAZQBkwEKAzIuMBMI5AEVTa0VvxWmPha/FV1PG78VC2MavxWKgBe/FWzcHr8VWAAevxWgnSW/FQfEJb8VG6sovxXOcR6/FfBiM78VZyYpvxWFbSm/FWkjU78VTPY7vxWWai2/FWa5QL8V2i6IvxXarFS/FWvdYr8Vsp0mvxUrUka/FebjU78VsiU7vxVLIDe/FawDKr8V1OswvxWRKEm/FW3BV78VA0OHvxV3Seu/FWIoPb8VEk5svxV1ZTu/FTfzTL8V4XluvxUnKpG/FTcLkL8Vb0eGvxWYjo2/FeM6p78VIhuBvxW/eGy/FSMWJ78V8y9LvxUkzE2/Fd2/nr8VZI5kvxVoOES/FQ0YUr8VmwG1vxUcPT+/FXqmN78VEm06vxXvXUS/FadwNr8VBi6HvxWvkHG/FaHSdL8VcKS7vxWJnIi/Fc4/VsAVkaVowBWtN/C/FdBkhr8V/zBdvxWuwqu/Fd/k1b8V+KOGvxWoC6q/FficiL8V2DSmvxUMRbe/FZsWpL8VnruWvxVowADAFaZymL8V3vfQvxX0ELC/FfBh7L8VAP6QvxVwacK/FVnqsb8VEzDSvxVcjoW/FRckzb8VpreRvxVClHi/FQ+4or8VuCg2vxWppq2/FQNmbL8Vm3NbvxWWTmS/FbtAwL8VMjXNvxXLeI+/FaIIkb8VBvuSvxUtTVO/FTsXer8VzclmvxXh6fu/FXRfyb8VO0R5vxU9LM6/FYWgyr8V/6ijvxW5RIG/FZkXZL8V3gRGvxVTz4G/FWMbOr8Vum27vxXfLC7AFcP2v78VsaRpwRUUsLO/Fbdotb8Vu7invxWmA8e/FUOFDMAVMU1uwBWPheO/FTqhWsAV5M+dwBX5WZnAFS1ehcAVXckIwBXJ9APAFcgSib8VJSEpwBU6c6G/FXGJ8b8Vex6yvxWMTIDAFVqy778VqO3zvxVQBhPAFWDcDcAVusE7wBXP4TDAFTYkpb8V/B/LvxUDI/y/FT+JCsAVD3w9wBWySSTAFUzBt78VVpckwBXBpQLAFUIkpb8VvAghwBWSlTDAFYqqAMAVhwtSwBXmktW/FXt6FsAVk5kCwBUUi7S/FcbjU8AVUAqvwBUOJmzAFfcvEcAVvh8twBUrWUrAFURdGMAVKi8ywBUi3fS/FZbEUsAV
ZJ7WvxW1TJO/FX6zFcAVrR6RwBXXIErAFZbq278Vw3mzwBXYmRfAFRNzE8AVHm7ovxUmO0XAFbfeDMAVB4T1vxX9vrS/FR6Cqr8VFXVywBWxB8e/Fb96b78Viv2GvxWB2Zy/FeOjB8AVjJrMvxW61JbAFVII3b8V0IMHwBUxI6a/FWwx578V6wWfvxXzQAvAFSSuub8VJHofwBVj7Wi/FXjpDMAVT4mGvxWeR/2/FRGeBsAVuhYfwBX8G3PAFUldMMAVNY83wBUGWVXAFadDjb8VSGUJwBVF7tu/FYUAGsEVo/jfvxXRlae/FXK+z78VVs7pvxXWv5e/FR8Wjb8ViwEhwBWlEO+/FQbqc8AVU7kXwBVunKu/FURbnb8UGwhPEAAQ9gMQ5AEQu7/dKBC488gpELLK/hgQwJjyCxC73dwmEITWjx4QjvTrIBChua8qEOr9hzMQg5mVGBCgys4UEJDQsRgQ6d3RBBDmzoUjEIScqw0Qto6KKBD89OYvEP/K8w8QmZHQOhCfx/wEEPbwiDQQoOXxAxCk8Z4WEPK1gQEQsNHtNRD18bYoEOChqyoQ6uf0HhC99rMLEOSpsAUQjNaVJRCzhu8MEIaVmi0QzMKhLxCJ79UwEP68/wIQp4TwFxDPi+0REKGR8hAQo43oHRCwosAgEKbz+wEQ7anEAhCVsdQUELLvnRIQ6JvsFhCltPAfELeL1A0QtZ7aChC4qakfEPrMvBoQgcaPBRC44vEbEMeshS4QmPSQCxDY2LwCEKzIzgUQhIjFLRDXkJQ3EKCu1TsQxK35BhDw5uolEJiOtCcQt7iCGhDkg4kvEJyKwQoQxJyALBC6hc0uEIHG9CgQpv7zLhCqyq0xEO7JoDMQ+dTWNBDn29w3EOqm7zoQ9Z7XPBwgADDkATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAForMW5n6zK3Jv+AZQBkwEKAzIuMBMI4QEViIIhvxX71CW/FT7/Ib8VneIxvxU+Jya/FatbI78Vm/w0vxWOnj+/Fe5kQ78VWGRRvxWo6iq/Fc4xL78VNT4ovxWH6UO/FblJOr8VjWFIvxUqSka/FRfKRb8Vc2tGvxW4Ani/Fa2miL8V3dQ/vxU7mGK/FYfIMr8VbvNVvxUaez6/FUHJMb8VdwJHvxVgtIG/FXpdYL8VkZlbvxXSnYy/FZby978V41uevxXJQoe/FS5Ai78V+E5OvxXQvke/FRPBb78VqaR+vxWzco2/FaFClr8VdK+KvxXT3aW/FbSnSr8VDaZyvxVXymi/FQwzYL8VSvZwvxVFP3i/Fa+gtL8VyClGvxU38kO/FTIWSr8VUfFAvxVwzVy/Ffe7ur8VuHeFvxVsYqG/FYCErr8VpO2DvxVHi4+/FRogbr8VjUrHvxVQQKK/FQ4gFcAVfGICwBVzebC/FWgDBcAVznO3vxURuQfAFe5+nL8VY2uXvxXM7Vi/FcPct78VclWLvxWHYd6/FbQbir8VY8m/vxUSTaW/FSE4jb8VSJrVvxXk/izAFd94mb8VKWravxVTbAfAFeHxFcAVthzWvxXiPMi/FSmvr78VSx1vvxU8632/FQBSmr8VfT+KvxVfL9S/FdR1rb8V5euCvxWgIN+/Fa6bpL8V8P+UvxUTBdi/FX1q4b8V2brQvxUogFi/FY2rV78VxpSevxUkxJq/FfMNjr8VhRKGvxWA812/FcN1lb8VgptgvxWfYlTAFbHk278VueHMvxVzGhTAFfK8W8AVlAkUwBVnPgHAFcSE6b8VCDrcvxUcK/e/FQymvb8VklIewBXoW+W/FXBDx78Vcl8BwBWkShTAFWLn478VcfIRwBWXxWDAFeGQI8AVTJ1OwBVp5z/AFYltL8AVWnbAvxX9YgbAFapfJcAVUAIrwBUlS+K/FXepx78ViDk+wBUBaifAFa18NMAVx4AGwBVClJ/AFaPWHMAVVTetvxVDsZW/FV5Iz78V2k7WvxWvJQvAFcYskMAV
+a4awBWUrQvAFYjTmL8VjzxZwBXJcxPAFYEzBcAVselNwBX8YgPAFftLA8AVp/A9wBVk5dW/FTD4+r8V+vwzwBU5z7DAFagoHcAVhZ+/vxVd/ifAFa/VBMAVASBCwBWsYXHAFVt+MMAVp1BCwBX4PBDAFeQ1RsAV5JXxvxV6xCPAFdOMccAVxf47wBWrnizAFSig+r8V+p8GwBVdSs6/FdPf1r8VWQaWwBUZwQ3AFXFyi78VB/HnvxX26Oi/FaK+GMAVZr41wBX4nwPAFcWi878VCrGLwBVxPCvAFWegxL8VvXmVwBXqNnbAFYTO+r8VCUgmwBVEkWjAFU52kcAVZpQCwBWMGhDAFS07AMAVXIVtwBWwUVq/FUpphsAVcO68vxU93i7AFTfo2r8VmHgJwBX1L/6/FboVBsAViOFEwBXcRgXAFaJDAsAVtnW7vxUQoiTAFYK6GcAVUQ0CwBUeVVbAFfMneb8UGwhOEAAQ9QMQ4QEQ1MKFLBCLhPkFENjW8gkQqfL3KxCn1qYtEJS73SAQg+HtHRCQ5sExELzV6R8Q8fPCNxDHg9wxELfVnAQQ5M7tGRDtjJ8fEKOOyiMQ7dG3MhDT8/IxEPr6kTcQhd6dNRCkoVEQ883HHhCuj4guEI+imS8Q8N6uHxCXib0qELPAihwQyI7mChDx+vgsEPXRoSEQ+76OARDx/4cmEPipnhgQ/KmeNBCN8qMwEIv50Q4Q44HkNhDQ8aI6EMm8kCkQ4ID/EBDP7boRELzlmw4QwbO2CBDBnPoTEJrs4Q8QkOfLOBC0u40tEIS3rwQQu9eQFxC6jNwXEMSIyjYQ4fXjCxC+4c4ZEImfvhsQ3LayIhDX8rg4EOHpowsQhfPZOhCLu4AhEJHY8yMQz/eYAxCn/uM3ENbDywwQwKvFJhCDwZY8EO3QqSoQubLaKxDNv6gzEOjSjzEQnt+NMhD95p8xEMLM7TQQpOCHNhDDsNEPEKvj9DkQm8yzOxwgADDhATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFoi+D7qpHRqJ1klAGTAQoDMi4wEwjkARUChOq+FWbo674VV5rsvhXtXfy+Feov8r4Vyg7zvhXnk+6+FaikKr8V11QQvxX/x/S+FeVYB78V41MAvxUh7AG/FWP0774VFNM7vxXrVDW/FZtYLL8VfDgTvxWpF0G/FQZuCb8VqPf0vhWjZw6/FVv/Ob8VvSECvxXs3AC/Fd+OFb8V7lUkvxWd3xe/FQlBLL8Veg9DvxWYQVK/FYFpZr8Vliw4vxVvy2C/FaTMdb8V0oQlvxX5bWu/Fa3QZL8VHXxUvxWL1qG/FfkhQ78V39xbvxWxtwy/Fd1TGL8Vf7givxXsa12/FTX0cr8V2k5ZvxXvnia/FdhLQL8VOMYovxVaPku/FXl6QL8Vho4lvxWprki/Fe/1JL8V2l6LvxXUfkLAFVP3mL8V1blnvxVj16O/FT3zV78VS32evxUNSd2/FXc4ir8VH9ZDvxX6ApC/Fe0p2r8V+6sGwBVoUvy/FVuwhL8VkccFwBXzJa+/FcGAhr8VJju9vxVly4a/FX1Yb78VshOgvxVxoArAFdzbzr8VV7QCwBWxen6/FV9llb8Vl16wvxWD/ZW/FZhcNb8VNH0yvxUCtKS/FYKBJr8Vnj6xvxWDNEK/FW5QZb8VV/ysvxVihXm/FaQXi78VR+2NvxWQVoq/FRDtRb8Vl5GHvxU/UXq/FUcFhb8VsMzpvxXcMUW/FQ8tU78VAOTuvxWaJm6/FY9klL8V6F9XvxX6gUC/FV37iL8VBoqPvxUEiWC/FVP/KL8VfMnZvxWupo+/FUs4a8AVgg1SwBXBGAzAFdXvL8AVOkUFwBX/KQvAFdEcB8AVwiHevxWPIinAFdsHyr8Vrm0UwBXDHhPAFe6hkcAVHIErwBXq0pa/Fan+jL8VkXEBwBXJT4K/FZhMvb8VaRWYvxVeli3AFX3LE8AVG8krwBWi2BDAFZjb
j8AVJoGZwBWj2jzAFXrDMMAVC2QZwBUtf1DAFUFsMcAVHCmNwBX6oOG/FSou3b8VQR4nwBXSsYjAFfKBOMAVPOeavxXUEH/AFSNAEcAVSWkRwBUcJdG/FYYdocAVekdzwBXKdVXAFcwEC8AVPycDwBWqBQ/AFTO0FMAV1bKOvxUZ6QLAFXAWBcAVo8k0wBWOtNi/FUcSNsAVEPCivxXfLLO/FRugRr8VCvevvxWmlEK/FVU/vb8Vk98XwBX9YoDAFaFzob8VmPMwwBXZqYLAFYKLHcAVbMH4vxX9ymm/FbhChL8VUXADwBXMaInAFZma4r8Vb6F6vxVpYZDAFVs8NMAVLR+YvxX4O9K/FV9A6b8VROc0wBUKt5G/Fbj4Tr8VkeMwwBV7WdC/FWsqh78VVsYSwBX1+rC/Fe1XCcAVDjoLwBUk30nAFe3rKMAVBf9dwBW3nzbAFTFKiL8VNOtcwBXyKFfAFdDNk78Vb5IfwBVa6xLAFe3zr78VEMtYvxXotae/FcCKCsAVEWAwwBVAapu/FTTbYsAVVjgnwBW/5N6/FTVRDcAVpZHrvxXSmpDAFQmgor8V3T4hwBQbCE8QARD3AxDkARCtrJc0EL7y9jMQyMHRFRDw67MpEMeRqB0QprCPFhCV9OAKEMDS+yUQpKXPLxDgtcsBEM/l2SQQr+jhFRCujcwMEJbuzDsQy5WdJBDuutgpEJPOuigQ8srDMBDi0O8GEJaK6hAQkKb2NxCquegDEOLZoQgQwq3lCBDkoIcJEI/m0DIQrdjsGRClza4CEM3LxTMQ6r22CxCki6wpENexogwQwK2LNxC+gPAkEJj/qTEQge6EORDl7O82EP7I1jwQyJ+aFhD/49A3EKXBlRkQna+GEhCR96EjEM/QqzYQh7nqExD9pbEyENPD7CMQsuR4EIbhzDgQ/I+DARC6x90mENfm1QkQrdqZIxCV6p4KEJGshRwQyNG8BBDonqUbEKauuCIQjeqsLxDnr7E1EJWgzyYQu/CvIRCG85oiELHgpgEQ5vXLJRDSsJoNEMfr9C4Q6/vRHRCpqOQ4EOuyzSwQjvXjLhDE0sEvEIvkxzEQq5C7NBCSgbQ3ELao3DQcIAAw5AE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaIG5s/mc9sSgbZQBkwEKAzIuMBMI5gEVnCcIvxV4eQ2/Ff7xCL8VqfYlvxXsZhG/FYLxCb8V9nsdvxVV8zK/Fc3SKb8VgtkTvxWl4yW/FfxwDL8VtaQMvxVLFym/FWjjXL8Vp+E3vxWKOlC/FTdZUb8V02xuvxXrKDG/FUy+JL8VCx43vxXC0Se/FfDofL8V2FUQvxViOBu/FaSvNr8V+kZavxVTLnK/FYsyaL8VKeCgvxVZqti/FQ9Rcr8VwoqdvxVgbXO/Fb76nb8V6t3LvxXphoG/FYx5hL8VE/dGvxXji3i/FQlaUb8VJ6g8vxWc+my/Fapqp78VvI0qvxW5ACu/FXPkgr8Vy+qFvxWFBoy/FflpHb8VXFxEvxU/USS/FZnfTL8VOL48vxU+YmW/FaV4kb8VHdl6vxUYN5a/FZwxs78V74yBvxUIesi/FX/ruL8VdwTcvxUVZ/m/FbyM2b8Vs0eSvxV77se/FUJto78V1QnhvxUmmTnAFcIEyL8VRlTmvxVW4xXAFfkX6L8VTeCpvxWzG5a/Ffm92L8VMgCMvxVK3WK/FV1S9r8VrFDlvxX1WIm/FcQpxr8VSzOXvxW1jHW/FaA5v78VTCCyvxVOjcG/FdDe4L8VUWzPvxWz60C/FROGeL8VnUTvvxXTODm/FU5Ihb8Vwy2yvxXFRZi/FZw1sb8ViPjZvxWRhaW/FZHWPb8VvKPhvxW/e2K/Fc92f78VIcUrvxXcedy/FUMrVb8VGGROvxUgMV+/FevgQ78VM01+vxW/3oG/FdSJIcAVuVXqvxUzX4G/FYna278VZ2+hvxWKra+/FXlqecAVahHZvxXZs6e/
FQVpor8Vz0FSwBXyWRrAFc/CpMAVzvX+vxUsoovAFRfJC8AVdA5bwBWzrmnAFWYdq8AVMyFpwBVmZZa/FYe9c8AV8BbmvxWetHrAFQftqL8VQL9GwBVA5/2/FT0dVsAVqYBjwBXfIT3AFar6CcAVZOlMwBVsKEvAFXAB8L8VN8w2wBWa6pnAFbCRAsAV6+YkwBVnVgrAFQoLK8AVIrFTwBVME/+/FbmWQMAVdREjwBXSo8K/FZfbqr8VxwuevxW9M4q/FdZvPsAVMm/8vxXiuhrAFSRLL8AV2OstwBXeYcW/FfFA8b8V/1/TvxVm8iLAFfbLCMAVIpMEwBVvC0TAFeWcA8AVmbkCwBVUNcG/FZpg1b8Vx0uLwBWItP2/FSxrO8AVj9PxvxXaXobAFecQC8AVnn/GvxUV+i7AFSztib8V/tOsvxVEOwPAFdZlp8AVsVDCvxV2fa+/FdlpnL8VUA3avxVvBem/FZi9GcAVvkHxvxVZcby/FXEu1L8VXr+yvxX3FMHAFcxwbsAViuvYvxWLVQvAFb+Nor8VQP+IvxXPpCLAFU5nosAVFBQewBVbGyDAFSMqhL8VPa4DwBVMxIa/FRmVTL8V0V9fwBUnpCDAFbaUEsAV2RiXvxXb8FK/FQf4wcAVmSEXwBVdZ2K/FfjN/r8V8PegwBUjOKDAFbMxpL8VvheYvxULAIDAFf7/P8AV99sjwBVaqRLAFBsIUBADEPgDEOYBEIuhgDcQlNzdNhDe0ZIqELPGricQ4/TUJhDu4dcUEKib7gQQv++WFhCw1740EJ/Sgi0Q5oyTCBC0zUIQy66NBBDA1bMsEOn71B8Q8KnRIxDgt6QqEOji0jMQ9urYJhC97rEZEL696BEQhreMCRCt67otEJv0jiEQ/fjBFhDCmM4vEMnRmC4Q7N/RJBDe9rMyEIuVxR4Q983DIBD/jfMyEL28iSYQtLG4KRDsi/8tEIKE/TEQiLjINxDu8Q4Q3qfFNBCBkP4DEPfbmiEQq7ruFhD1ycwQEOS/iAwQ6cfzEhDByqYuEOWdzBMQ3eCNFBCIzP0UENeklwkQhYpiEPuR/y8QhLKQHRDmi90ZEL+vzhEQv4OSAhCniYccEM23jgsQlIDcHhDx3IouELToryoQi8b3FRDi/pIoEOPRpiMQzI3vJBDP++8JEJqT+iYQ2c3jKBCa9J0GENyeoTQQvr6ZLxD1tO4CEMKR3DMQwqKSNxDrk7c5EO6dmiMQ4bAPHCAAMOYBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWjFkoLqiavev5kBlAGTAQoDMi4wEwjiARUDyQK/FfNsCL8VSWMFvxXAaAq/FRuiIL8VY9sFvxU50we/FTNtW78VtAwLvxW+Qiy/FcMTVr8VyRcHvxXIAgy/FcVND78VFaIKvxWV9IW/FdnRar8VHV0mvxULehW/FXeZRL8VJPZVvxX1DF2/FaVUWb8Vk2MVvxVjYBe/FT7uLb8VJ58NvxVq+BO/FfjxMb8V6gSBvxVvYHC/FYAFh78VAKCMvxVnnX2/FeEAdb8VY2RtvxUrMzW/Ffc+Y78VH0EWvxWRjl+/FZ37R78Viy6BvxUftma/FeLydb8VTQ+TvxWdglu/FTXaiL8VashnvxUa43G/FXXBGL8V2BE1vxWqPYC/FaQNg78VjUYovxXyHie/FZjIG78Vz6lNvxWrmJ6/Fbb69r8VX2RKwBWUj+u/FVJDqr8VG5AvwBVamLq/FWkcmb8VKbuXvxVC0wnAFe4Ng78VavwJwBX7t4O/FVb2eL8V+G3DvxXHhKC/FUJEeb8VJLaavxXRaZO/FTwH178VIa5lvxUL6t2/FX/fhL8V43mhvxVwAvm/FXtxcb8V+FepvxVff46/FW95ab8V+8lwvxXG1Ze/FXkVn78VRI+zvxVQGpe/FfdpnL8Va8iKvxXGPaa/FTBeqb8VlxaQvxWIFry/FRi1478VBT2cvxXiqzO/Feo7TL8VbhGB
vxUcHk6/FWjyjb8Vj3SQvxX96rm/Fe6L/r8Vqv37vxXqnVq/FaAvjr8VJlXevxXvWkC/FbF+pb8VTWQzwBWX0fK/FX/QO8AVSO5HwBWg6hPAFU8shcAVJbOXwBUJi3vAFZV1TMAV0Y0pwBWUgnTAFSaowb8VYoBrwBX0y1bAFaQ+BsAVhXwzwBXlGLi/FRxE6L8VB+vUvxVFw5fAFU8+SsAVm8l8wBWyZM+/FahLq78VfkG0wBU1SEjAFStD/L8VJuGTvxUevHTAFfLamcAVsHkbwBX6I8m/FQn6YMAVqTkewBV4yqG/FUK1xL8VlN0XwBX9UAHAFcrmYsAV4T+yvxVyN4zAFVCYDcAVYFMJwBWNxc6/FVsXB8AV75sDwRX4KY3AFY5vh78VwxykvxXxLbO/FT5YG8AVb0QZwBW/oHe/FfYfqL8VHZLWvxXA9sy/FZmvqr8Vws2dvxXZd3zAFX9sK8AV1ttTwBUgqPe/FbAqNMAVRW/lvxVl8QrAFYYYCsAV2WQywBVxZPW/FS2oub8VwDUswBX0fg/AFQK/oL8VQFiwvxUI2ZK/FZQTxL8V19S7wBVJbNS/FRfZAMAV2da8vxUdiFPAFYewPMAVw6XVvxXLsGvAFZc2AcAVXXM/wBWrsR7AFZ5ZK8AVeAZKvxVfic6/FRuXxL8VsRGIvxW/zSvAFWQDCcAVKvezvxWUM++/FYu0wr8VeAOUvxXvWOe/Fev9yr8VnPg5wBXvNQbAFaycLMAV1NdowBVVXi7AFeqhjL8Vw/quvxWwnR/AFeFVpr8Vs4thwBXUwOG/FXYk2b8VUbaevxXShbu/FBsITxAAEPYDEOIBEIL+sg8QsfKxNBCYgdgXEN/uwxIQma7sHRDW1SQQu7qzCxD1ufEnELfv2jcQ37DHLRCe57AIEO31ow4Q7pW4GxCovpQLEJKznSIQxNbSJxDJvI8sEOG+2TQQqaX/BhDI8acsEManFBCGgZgxEPef8wgQp5bbORCYjNsaEISORRCinKsdEJOpxh8Q2cDMAhCm+ecqEJSYxDgQsav8AhC/8vgpELPQvzYQ3PuGDxDq6Y4DEMnw5AYQ9L/iEBD4/5IKEKGC8hEQ6omqAhCql+EwEKWc4RIQ9cP0AxDfip8YEMnu3xUQktfwFRDTnIcXEKmbqgMQjLTVGRCRkdELEMiZwAEQxJ2BHRD+y7EcEO2o7CkQtKK4HxDs1O8fEOK87wsQt8muIRDr45A0ENG01CMQsLblJBDB8J0dEO7OwycQ25XvNxC3ivY0EPia0A4QoJLuKxC2rJcuEKvA7g0Q/Z7JMRDxxs8GEMGXiDUQvJeEEBCqoqQQEKkDHCAAMOIBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWib5YbxgtuJoXGUAZMBCgMyLjATCOgBFVKLzr4VoIoHvxXTJAS/FfERCb8VZaYOvxVtWA+/FTiwBr8VRQYovxURNFS/FUWZFr8VmP0rvxWjbQ+/FWGrGL8VKqAHvxViUhG/FbBcKb8VmrEtvxVb3mC/FSuxYL8VTwQivxXbdTW/FUz7Or8V9dgsvxWpKiy/FZEiHr8V82EfvxWNzV+/FfDLDL8VllcIvxVa2BG/FblEIr8Vlm46vxXSCzi/FWpJMb8VjKQyvxXovoq/FXmZib8VLgRzvxUEbIO/FaO2VL8VOmWMvxWAboS/FQQ4OL8VZ9RAvxWrA1K/FRMYML8VYUtzvxXwu0q/FQjESL8Vj5tIvxXuhDK/Fct1PL8V/LYjvxUl+Gy/Fek+qb8V6rAOvxVomBO/FeWoWr8VBG7FvxVBn56/FTZ+KL8VGBsyvxVnUDa/FZsqm78VmtSrvxViC1y/Ff4Xb78VELVpvxVJo36/Fd5Tg78VtWRWvxWWHpK/Fa0U7L8VkhAowBXJvvK/FewlmL8VQaBzvxW9naa/FehIC8AVLQdcvxWZvuG/FTw0zL8VDQKevxUQ0T/AFUH3o78VUxZHvxUErMq/
FZ2hkb8VsFNQvxUEhLa/FXvFyL8VBjSkvxUCiJ+/FcR7hL8Vf/eCvxUSK2y/FfLRqr8VwoGgvxU4cVy/FQpbhb8Vew6ivxUNkIW/FdqQUr8VEcxyvxWcQUi/FSDeS78V8AlyvxURiLi/FVRy/L8VNVWxvxWMVNy/FX26Yb8VIQMpvxU5IoG/FYSmKb8V/Ia9vxXmpHjAFQM8X8AV6F4CwBViQNu/FT2tHMAV6Pp7vxWL6ZC/FbqlqL8VEWnivxVesX2/FYA9Ur8VKnMEwBUBny3AFXGrBsAVVIj5vxUkIMm/Fe6c+78VN584wBUoXwHAFWf0jr8VLDujvxUxE8a/FfKPBMAVmUqdvxUDShjAFSbsM8AVP19YvxXF9fy/FQm04r8VX5IBwBVmVBrAFcg2h8AVUqOkwBXHVmDAFZbVTMAV9ajfvxUE7JDAFUiwrr8VXDHNvxX8iVnAFVR7fcAVsuwfwBWEQxDAFTd21L8VoAkCwBUCKlzAFY2gHMAV91olwBWfY/a/FR+6OcAVYIIYwBUrikPAFW0EZcAVwBLhvxWrbKu/FVXtyr8VR5HkvxXv5IDAFa7nIMAVJq0ewBUQttO/FTS/678VO3ezvxXNGNm/FZiUz78VHHjevxWsp/C/FSDcJMAVURMswBURzr6/FeFVH8AV5OoBwBXsK5a/FSoCDsAVMDjLvxXU2jTAFYTODsAVe1XsvxVmtEbAFdZlsL8VEHy3vxWXggvAFZhgE8AVK9gowBXnQRfAFbcs2L8Vlk0swBW8mX/AFe8X8r8VoyRWwBV+6Pi/FfsbM8AVukPPvxXV2xHAFQN+1b8VxoQFwBWEXtm/FRgVDMAV71KnvxWamcK/Fdx3FMAVtlGEwBVsOVDAFZYblMAVDFQlwBV3a0bAFblg5r8Vqxb6vxX6nm6/FR5npr8VOAwpvxWDbLW/FQ2CjcAVVQyKvxVvoeu/FUTVA8AUGwhREAEQ+AMQ6AEQl/nCLBD829UnEIuDzhgQ0r/QKhCFlO0bEIC3lAQQ8M7fCRDSka4WEMqrzToQ7IDBAxDnlsEUEOjRkgkQzuPGIRC5sf44EN7f0iQQhJ2NKBDx+8AtEJCiShCarpw7EJby+QEQ9fH0EhCY5f4TEIzjlgoQ0cerChDuwNwUEOyOqwcQmoDUHBCbuJExEKz/hgsQ0+6yBRDXrNEVEP+P8gUQ0sHzJBDhwoQtEPSyyS8Q3s7BDhDNg/k3ENDo9zoQ/7i+ARCDlLUREI7q6QcQ1qSnCxDivZ4YEIHF+xMQ6dOHDxDxk9IIEOud9xUQ5eiULxDttu0XENGD5hgQ86OsJBCn0aMuENvX2RoQm7mjHBDpw8cTEN+Xux4QjqakIhCAxMkLENTMgyIQpbHkEBDQ9o4WEPbkoC4QhabrJhDhgaUBEID3/zoQnvOdKhDi7M0NEOKklS0QqPK1KhC7ppkjELXw5DAQ1vaCKhC3+Lw0EJOduzcQi8+3OBC1+Y8fEM3s5jwQGxwgADDoATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFo1cu72YKU4IVSlAGTAQoDMi4wEwjiARWuAMq+Fb150b4VGDfOvhVJ1Ne+FV3h3b4VyJDRvhVaK/u+FXEm6b4Vj2EivxXUIuC+FZPR7b4VMib9vhU+hfu+FZ7VDr8VT4gHvxXadA6/FdKRKL8VifoyvxXax0G/FSpRNr8VFcwavxVpaPu+FVVfEr8VBvgPvxU2+xG/FV5sEb8VbC0HvxUnNBS/FdDCH78VdFoSvxWs8Im/FUNRPr8VcKY/vxWZ9zC/FV0uKb8V8jdnvxXEiTi/FdQQRb8V7C1HvxVoSju/FQWIc78VmnE0vxWNATC/FeOhIL8VZ59JvxWzghO/Fc0fe78Vhu9LvxXLPTO/FYOhdL8VkgRMvxWh51W/FdOIL78VQWQRvxUM3lW/FRgOI78VyaxRvxUhRTa/FUEmXb8VeXtDvxXCg4O/FVskoL8VCmytvxVTmZq/
FYQpib8VIXOuvxVCXGW/FbMFTL8V25qGvxW+j0m/FWG7Pb8VfmqDvxW3ttG/FS1uR78VDENFvxUh04K/FWHUhL8V4mp1vxVVRoi/FSKunb8VjXNXvxXk+Iq/FaHorL8VdzWDvxVxY0G/Fdd5or8VvoA1vxUVLMK/FeZ2ML8VmRttvxX/11W/FeVEVL8VxpYfvxWuGn+/FTL2x78VpzOcvxXqDJi/FSX+vL8VJoIdwBU+QYS/FYfgkb8VDfTMvxW1tpi/FQRBbb8VnxF5vxVlxKq/FUbwhL8VdzklvxVKUxu/Ff/EgL8VGFVxvxV5fGa/FcAfaL8VVvWdvxWEToa/FS49V8AVqh0CwBVpk4y/FR7SMcAVjaDGvxX05Q7AFYn+lr8VAeMnwBWeYgrAFR8RY8AVckzQvxVZUuy/FcDqsb8VEtePwBWz9BTAFc1fJsAVRJRHwBUNwf+/FURMNsAVH87VvxV9wh/AFXj20r8VGTdcwBVN7yjAFS8mP8AV1uIqwBUWMLW/FcXUy78VGtS2vxWq2Zi/FXhwKsAVjjvXvxXX3CrAFTuT778Vd2u5vxXjD3nAFX2j5b8V3GOuvxXEHQDAFZqNcsAVpeKCvxWxT/O/FQjTPMAVULONvxUJ496/FaZX3r8V3KiUvxVIM++/FX/08L8Vsw6QvxW6g9u/FTM54L8VGm6MvxV6ywjAFcokq78VxnymvxX6HdfAFexXxL8VZ9yFvxWtbKO/FWi4W8AV7zjUvxVUaQbAFbNg2L8VJKrpvxVkx+u/FQJ9Zr8VsLSKvxULEh/AFfwXI8AVB074vxVgMLe/FTNz1sAVcEyGvxVRgjnAFRUyUMAVNxcRwBWbYAvAFUNFMsAVyrbbvxU3TgzAFTWR0L8V3jdHwBUeA1HAFepzTMAVERkYwBXgfM6/Fe7c+b8VJ/ktwBWk1ui/FY25H8AVoJVhwBUZA+e/Fbgxtr8V5F0KwBUXsfK/FU8Nr78V6LK7vxVJ89C/FfW/sL8VFWUcwBWBktK/FQ9gR78V1uwfvxUdoKK/FS/8x78Vi0EpwBX5Y8q/FU92hr8VVu3TwBUcob/AFBsITxAHEPgDEOIBEPGo2RIQp6zDCxCxo5keEM6P2h4QqZasJhDRwd0lEKWr8SYQ+PSKJRDR16A2EJzhvRUQ4fiyHhCuwIw1ENOfpDUQ34mVHBDUtok2ELyI2jIQ6tD6KxDXycY1EJO1sgIQlpGPCRDb7tMGEPqcgRIQqtW1IhDtxo8VEJL4iRcQorqLCRDyy6MyEKnn8zIQ5u3kHRCxnIcgENH2iCIQq6z2MxD1+cIoEOyL4yoQr5a4LRCK6bQbEJDFjjkQj+7PNBDqqsIyELzw0AIQ9sqeCxCJ9b8xEM6/iwcQyNmiLRDBq8ISEJiH8xMQ5Jv5FBCiyd4UEKPB0REQ3++8AxD0z4IdELilpRgQ4a2yEhDk8vY2EN+ZmhoQwuzQBBCLlvwEENP80R0Q+MTqHhDHhv8fEITS2gcQrsHMIhDlwrYuELP26TQQjbehJxDjzpUoEPjKyykQ/96VBRDUmoczEMSizy0Qkvf9DBCn9PAxEOXUsTYQmf6zLRDnkao6EPEDHCAAMOIBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWjTuOXb3uG7lAaUAZMBCgMyLjATCOoBFRHH7b4VLEwNvxW/4wy/FeJdDr8V/fIRvxVAmhG/FaHnDL8VPecavxVvchC/FQVVIL8VPt4SvxUOoSG/FYdcGL8VsTQPvxVkKBe/FW9cIL8VAdWavxXI/hq/Fe0AhL8VMV4qvxVWGUy/FdSYFr8VuX5GvxWKwim/FXAOeb8VQAEfvxVJ6x2/FXhMFb8VqFkTvxV7IG6/FZcvML8VZOtqvxUP6GS/FY0vo78V+1mmvxVZrmO/FXvwH78VIdCYvxVIP7W/FdLHS78VMWhQvxVxPVW/FamlY78VyjUpvxWZBne/FZa+Ur8VykqOvxU1SEq/FUNr
lb8V9I97vxUuxXq/FVKLJr8VNX8yvxVz/U+/FZbmV78V1tcavxXZZRa/FQVCQL8Vfp9tvxUkr8i/FZSDhr8VV52PvxUvWmW/FX8YlL8V3a+bvxUW0Hm/FYsJlr8VGbbQvxXAQLa/FWMoOMAVreiqvxXzonG/FUZJib8VLtWEvxVMkyu/FT50CsAVtN/WvxVih8K/FTZ4u78VruBYvxWYm1y/FbhQkb8VOt1bvxXFbqi/FTmSb78Vv+yxvxVd/Ke/FQpAT78VcceQvxWlmbm/FfQ4jL8V6Oq2vxWlK/W/FXWJkr8VcqGsvxXGF1m/FT9SpL8V2Q7JvxUNfwjAFWk2nr8VVGvmvxU1URrAFRj6lb8VJZpBvxULw0C/FQS/er8VNO66vxWiU1q/FSubb78VdhhgvxWGhom/FZeLd78Vur8nvxUkvUK/FSE3Jr8V/CiYvxXYmb+/FYvW7r8V3IvXvxXASznAFTu+AMAVzrr/vxWAqGPAFaU6XsAV62vTvxUQUu2/FWKcJcAVsUdMwBUieb2/FTSrTMAVYPWcvxVwx66/FW+pv78VZ9lcwBWX10rAFbdz9r8Vyt2kwBXY1ca/FYxG678VTQtAwBW44EDAFdpvB8AVLPLTvxWjA4S/FVz9yb8VM9q+vxVfjQXAFSY1/r8VhaS0vxVOqKe/FZyOVb8VX4gSwBXVljDAFWXSOMAVQnE6wBX5YTjAFZ8mI8AVfp52wBWjLui/FfoNkb8VSXhbwBVqusTAFQF9F8AV3H1ZwBVgD5u/FewWXsAVjpITwBUHL7nAFRs9BMAV6dzFvxWqVhbAFdwYUcAVLJLQvxXgoz/AFVIFFMAVaWo+wBVgS1DAFeaUpL8VbHjsvxXKuTbAFazY378VtIKQvxXJCMy/FQmaCMAVR8IPwBVasAnAFetXQcAV7phDwBXlcwnAFfWAub8VtRPHvxXxu8i/FZxzqr8VTWl5wBVLmbC/FeTYDsAVU9vWvxW7IivAFcsnRcAVINpDwBUgD1fAFX88VMAVSggQwBVvkTXAFbOnfcAVKR6/wBXgkc+/FUPmtL8VTtlWvxVrLLPAFWuSMcAVQKoywBW16OG/FftiEsAVu6X8vxWQ4CbAFSCBkr8VaAClvxWhaSnAFUwVAMAVpATmvxUvzDvAFdBeNMAVO21TwBWoQ3q/FcOOFcAVAK0LwBVrMkXAFZ/k+78VHcd8vxW9BDm/FeydKcAVnkyzvxVtxNK/FBsIURAAEPgDEOoBEMihlCEQqrHNKxCmv58JEOT32A0QzvzVCxClyM8bEKDtlgkQ0JSAAxD2koYzEIiAbBCVqckqEMzd4BcQkMeOBxCZrtsfEKiYtwkQmKriKBDv+aoQEKu4xjcQiuO/OxCpqs4cENvfzAMQ1tmvHBDg57QxEOuG0zsQybeCGRDb3iUQ0OuPHRCOvbIyEIuOvSsQvdO1BRCe4pkzEPyUjA8QnprsJxDrtqw5EKjgxS4QoL70BhDJors2EI2G/zoQya20PRDr/L8HEJ+CzQ8Q0pgLEJ3YnBQQv4LsKhDQu4AiEKnh7AgQ3cT/HBCLk9Y0EIWC0TIQ+YLeGBCvmugwEIyV1gQQitTEGxDK/PAKELG+rjEQnJ+pBBDUzKUFEP64zi8QpZHxNxDXjfwiENaK8yMQ2aGaDRCen8kNEIv2qSYQxt/JJhDVq7AOEO/0jC4Q3tSgKxDn/uctEN2cmy8Q7rv/MBDTlv4aEI/ewTQQpr7tNRCDq8UQENi2ghEQ35nzEBDX27MRHCAAMOoBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWiOobOTjNPIwxyUAZMBCgMyLjATCOsBFW/B4r4VcFzyvhWYeOu+FQREAb8V7aD3vhXfR/C+FY9YGL8VKwAEvxUE9gG/FSnxA78VssEBvxUSMwm/FcubA78V2ekZvxWLFiy/FddSGb8V18AJvxVOAAm/FVMRCr8VwXwOvxV1TAa/Fbk4Rr8V1e0QvxVy
VAq/FeYkQr8VCuIQvxUlBAm/FbdULr8VZgAqvxWHGUi/FUPZRr8VtndTvxXUFCq/FeQOXL8VmEIbvxUzHRu/FZeBL78Vyr0WvxWgBCa/FZ12LL8VYFBAvxVacw2/FVi8Or8VdBRLvxUQ4JC/FeLPG78VRRk7vxVvvR6/FabCHL8Vvu1PvxUzqYm/Fa86IL8VOzcfvxWanVy/FfiiG78VobaSvxUcA2S/FbcgNb8VprV6vxUv77a/FbdzkL8VYiHYvxU/hYu/FR+Hg78VvbSJvxVpG4O/FczFqb8VPpYPwBUS9WC/Fd7Feb8VNcBMvxUR2iC/FT1QOL8VOw85vxVzH0C/FRiPbL8VrTcwvxUfTlO/FfI7oL8VC0O3vxXZElG/Fb6vpL8VMwyzvxVqf2y/FUqYUr8VdCxcvxV0BY2/Fenzbb8ViyVVvxWiOPm/FbMulL8VTnqivxUDB1S/Fdx6078V7eWTvxWiuEK/FXSn5L8VQsqHvxXJtEm/FQXwZL8V4TGuvxVugb2/FdQ1rL8V+ymfvxW3YCC/FTdDcL8V4zZDvxWxTaW/FfO0qr8VCIMzvxUg1Im/Ffrzrb8V2vPlvxWw3aO/FZaJn78Vb8lxvxUl5WO/FVcppMAV23D5vxVwDCbAFWW0ecAVCnevvxUes9m/FdIivMAV2zE7wBXhiQDAFQidxr8VOP34vxXwAS/AFeN5HsAVoMgWwBUjj9i/FUqrgcAVQZY4wBUGAojAFXKsK8AVTYQWwBWlUuLAFVhQeMAVavyIvxU0AeC/FY+U278VNHcSwBWeBI2/FWaugL8VL4B+wBW9PFy/FdS90b8VIsG1vxU1fYW/FZBcC8AVJqvovxWTC8C/FXSagMAVCy6LwBVmMV6/FXEQ9b8VxrXcvxW4wTHAFWUAJsAV3S81wBW8jJe/FTk4f78VRcOnvxVeshLAFTixLMAVHKzYvxW3hxDAFaVl7r8VsFTNvxWutY2/FZPyx78VkCTUvxW4oGXAFTbC7b8ViWuuwBUlAz3AFQaEsL8VkPlIwBUeNg/AFfEmGMAVF5S4vxW+kpq/FYh/0b8Vz3LJvxUPtBfAFTPfEsAV3KAQwBVNxVPAFfx/GMAVgym2vxWSGwXAFb8rasAVnxwywBVtShvAFfTTS8AV1xEhwBUjTALAFZ1oaL8Vufz6vxWoo4W/FYG0DMAVQf55wBWodB7AFWLoIcAV8vcRwBUQmu+/FYJs378Vq3PJvxXx28q/FRFU/L8Vod6tvxXYS/S/FXdaSb8VJttqvxUII7a/FYImisAVlZ4QwBXkPvi/FdfvFMAVWyYBwBVabcu/FdCoj78V4chSwBUtx/+/FYeMLMAVmISZwBVvvoHAFeot3L8VfjuVwBVJeCDAFf9xA8AVbkyOvxX6Cs+/FXxYg78UGwhSEAAQ+AMQ6wEQ/LC+LxC7wJ8kEKPMowkQyeveKBDX0tIQEJPG1hkQkYGXHhCv0tINELf06xoQr969EhDo59cVEL+3vBcQuej+CRCUipwFEOK90CEQ7LmJNhCPw/MqELq2wg8Q5tnuORCljbEOELH9gBQQt8fXBxD/0J0QEIPmqwcQ+bWPHxCu1fYaEODz0BwQ6+mqORDonLAfELvF+TkQjPHHIhCJiIMzEIeK8iYQwvCUBhC8y5wsEInwAxC4rqUzEPmxxzcQupiyPBC0iYQSEKSQ5y8Q8IXuExC+1ckTEJOWnBUQho/vFRCvu5wLELrwzwoQqqGrFxDp39EXEPP/1xgQtebDMBCsy/YJENPzxBsQxbzBHBCazr0nEP/ZhQUQyKTEHxD23Mg1EPOS9SAQxcOuExDEkbgxEIWr7CIQiM2eIBCh68gNEKCzgiUQgPTMJxDO2qEoEIOq5SoQ6I+XLxDEr9IsENex4zAQ99uYOxCCroMwEIv4lhgQ/dOpNBCUt9M4EPG6kzwQ+62FPRD4AxwgADDrATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFo2cvI
44T4p/lHlAGTAQoDMi4wEwjoARV+ahG/FcPZE78VvJAUvxU6OBa/FZwyFb8VhJ8VvxUiwBe/FRVsHL8VqH4ivxU+eya/FT6qGL8V3+EWvxX3yxu/Fbj3KL8VoYEnvxVlOB6/Fd0OZb8VtnIyvxV4ayi/FWGOM78ViVIrvxWyX1O/FfFCPb8Vh18nvxUFDSC/FTGiLr8VIZQdvxVfai+/FS+6Lr8VThFdvxX7Wyy/FfBtdb8VtOcuvxVF7IS/FZ9Ygr8VF9yWvxXThjW/FeQNZr8VrzeWvxWDsX2/FUwHmr8VUdZMvxXv8Yi/FZqvgL8V6+uSvxXKmlO/FZiuRL8VG9KZvxXfNDa/FUn3JL8VT3YsvxV33EW/FbU1WL8VEYk+vxUYUUG/FXMFP78V7Lw9vxVgOlK/FYZthb8VE6+zvxUGsu+/FTfXPb8V6iOHvxUaFXe/Fe9Al78VYjmRvxXpnk6/FdfekL8VYjvKvxXi/oW/FanVqb8VESeZvxVJPcC/Ffpj+78V5Ht+vxXfQKW/FTzx0L8VgLCfvxVzlcu/FbVlNsAVpg7YvxXX4LK/FWz3wb8VmSWhvxW1j6y/FQcUjL8VY2msvxV2yYa/FdVqgb8V56MSwBUgWAPAFZrsp78V5VGNvxWHM7+/FSogcL8VPkGlvxVa4qG/FZGdu78VN7+DvxWqRme/FeDhZ78VQKKMvxUfbWu/FedOxr8VYX/mvxVSbV6/FU+6wr8V5b6uvxUFekS/FWuBmb8VLnaKvxXsbUq/FdKaXL8VakqavxW7kUa/FX0nW78VkZdcvxX9m7G/FcBYub8V7cPrwBUxT+K/Fdg09r8VYAUAwBUYtuG/FQfLf8AVoTjjvxVeGuG/FZwUrr8VSgUiwBVW+qG/Ff1+K8AV8xUVwBXvtyTAFdOMQsAV6EHKvxXhCLK/Fdj7q78VOhGNwBUYES7AFQnWscAV+dLTvxVDcue/FR6ZksAVDFUNwBV/l5q/FfSw9r8VKRcIwBXidw3AFexfPsAVyNznvxWgecS/FdP2psAVksNlwBXS2JjAFQxwCMAVFoqzvxX1vgHAFeGllsAVLqM/wBUiKYDAFf3BP8AVzpRGwBXiQBzAFYDkKcAV55T5vxXOAgLAFXa/KcAV2pf/vxUExbe/FWRZG8AVcHTOvxUPeeC/FX8Xn78VGmUDwBXGocPAFcGxTcAV9CbEvxVaYva/FQlcA8AVu3NqwBXZySPAFaHDasAVpZBawBVsi4LAFbYYRsAVeX2hvxWoSlbAFRr1b8AVKNT8vxVKUum/FYg4L8AVCeoTwBXprbK/FR6taMAVgifAvxUjF+i/FWp6z78VqZf8vxU3lAjAFSI8nL8VxXB3vxWg9oW/FQG73r8VYO6kvxUAAcC/FcZvi78VUx/pvxXYDDHAFQHCF8AV1JcJwBWRjSTAFeoAyr8VLA16vxUH4AnAFX+ETMAVDL3svxXzccu/FdXE5r8VIn1lvxXCONLAFeW0GMAVYWfbvxXMETTAFZtwWsAVek3rvxXp6iXAFfcYir8VTThewBXXORbAFRO2wb8VKVsAwBULK2q/FBsIURADEPgDEOgBENb7thIQsuzbBRC9t4wnEPiJviwQiumtHxC1pOgIEMn2rx0Q+o+8AhCVyOEwEICdthEQmPGCFBD1iLokEPCz7hkQjOiTAhCCjLYgEOnF5DgQ76unLRCQ5cMzENS/0wgQ3ZGUAxDz1bEnEOvCwwYQ4LucEBDKw+wWEPmu3hwQ0JnbLRDJvNsKEJyF7iAQla22CxDCnOIpEKSj+joQiKO6KBCXqKEqEPWkmBAQsbyRNBC+y+AkEKaQlzYQ/fCiOhCK8YEQEKPf5R8Qq+afExCAzP8DEPC/oCkQ2p/HAxDnxr4jEPDShQgQysuWNBDSl90WEPC88iIQsvSKCRC4vOQYENv63RkQlKLtBRCS1fglEOWltDEQ8qrRBBCHsPMcEJ664BQQ1YjnHRD/7PMfEIbi1CAQu/GE
JhCFnqgiEOXk4QwQ4f33KBDJwcklELyrtg0Q37qIFxDzto4uEOf8wCcQyvTFMRC4640GEPH57TMQ+I/vNRCu4+43EOPa9DkQk9fOOxD1AxwgADDoATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWPgDYAFo9svS78jo/qB/lAGTAQoDMi4wEwjoARVqxQe/FbJRCL8VYwsOvxV2dwi/FSsQDL8VSqUVvxXs5Re/FbtaD78Vh14ivxUMhyO/FSyCDr8VoTwmvxVhqT+/FdnGKr8VoXohvxVIoB6/FeWFF78VpIYovxVfeyS/FUKuU78V42ovvxVEKxG/FWA6I78Vc3Y1vxXUJnm/FbXrQL8Ve8lEvxWWSz+/FWR0bL8V6+BBvxXlWm6/Fdn8Jb8Vz2Y0vxUrHiq/FSoCRb8VscZNvxWVlim/FYJ9U78Vf/gxvxVDmHO/FblUgb8VkaM0vxUtAEW/Fawtfr8VY2EXvxXVaX6/FfP9Wb8Vv3pSvxXu742/FazEgb8VfxqHvxWZNJu/FdF2XL8VIy1FvxXvJmq/FUFyU78VjNFEvxVdCZq/FZNdir8VxiPFvxUIR2q/FZT5rr8VqF25vxW3erS/FWd4W78VxQfMvxUsLD2/FcFmK78VaR9HvxWmh2K/FSJjhr8VreWgvxW1TJm/FXXoa78VIlCXvxWNR1W/FcNJbL8VQ5BlvxU+UYS/FfO5m78VLb19vxVlFZO/FQ1Yk78VmOCyvxULiXq/FYl+s78V4iNyvxW6eKO/Fbr6hr8VwR4YvxXetZS/FTHb778V9/KTvxWUcui/FQPPZL8VOYhWvxVXkq2/FQhGGMAV+Q+7vxUkdZ6/FSASpL8VbyL/vxVwhqi/FYjNyL8VKAP5vxWY+bi/FSjKb78VSMltvxWHqq+/FXeUor8VEUSAvxWssYu/Fc+Bpr8V3vS/vxUKdUi/FZyE4r8Vy3j2vxW2a1DAFZns5L8Vv/TcwBU1dTrAFZ2G+78VHysOwBUZDwTAFTH7yb8VZTwAwBXxPA/AFeG7CsAV0yiAwBW8c82/FS6UUsAVfBeWwBWTVGrAFdLF+r8VuOYWwBVaDcW/FYEMEsAVMcQGwBXwCADAFfjlYMAVJF2AwBVzqzPAFXfesr8VWK5gwBVWpce/FTYn2L8VhmpvwBVxsjjAFeyjZMAVJI/yvxWKD+i/Fa5qlcAVWIiHvxUFJBnAFZgUIsAVXSG5vxUBx0HAFX+BO8AV8Kb+vxUfofq/Ff6c678V3t+bvxUey5K/FV7BnL8VDX6VvxUrmMy/FeH8KMAVJ2jhvxWlHj/AFfiyKsAVKO2uvxV+6RHAFYCCgsAVyOOpvxVxhY2/FVnQrL8VVz4EwBWenkLAFQNSsL8VR+tovxX6YTm/Feew078V12CFwBWCHPq/FXZ6rcAV3kLdvxWOPD3AFZoc/b8Vd0cEwBUEA9S/FQiUnL8VwGN1vxXCRIu/FXg9Y8AVctcNwBUtQzLAFZdBHcAVn00dwBVgs+e/FdVjzL8V3R2wvxVmisW/FTyMO8AVWs8FwBXgIz/AFZTdtL8VCKo0wBVAuAnAFUuW2b8VfWgvwBXUuwnAFZsZjsAVXdgBwBW5SJW/FYooFMAVBIKGvxXqC4XAFRFH3b8VH3enwBVz0QzAFeHlHMAVEf9VwBX3ppi/FdjUWcAVSjGSvxVE+RTAFRF8NcAV6i1OwBVbO+a/Fb3RhL8VsKf6vxWYIl7AFBsIURABEPgDEOgBEIzi3y0QpJbsKBDa+qIXEMnZ3QIQt7OFEhD7k68EEJ7W6woQ6ryzDRDXnbcgEITDlxIQ4NLiAxDWpOsXEPWz5hkQ3K2XHBDXpJEhEL64kSUQsf/JFxDHsp4yENOFxzgQn+qyORDLlawTEIWyhAgQiuXXHhDzs8kWEPTbtCcQjML7CRDyhKMKEKSC2AoQzKa2HhCbk+sfELDFxTcQ/qL8DhC3qJAOENSuqDAQlLzsMxD32YI5ENOdjBAQ98vIDhDf
jbIDEJ/grRUQo53uIBDEz9UHEOCT/hMQoL6JLxDNqbYWEOCB1RMQz+rmFRD+9oAJEIr5wDYQ7fi8MhDlv/AYELSCxAQQodh+EP/HuisQuZuEJxC5jIUcELmmlyMQ/P/AHRCX8tA2EIHN6RMQ7YTRKxDmvMsiEKzghSQQm8P4JBC+++0FEL+u5ycQ2dzlEBCq47YBEKTfrSoQkK2vLRDbv/0QEN6Sow8Q4Mv0NBCJ55w3EOvG3gsQ88q/OhCT6+Y8EI4BHCAAMOgBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWjUhIeT1PKX6mmUAZMBCgMyLjATCN8BFfaLML8VVVg2vxUqHTG/FYF+Ob8V28Y7vxXLQjO/FemHMr8VRo5AvxWA9z6/FddrQb8VxahMvxWySDS/FSEwN78VvkVNvxUi4jy/Fa/JaL8V1vFZvxUXDHK/FTGVQb8VjutqvxWTCWm/Fe3SXr8VvctgvxU6nT6/FT3ANL8VJWVbvxX2fTe/Fezzcb8VB0l7vxWX8IW/FWAMnL8V2H+avxV6X46/FdJ0ar8VK+tovxUUnYO/FRW9hr8VSfKHvxXWNXG/FYm6e78V+LaLvxUh4Wy/FdjweL8VZUPDvxU31IO/FVJsgL8V6918vxUwqle/FVsjRL8VFjpfvxWGFz6/Fe8SYb8VqnGOvxXBUVS/FUoCWL8VzIS+vxWZpOe/FRWD2L8VE26WvxUGpaK/FZ/qwL8Vg8izvxVLdQLAFc2rCcAVXrICwBW4e7O/FVb8xL8Vq5WovxV3aqi/FbqOcL8VJryavxULZa2/FUY3lb8VsbCuvxXVMrO/FRqFzr8VL7qWvxVrasO/FRLXyr8ViDiYvxVe1gvAFWh1t78VMB/VvxXoS4+/FWv9l78ViZ2FvxUtAZC/FSFX978VLK8UwBVZSri/FUJtnL8VJOWkvxXdboC/FZa5h78V0EZ9vxWtmLm/FbwAar8Vsw6DvxUHs5a/FUl9bL8VKPu7vxUvI3q/FfmnQL8VqG+cvxUg3ae/FaY+kb8VXy7gvxVEwI+/FUNGZr8VEGBdvxUYdFq/FT8cA8AV3L7/vxWucRfAFaRl/78VGqwawBXgi/i/FeOQLMAVsbOuvxUtl3vAFRobAsAVkYCKwBXP6QTAFSnX1L8VW2e1wBXGlWPAFZqxMsAV5h4swBXKcmPAFcz8e8AVargiwBURS7a/Fc3VK8AVmRIuwBUSfM+/FWyYWMAVnmYawBVoiDjAFZ/Irr8V9pW5vxUwK4G/FQ0ULMAV3pydvxWsbMq/Fa9UlMAVTiAlwBVj4CjAFZL3mcAVQ/0CwBUY8sa/FbNkk8AVod3xvxVaxNm/FcAnuL8Vooj5vxVK7w3AFWbqHcAVOyP9vxUtjgzAFdOBasAVVsi/vxXfMkTAFfU8IcAV7yQcwBWVANa/FXs77b8VQwZnwBXGagvAFfjazL8VoUfEvxXzX6G/FY7gxr8VyLV0wBXIpA/AFS9Fmr8Vvs0bwBVpi/+/FZIuJcAVxmoZwBWgWvi/Ffhge8AVHQHEvxWfNaq/FRorvb8Vkn3pvxXuGQvAFaaUo78VfOimvxVbh6O/FQChbsAV3y/mvxUZ6RXAFalTRsAV9Ua6wBWEDIe/FXa1pMAVUYOzvxU4jMu/FZHfhsAV7imavxUTljjAFbmOv78Voi/PvxWaRw7AFVQreMAVXNyqvxWAK07AFSsRzr8Vq6IkwBWizqrAFbhs7b8VXOXhvxXRt6u/FY1/psAVOXMFwBUTEPW/FUP2AcAVlQOCvxUm+4C/Fc1SnL8V7Z+KvxUNlM2/FQP2scAUGwhOEAIQ9wMQ3wEQ46DTEhCii4gyEKWMiScQ4cTkLxD4qqYSEKq64SwQr/XtMhDQzL8lEKHSyzsQq4GVBxCJ+boIEL/+0yIQrPSjGhDGibAdEL7MoSYQ+MzXJRC2+MMuEJTenDgQqvDyEhCMqrMeEJHkuAcQheD2DBDspaYVELmY3S0Q
yt+9GhCdxL4yEPb1xBsQnv+IMxDz+/MhEOe2hQQQ/fj/IxD19fI0EMCMmioQ/OmYLxDLrLo1EJzHmykQgbTWORDCtY8KEL73vBAQ0+zFARCCuJwHEP3s+wcQ6+qaAhCq58wBEOKw9hQQlqyYFRC4oYQWEOPTWxDCyPsWEOP9tg8Qj7GZCxC9xZILEL6e/xoQ5832FRDehJQsEL64gh0QjIK8HhDxmJ8fENXswiAQzoW0BRDm4cImELzg7TQQuJ7iJBDl5KAtELX4pSkQiqX/MxCIpdwyENHyvC4QgpCRMRC08pQOEPmYsDYQ7PehNxDcgbw4EL6y/zoQ9QMcIAAw3wE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaP/1rv7H18Lx6gGUAZMBCgMyLjATCOUBFWSW5b4VW6XpvhWrZOi+FWH37b4VSUjyvhV1AvO+FYER8b4VB8YcvxWa9xS/FV5rCL8V9ekNvxU0NhG/FZPF/r4VpSkLvxVdhBm/FcVXJL8VJWZFvxXrXRm/FS6ALr8V3HcqvxWCjS2/FeLMJ78V1WUSvxV7rTq/FYzDHr8Vl2sAvxXcBQq/FUvmFr8V1rUtvxXCMUu/FSNmdb8V3GRBvxVa1Y+/FQEiR78VWsBevxWOBxy/Fd+7Tr8VLccwvxV7QDa/FQaUQb8VUoRDvxWCFz+/FT6WNL8V9C9MvxUFqJe/FYHoLb8VTAktvxV70E6/FQKCTb8VGf87vxXEF0u/FZg8NL8V8TEPvxWLKBa/FSe3G78VvQwyvxWGBky/FQEPAsAVZX8XwBWsEpq/FVugar8V6W6cvxU7Gb+/FTlDUr8VzA5/vxWxuZ6/FUI1EcAVOvFPvxXdbsa/FX4/db8V5GR5vxXl/2i/FbXRhb8VwozXvxWohGG/FcX3gb8VgmJRvxWv2Kq/FT52Wr8Vmby1vxWcAZ+/FUZBsb8VIrnpvxWzA02/FbdTu78V8HCevxUsAla/FXYgn78VsBmGvxXJ7pi/FT1JwL8VZgCYvxU0Roq/FbaRNr8VDeOmvxUHdm6/FcY1Xr8VmoxpvxWIl52/FTgfR78V/ANQvxUnK4a/FXNChL8VrMEawBUvwWi/FSKUgr8VBqs9vxU0+ES/FehZG78V4N/6vxWtTya/FQ2iTb8V034zvxXX/L6/FU27BsAVFf4lwBX5Cp3AFRbNHMAVR/IrwBXPvQDAFcE/pL8V+3rZvxVVvKnAFQBkgsAVM2gZwBUEz+u/Fdrp3b8VBOgAwBWOqpS/FerMGMAV1jMQwBXtlcS/FXneAMAVXF8bwBWg3iLAFRp9Ur8V+UjkvxX0fxfAFXUHBsAVa+/DvxX7A0fAFaFCHcAVhMCbwBUkDeC/FZGblL8Veh3CvxXIlAjAFYww4r8Vwu45wBW0uBbAFXsdsr8VrTTYvxVSqovAFbDN778Vbqb/vxVzIPq/FQTx/78V6r+svxXoKX6/FTNQLsAVTIsIwBVO3te/FaAy6r8VW+OyvxUQwiLAFUtoOMAVK200wBVpghTAFY/Hjb8VOTf2vxUsXfO/FcuJ778Vab62vxVaHaHAFWxLrb8VQXrjvxXKqBbAFZSWYsAVfpjTvxUeQQHAFSuOK8AV+0dWwBVArhbAFSNF8b8VoGblvxVbUPS/FVAmyL8VJOSRwBWl426/Fd2OAMAVTK5FwBXiF9+/FYq07L8V0cW2vxUK2gbAFf/x9r8VqdObvxVdf9y/FXv0pL8VbGjDvxUDqoi/FVGdUr8VGhcuwBVljfq/Fa6F1b8VLerHvxUq5ZG/Fev3McAVzo42wBWzK8e/FbGZNMAV6aAnwBUxWFHAFau8+r8VoMBxvxVZoIm/FWJ7J8AVqsWrvxW1smfAFQl0KMAVoOoVwBXy40rAFdZU2r8Vey4ewBXMXtq/FeOjRMAVj8hJvxVxt3rAFWc8678UGwhQEAEQ9gMQ5QEQuPm1OxDSu+g0ENak0h8Q+5WNJhCSv/YQENvc9hUQ29+d
CxCAyNQvEKziqTUQndyKERD45+4eEMG3BRC0/7QaEOvj8ikQxvHvIhCqobQmEJLz+yoQmtzpDhDR0Yk8ELWo9QwQrO3iARD14uoJEKD/5BQQ54zsFRDpzIsXEOS5pwIQ543ZOhD6uZMCEPC1xwsQzcr2DhCho+obEODLpQ0Qj7beKhDgzOwZEPmd5S0Qz5+OAxDDt8Y2EODUyhcQ/ouhCBCX35wHEJfluAsQ49OxNRDh+LUSEOH4wx4QprTPOBDUs8kIEI792xoQtsDHFRCH3pMWEMOm2QkQ96LDHBD//6wtENKlhDQQkcS0ChDr1bcgEOme9AQQiZ2UKRDh4KorEMmO+CkQwcLUNhCc1+ojEOvY5iQQqdvPJRCE9dUCEJiDxSwQ8oPzJxDTvokFEPnVvCsQssfaKxCo+qoDENzGtg4Qxu2WORC5i6E0EInBhzYQhdunORDkp94PEPUDHCAAMOUBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWi4vZjDkY+OyzyUAZMBCgMyLjATCOIBFXpWGb8VdWAavxWGVBy/FQsqHr8Vl68cvxU5+B6/FVNwJ78VVqY6vxUARiC/FVbBH78V2ZAjvxXwjSC/FZzlJ78VfKAtvxXr1Su/FWYQRr8Vu0hWvxWfsF6/FWAeI78VyT0qvxX7ykS/FbwwTL8VOtwlvxXa5kG/FTo6Nb8V+eMpvxWACzS/FXguM78VusxBvxXzYGK/FU7TXL8Vb8SGvxXeMGe/FVJ3gr8VK1hgvxVL4Xm/FcTrXr8VQ7ibvxUXI2W/FWETcr8VNW1QvxVhQWe/Fbj+bL8VW1ZzvxWQslO/FZzML78VH39svxVjUWq/FTvOnL8Vn2ZdvxWVp3C/FR9oRr8VKqMxvxUBqpK/Fb8seL8VaL9BvxVazoS/FRMlZr8VVGSNvxW6OM6/FSWRnL8VjdzdvxVXzZ6/FXz0i78VT+C9vxUysa6/FcLDlL8VBvKJvxUphYO/FSuVhb8V1/WhvxW20d2/FZJQgr8Vv5GMvxVMlG+/Fauer78VbwS4vxXZE4m/FRqair8VAX97vxX7Fu+/FdlA078VE4ODvxUp6IO/FfD/kL8V+NGzvxUJ5IG/FbgCob8ViDmVvxUQsO2/FfHVoL8VvINcvxVYtjy/FUaVnb8VDMexvxU3Oaa/FTPbqr8VFDelvxXmaa2/FfKBab8VQUg0wBV7bqK/FbL5p78VLU5XvxUvh1G/FRFBdL8VIaJZvxXueqW/FYgJr78VOIi7vxUSR9m/FWvuRb8VqkTHvxVzA/i/FYt2K8AVr54zwBXE4FrAFZ59KMAV/DZGwBXbhxnAFXZgJMAVC7WovxVBRUDAFa1tM8AVD47lvxV7yBLAFUToD8AVNpPdvxUGL5S/FWx39L8VrfolwBX2Ye2/FfC3eMAVRjOavxUCh9O/FePy178VgjqovxWFZVHAFa2IA8AVb7GevxUun66/FcCrasAV27kZwBXheG3AFSVjI8AVx1xrwBWVzl/AFRIFJcAVSilewBUOyQDAFQW0o78VoVjhvxVv+xnAFc8uNcAV2qPHvxV2opC/Ff2CHMAVGKUIwBXPCifAFchT3r8V+QyKwBXI0wHAFay2+L8VI/gywBXjXbvAFabQscAVQz0GwBWDJhXAFfqOA8AVSXwZwBX+9+6/FXGKJ8AVTNUVwBW/BIm/FTW7h78VS4AQwBXpCRnAFYyup78VgYmhvxXt+ve/FY5BEcAVOq2nvxV8V62/FUlkKcAVXZ5kvxW87jjAFeSZvL8V1N+FwBV+M62/FTlP0b8Vd6NswBURnzTAFd0/dsAVb1O3vxV17A/AFaWux78VpO6svxXNoSLAFdsd4r8VrgqOvxV/Bpi/FdM1zsAVcvx0wBW3gNi/FcD2FMAVMyyUwBVs2BLAFc0Bgb8VPitsvxXTDvm/FX6rFcAVH6QswBVnI1vAFU4gHMAVUPhdvxU9lXrAFWezs78VFlvGvxVMJp3A
FfY50L8V+m4jwBUzuQbAFYZ1DcAV4xyDvxWZN5u/Ff8c7r8UGwhPEAEQ+AMQ4gEQiNj3LhCp/9s1EJaP1i4Qo6KsLhCrhcIDEI/ogxgQ7LbiJhDmstIFEKCelDoQx6WUERCVnqYYEL/0kigQuOjcCxDh4dUeENOC4C8QqZjgKRCBjYszEMCimDgQzvTLDxCT/IYREJarkAwQz/P+CxD05qYWEMqxgwkQtI7fHxDkv5obEMSQgR0Qr9bxChCn+sMhEPnerRIQzs6zLBC8+ZIpEIi5hwoQwqrBChCM9MUyEOXr7x4Qr6+/OhCFzbATEPvrhxIQw7G2AxCojKwREPPKyScQ/tCjKxCAlowxEMnpvBUQr9/7FRDTyIcXEOuC7QgQz4L4DRCH9ZIJEPzOlDYQ8tb4GhCj8Z4cELqQ9joQ+obGChCQz/QeELD/7jwQ0uuoIRC65uAuEI3K9hsQv8+4IxDlr68kEKTWzzEQ1atIELu2rioQ1uulLBDElPEHEIyjkjIQiIa1FRCKutE6EPmDwTUQ3rPCDhD4uIgQEPairxcQqor1OxBPHCAAMOIBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWjWmqzxvvm29ssBlAGTAQoDMi4wEwjlARWnhLS+FVcXIr8VHP0fvxWXAyW/FQnwNL8V3y8mvxVBTSC/FVnsM78VAxkuvxURYza/FdayRb8VDvEovxVYZzm/FbzwJb8VpnFevxUC40y/FXIDfb8Vsro3vxWC2Fu/FattSr8VNm5KvxWUW1C/FQ8bVr8V47NRvxWFlDi/FUwWTb8VDUE6vxVJiyy/FWffQr8VCqNvvxX5Z5K/FbDoiL8VSRNkvxUkeba/FTM4h78VbhxAvxW31z6/FSmrZL8VrTtqvxV3Y2W/FU38Y78VL8aJvxW9lqO/FdSYUL8VBx5fvxXuUZm/Fao/d78VoUlrvxXqEI2/FV0Je78VgfFOvxWYio+/FSWVdb8V8JNSvxUJb0i/FXrQWr8VtO4wvxXs7n2/FXz4a78Vww6UvxVO+4W/FU2Ulr8VN8gTwBVNZPq/FRPdt78VGgtwvxVyJZe/Feg61L8VdmXFvxVWY5q/FW2Opr8VVHqVvxWo/tS/FSIzdL8VKDBuvxU66rm/FTLSk78VFJRFwBVPEne/FfS9mL8VkdbZvxVJKZG/Ff+9l78Vc1qPvxX6Mdm/FSdE4b8VvNHFvxVLlVa/FSGdp78VHYtuvxVzRIG/Fc7dt78VPEzQvxWgsam/FfOHz78VpxaEvxXuN/2/FcY2wr8VGYXvvxXls7G/FWAKkr8VVhTMvxWFLbK/Fcmfn78Vc6isvxXS1aS/FcyCGMAVwM2BvxXmq1y/FUkU/L8VYZeevxXkPI+/Fe8Qgb8VB8nCvxUm5h7AFT5qUcAVXoHqvxXO2IO/FfjOsb8VqyyEwBW3xQ7AFRKRlb8Vr5ywvxW8n9S/FdnEecAVN3IWwBU2oxbAFSZsisAVMNB5wBVwgUDAFeJfCsAVOIhCwBUUN5K/FbUmN8AVMupkwBUNihLAFaeqicAVvoMfwBUzet6/FVSE7r8VFX4OwBW95THAFYfjXMAVdsK5vxX75dC/FeYypcAVGXUpwBUMLM6/FVylzr8VS5aovxXzbBXAFQmPyL8VuSPMvxV1MQjAFWI2GcAVf3eBwBUFp1HAFe3n0r8Vr3/MvxXo0TDAFbDRHsAVw00VwBXPwXfAFeUxEsAVsxa+vxUJzS7AFSGdIsAVm6AtwBX7lCvAFZDY/b8VMGAvwBW58uG/FUMmEsAVi3TRvxXK1ADAFdrVJMAVIiYRwBUoMTzAFc8/rL8VbhG6vxUt6MK/FTGk2sAVh4XOvxWqbgbAFfQyyb8VXZkawBU6J/+/FZnXsr8VwyWtvxX1l9K/FZtD778VNoOywBXpWhLAFYWNAcAVL4YRwBXredG/FSn13L8Vu7aXwBUM/b3AFS3A0L8VfohwwBW7TUHAFWtJ2b8V5nxJ
wBWpRN2/FSEIyL8VjSI7wBWnWL3AFSxF278Vi33VvxXOoLi/FdJfwL8VLxW0vxWmQS/AFcI3ccAVcU2DvxU8n5PAFUj79r8Vpa8gwBVyQIXAFWy2B8AVeN0gwBWoW+S/FTTgEcAVFEbRvxVt+NK/FTRtiL8VT8YUwBU2YxLAFBsIUBAAEPcDEOUBEJvrtBYQ4unsKxCo5MkZEL3T0Q0QpbapEhDQxqkTEKT9tAwQ7eyINxC9vMY1EIiR4xcQzeexFBDv/9cXEPe6rAoQ4Z/0HhDUvagjEN3G/ikQyefWLhDM2/gzEPCF6QUQjp+MEhCMurkdEPPj0BQQgr2BFhDRjL4XEJ2/xwkQovfmBxCyhK8dENGY5g4QicmDIRDJxbsjEJ79tBgQldSYKRDjpoQOENOt4C0QvvnnMRCnsf8zEIb/mzcQ86PhIhCg7rIPEN6f1QcQqNjvHxCxwLoTEKrN7xMQu/6xCBCUoosFEPv54QgQ/oXSDBCK450XEMLEzBgQxNGlGRCN0XsQmdiNChCM2JEbEP+cnB0QhLr4OxDlvfwKENGa3x8Q8KPsIxCCyKYiEM9ZEI2P0B0Qkf6ALxDOgfYvEPzMgikQ3q6SKhD/6N4SEKzZqxsQk7vBLRDV5pYwEI2HwjEQlsrdMhD9ppcSENyU4DUQ0/24ORCxoqA7EL6+6hAQ9wMcIAAw5QE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFj4A2ABaIWDzYGPueDAK5QBkwEKAzIuMBMI6AEViB0IvxXUEA6/FUfQDL8VzJkUvxUySRS/FavUD78VlzMNvxVMvRi/FdlQJb8Vz4wcvxWI2iu/FViIWr8V0lUovxXziB6/FbenN78VA6EwvxXUpia/FaIwLr8VauFhvxXxdx+/FWkyI78VNS8svxW0I0W/FWLmXL8VDDBgvxXT1G+/FahLLL8Vta04vxWQWzS/FSRSWr8VAkpHvxWnOU2/FQV5Wr8VLNB6vxWaoTq/Fbwxgr8VEVo6vxX1OmK/FTdre78VvlhRvxV51Ta/FQ5tYL8V7lwyvxW70S+/Fe3GVr8VEQSKvxXRM0y/FUYii78VnzltvxU+XHe/FfSmqb8VS5R9vxXbnXO/FXgYLr8VK5FxvxWKGmq/FRqlQr8VuyxSvxUSoP6/FdKycL8VRFONvxVk9b2/FQThqL8VQbSRvxWHKom/Fdc0rb8VUUidvxUowoy/Fa/lp78VF3havxVYrUm/FXxHor8VqA2nvxUpdmS/FQh+iL8VMSmhvxWXw4C/Fff3x78VE+ervxWdw32/FZINar8VST1mvxXEslC/FVtKZb8VdQRuvxU+9pC/FcZcA8AVaNm4vxVDZFe/Faru3L8VcbFYvxUUc5a/FTNzi78Vzr+ivxWks4C/FQK8+L8VXDW8vxVZB4i/FQbcir8VVtDQvxXHX6S/FXw2FcAVFXjUvxWM3cG/FeSKkL8VrC+SvxVOGZi/FffXQ78VaENVvxWllIO/FdTOwL8V8CeFvxUHJ9a/FaH3Wr8V9J1VvxUVlWC/FRPRU8AV6BwKwBXjZk/AFWGPoL8V1RiovxXZ19y/FYYIoL8VROW+vxXJC9m/FafpzL8V0pZbwBWcA4HAFTIvqL8VaEemvxX6GzPAFbR8QMAVTRINwBV99gjAFc0T678Vy2vFvxXJ1BDAFRNr+b8V0D43wBUlcrm/FaZM5L8VjJmpvxWfZF+/FX2A1MAVgYsPwBVBok3AFRPN/78VQ4IKwBUW7xHAFThkE8AVK8XivxUJ2aW/FRwjK8AVnE69vxV6MKG/Fb5q078VfYExwBUCSDDAFcoIp8AVVx/pvxWm38q/FVbIxL8VouAHwBUKZQDAFV4u+r8VVprFvxXNJnDAFR991r8VU62QvxXBTRjAFQNfwL8VGfqhvxWcylnAFfeJHcAV4Z4JwBUaYBzAFVlpW8AVO2MCwBXpoMG/FezI/b8VDohbwBU9/QfAFQUPir8VAMiqvxUYX0rA
FXVO278VaUsbwBXmgy/AFXwmDcAV6JoHwBWzVYW/FYEPdsAVucRJwBX/6Ma/FWfOGsAVUICIvxXqlK/AFSwdnL8V973QvxVxwgvAFd62j8AVJVzQvxVtVsK/FdwCIMAVLk4ewBXpUpzAFc0RB8AVXYbhvxVCAcK/FbA1SsAVfAIrwBWFdRTAFY1PUcAV1gBuwBXsWZ2/Fawc/r8Viq+4vxXBkRnAFTRt178V+kOqvxXAify/FZA1sMAVcI4+wBVCB0bAFY2R3L8VwnsBwBXuivW/FQdTjr8VEEiAvxVBvoq/FYaX1b8VSlY3wBQbCFEQARD2AxDoARDR264OEP2r2DEQ6IC+BRDh0Ok3EMGEvQMQ66CkHhCV5f4KELzakjgQiY2+NBDC17oXENS83xMQ1KZdEPKppysQhKP5BBDc3NsLEJvolTkQ9OOBEhCJzoIxENz+tDoQwpGXKBCX/OIHEIDsrSMQ7Jr+JxCeksoLEIG0kTMQ8Jr2GBCOu5sCEL2R5hwQ0aSZCxCDs7gfEIbI6iEQ0bjrIhDouqwmEKHXxQ0QjZbEOBDHrsMOEKi+6TsQzPXgORDP+68QEIOPpCkQis/xERDmscILENb/nhcQ3Ln8JBCcx6wUEOzRsDgQo++ONhCgk5UWEITgkBMQ9rahChDw5LIYEI6b3woQ0ovAGRCbpNAaEMOMphsQrt3yHBDd/vUhEKXkqzYQjo3EHhCZ/fs0ELvEkgEQwuOZBRD3oYMiEKjfnjIQ0qabKxDQmss7EKvE2CIQ+6ygLBCpnqQ2EJOkrS4Q9szvHxC92q4xEMPFqjUQgMP4MxDT1bY3ELyn9SAQ4N76OxAbHCAAMOgBOIACQQAAAAAAAMA/SS1DHOviNho/UABY+ANgAWj77ojXqdy2xKIBlAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQov7D7QMQp7C/dxCj88efAhDx6uvnAhCi3YepAxDrtfr0AhDu6sq0ARCx/+r3AxD1tu9UEPXpwroDEO+7jP8DEKbrle8BEOuoxqwDEMrTvnkQ3bfXGRDtvufyARDx15yfARDvkK/JAxDR9cLQARDG+Mq6AhCuvdy5ARDVu5bnAxDRvqInEM2r5esCEO7NvdMCEO3w3N4CEKH2xN8DEPGuhJkBEMabi+sDENaT2pECEO+KyOYBEOLIwiIQ9+v99AMQ87XHUxDLsqrjAxD62KshEK+MrF8Qz7cBEAAQABAAEAAQACQq/AdFNIgmQpdbvkKJhYBCkOdMQM9PT0KjFPVCWleKQq13KUKukJNCRKjeQsPWZUJxA7pCfzOoQpjqxUKuq/FCe5ciQnM0YkKMgK5CoTGDQbcThkKPhyJCkggBQmUZ+kKoB8VCswIbQpPeNkJf+UdCiR4pQrbLhULBZvFCVGlGQQTn6kK/+5BCjNGJQpQJpEJb1FhCv+sTQkMmS0KOjxRB2khIQpMDqEJW5aFCkcSPQnr/kEJg+CFCk6iGQmHyWUJzLEFCho65QkzNA0KyqKNCWh+XQpi2JUKNoElCj28YQoLGPUJ1sBtB2n00Qp11L0JRMd9CXZxLQm4+ykJSzVtCuaPqQsAzAEJLTsJCt0BUQqHLrUKkNT9Cv/FnQrSoj0K0gORCcnF1QkpEWEKRt4RCSP2wQp6LVkLAUDxCoTJ5QsO1okKy7GBCU66sQroMxUJR/ThCkBZeQsBk5UKt77xCdAEsQZ9d3kKPwaFCjEfpQmmJlEJOzHpCS1lFQpZYNkJYUVJB8QnUQf6K4ELCR7FCwFRjQsK4m0KNdkVCTl26QlBFPEK0MRZCVOcwQsFoukDxdORCixUiQsT0+ULFhmBCiW9RQjNq0EKP/xNCtvP4QsEH4EJoVVNCkucMQoIEGUJWeW1CpC+OQpDJBEJ/O79CwhJQQkzFbEJHmF9Cob1hQqOCxUJG/uNCabQLQrz/bEK4PgtCmci4QrBD1UJj3CxCinj9QqufJ0Kx
eJ5CnQgQQrmEokKWE7VCuRjZQk8rFkLDrG1CqNKqQkgfPUKLF4dCsr4BQrK/EEKAqX9Ci4AGQkckQ0LC1s1CuZhpQpBq3kKPP5FCiLsgQoTAekLE1aFCv4veQniwPkKbJWlCmU2vQk+4ukKPb5FCvoD/QsS9O0KmfvFCwqVtQrSDUEJJlnFCoLFqQrfjkkLBwz9CwwfDQr5el0K/dpJCkCu2QlRUoEKbypJCWEbJQkeQNkKZualA3tIsQo7/c0K7KlVCs/WAQk4U9EKfEtdCnhg/Qp2UO0K7kydCexQsQkdr6UKayslCxf87QpZ1PkJZtTdCdtRCQlKzbkLALvZCZ6zKQllDmUKkusNCUKnHQr7PqEKyh/NClPdYQsCv9UK9GgZCvSLCQk3VekKACBxCjwFGQrpDMELDQn1CVZeuQqZBLEKU5FdCvje2QsYna0KaT4RCVonnQrslykK5BVsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjhAVMIERD///////////8BEAEQ/wEQ6LOaqgQQkZvE0wIQvej95wIQ5NCHnAIQ4ZSz2gIQ9rKh6gMQyZmI3AIQydTGswIQkY6I1QMQ+L3/5AMQqczy2QIQ5rivCxAAEABUWwgREP///////////wEQARD/ARCa0vjcAhDP79mLBBCOurTjAxD4o6KiBBDW9fKVAhC2+baqAhD0+tbcAhCM/sWzAhCG6dmLBBCL/uicAhD89LeMBBCp16EKEAAQAFxjCABkaB1zCAB0eB2AAQE8QQAAAAAAAAAASAFQvMCQ4b/T4eXAAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ2o+eORD307R7ELaJ1N0BEMqzhp8BENH3j+cCELasz60DEM3W5psCEOHX/q0BEMqJ9KwDEPeSpfUBEKadrrMBEMuYv18QzZyN7QEQ9ay92QMQ4dfn/wEQ7rGn1wIQyfzMtAEQ/Yj11QEQrduK6QIQobbC8AEQ/p/fdhDDj56uAxDSiY+VAxD/k4KRARCxmeZZENGKu5EBEO+755MBEKKt1bMDENWsxK4CEKuZ9tgBENGa6t8CEKHt7NECEK2R/K8CEMupqqkCEPqK3toBEObvrrMCENqbwtICEP4HEAAQABAAEAAQACQq/AdFEgniRMthUUKAtY1CafJYRQRsyUIJyC5CMWGPQl7lc0K3bzVCnXPjQppZeEJ26g9CxMHGQoTiKkJ+wmpCqf2BQnKgukKG9LlCtWQGQpyowUK0dQVCTVNAQryyBEKCyE9Coy3OQrlLHUKwF7lCEdnKQsPtjEK9J9NCt80kQpDHgUKJZFFCTBfPQkacG0Kx7yZCjdOtQqlILULDaKdCegCjQkokvkKOORVCoHA+Ql94mEK4y5BCXxaBQmBms0KytFBCw0gwQrEcJUKakDRCvPaOQpP0RkKt9PNCRvfWQn6+NEKssTFCtBQ8QrNyM0HN1aZCuikzQqND8EJz9e9CTtLeQqi7mUJES65CZWi3QsFxAEJ3feNB1asuQsV0QkKdKalCiNyJQq46Y0JgU2FCr7kRQq1jQULAsglCoguwQkkskUKTOjRCvsKJQq9lBUKf9NVClRTzQlKaFkK8y2ZCXiccQmvRH0JIo1lCuFvCQme2NUK1VoJCcZnOQsZA3EKOxk1CqfGJQsSxMkJHJvFCu+ZBQpXKRUJRQhdCj44YQkj7A0KgcsFCxIfXQkWT60KKMqJCmojIQpj2oUKSaoNCnEQsQr8qEEK2C7RClPWAQpYq0EKtz/xCn4gDQqPMOUJQIwFCuqK5QlOY7EJYkMdCS484
QpAvFEKDLYxCTSX3Qq43IEKDXlZCUOG0P6cdh0K+r7lCvCQeQsTnGUKaiG9CRk9VQrxfvEKdHHNCU7JqQnoN20LC+W1Cp7F+QrCT4UKbpg1CoItXQpB9ZEJTf2RCR15eQmPJK0KP6zJCgH1DQq44VkKXI9pCmd5dQroeTEKbmC9CRYbsQnBVzkJa8ddCXqV8QoAOlUKX6PlCxI36Qo7y1EKRXNFCrxxhQl+5N0JMWGRCjIKxQqpPj0Kb+s1Cu0gcQpx6/kKyIdZCWAkLQpTNMEJree5CVQXuQpmeQUKQUcFCl12yQsE6ukJX7DtCnpIwQks1FEKaEo9ClFxbQpeL9UKWgpdCroNiQmWZhUKUoXlCnhuDQpfZsULF6NpCutuDQmQxOUK5ystCgW+4QsT5f0KUp1BClxu/QsL8p0K6nsBCt+IyQm2eZEKPEURCSYgaQpwSGUKfBd1CwvyaQluRi0KNSiRCTxtBQreDhkJPw2NCvkinQsSJX0JLSGhCgxL4Qp6MrkKQZvpCvty+QkT6BgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjgAVMIERD///////////8BEAEQ/wEQjIWd6gMQ16DVlAQQrveA0QMQrbPs1QIQyPi78QIQtZPtsAIQ9Ky+sAIQnM2EnAIQkNeYzgMQ4JHT4gMQ9pP91wMQ78W3AxAAEABUWwgREP///////////wEQARD/ARDUmYDqAxCUqPqpBBDIvazsAxCW+dmLBBDkutGzAhDU2N/iAxCg9+DVAhCw08baAhD1oa6TBBDJkLaqAhConbOYAhDV55oEEAAQAFxjCABkaB5zCAB0eB6AAQE8QQAAAAAAAAAASAFQ59frz6D255cfWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDGk+O+AhDvvdS/ARDqzcSxAhDJ9oK/AxDJ6+Z8ELXI/tADEOu1xPUBENmvvZ8BEOua0rcCENOt1tECEMaU5foBEOrurpECEPve/fQCELaWir0DEKnw79ABEN3L4/wDELn/jv8DENXv5uoDEMus11IQ79TTWxDRqOssENeus5UBELWMxKsBEMHJrJ0DEKuWztUBEKe8wpEDEPLetlsQ8t6PswIQ7/3l3QMQtZ6ErQMQ0/7C3AIQ873LWhD2qZZ7EMvT9d8CENvZxSwQqcnTswEQ8gcQABAAEAAQABAAEAAkKvwHRLRhR0TM2FxFD99WQpyA4EKhLqtCShveQmVaeEJ0bndCZnbnQqYEqUFDCM1CasWKQsAyXkJRfURCmEByQqwGtkABKxdCcgQEQpkvVEK+JsFCvOBQQjaHfEK+bVJCToMuQqC2PkK4x9NCk3StQjZd5UKUjadCbmkMQph8ZkKfYYxClGjuQoAPmUKHXSZCet7DQk1Na0JXjvlCoBgcQq7vQkJksehCU6MiQo8b20LDQwBCmT4RQsPbWkK7bUlCmfToQhcsIEJPsRpCYXIHQqGA2UJftaJCd/DaQoHzEkLAsP5Clna/QncECUKF3mFCiShnQrZKnkK0f1ZCuywJQrVVO0Ke4/9Cbi4RQsALG0JqO9dClFb1QlBQEUK1eDBCkbg0Qk8Mk0Jsdh5CVfP0QsBNxkKK2qxCRyHlQrjiqUKnT9lCr7vRQoiBp0KtnMxCX43cQptUhkKUQ2RCpLwzQmf9bkKBvWhChZy7QoV7rEKtBBxClSy5QozEBUKPd1dCZlv3QqEIoEKOLgJCcTiDQlgDMUK03RBCW9ftQooR10K9K7xCkIDzQkvCBEKT/bZCpPiFQoIHsUKt96tCmRZpQmfmFkKL
dXZCn9eBQku6tkJW4WFCvaxVQpWXXkJc0BxChwmyQoJbP0K/hrxCjaG3Qo8wo0KcsmVCvHTSQk3XxkKvzSxCg+GvQqVUnUKzlN5CkhNRQp6FaEKRdyxClKkDQr58jEJQMBxCvVgAQpdkLkK8dzlCl4+9QsJtMkKrPVdCik60QkaSF0KZaRRCwo4cQmbsf0JmiIRClW2IQlAEJ0KNksVCZAX9QqKFuEJKHzJCh911Qr12kEJPichCYECLQkp8LELA3a9CnPBHQsQIrkJM3zJCd7XAQmRgJkKsLiBCUd45QlvsQUJQHGtCW5WpQr1oD0JQ5EZClgLBQp8tbEKQe9FCgXrAQr0cXkK8bX9Cx2ffQkdFAUK+HrNCSXpcQpWXIkKd/RtCia0iQl4/0UJ3vP5CTgAHQpL6TEKU9AxCmH/wQrMA20J/HyBCkjgQQl7VV0LAGM1CRyDUQlt2AEKsaSFCw3m2Qr88hkJZdbFCjwcKQn2fWUK7lVxCVg55QsFoYEKLhflCtYshQplOQEKjsHZClzceQp4waUJa1PNCUr0vQsT33EJayxEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI2gFTCBEQ////////////ARABEP8BEMSv9NwCEIqRjNgDEMery+kDEJr7idcDELLxqqkEEMHGiOoDELuS6qYEEJyLpucCEKqpgOgCEP2V784DEPHlsZcCEPtfEAAQAFRbCBEQ////////////ARABEP8BEIC9mO8CEMq6q+wDEO+h5osEENWai9cDEMC8y/ACENDs67ICEPTy9KsCEN6f59ICEJSywukCELDjsdADELmZ2dQDEOhOEAAQAFxjCABkaCRzCAB0eCSAAQE8QQAAAAAAAAAASAFQpLLwrY/mytROWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARCdkOyvAxDzi+7iARDWvdejARDR/PLtARDdq6K1ARCq9+e8AhD7zNRpEPbS/ZABEKLV27cDEK367nYQrr/0+AIQ86mclwEQqrr/lgMQ893FJRDq0LQ3ELnw05sDENfWhekBEMGs1dsDELH0l9sCEMXT6vACEKLyyuUBENuc9GUQ74+e2QEQ55LNZhCv16/fAxDB6Z6vAxDh7tWRAhC9vI3/ARCz6tvqAxCh2rKZARCvlYTnAxC99MuWAhD6154nEK76/VYQx4zi7gEQ6bCXUxDWifKuAhDqn/P5ARAPEAAQABAAEAAkKvwHQxnjjj35FdpD8D47RBlz1kJmoS1FUF8sQoFHH0KO2D1CnljPQLyNCkKDJI9CwasxQrGpLkLE5vhCS8iPQkr5/UGXqitCxXJ5QoMbEEKiKJBCXW65QoqBq0JE22xCnK6iQqdw40K7M41CjvT/QpPuMkK67idCUjNXQrHhLkJNl6BCuewVQk5mj0KS1BFCSTKrQlKYAEJqivlCjpN5Qsf3OUGjwb5CxbqnQraIAkJqB6hCxV6XQsStz0KZAMRCj354QrYOCEJ9aItCwnhQQobecEJH/XVCU2g5Qp4fF0KipWhCVKkvQkV18kKii6pCTF7VQr/zskKayytCS1yHQrBFukKUg9RCW/fZQkr8LkJdeTdClsiiQsRTRUK+Im1CksRfQr6wMUKTLWtCT0LFQnRsREKdZLhCnuQ1QmQS0EJM5jdCUyjGQsIDwkJM5dRCxXGXQrZ1XEKEyyVCsZ65Qr4gj0K4aXBCUq9PQodl7EKIu2xCw0ZIQovYm0JE1z9Ckk8IQlJSfUK7TrlCvGViQsGzqELA
YZRCtmvEQpHe5kK+rClCpAjnQrv8SkJq15tCiYIBQqDBtkKBkFRCtelbQk/pHEK8HgBCmIWtQp2JgUKtP71CucO0Qpe3hEKN71FCiZPvQrsD0kKV935CwSgnQoJYh0JEz95CxbIrQI/n/0K1YXFCes2gQrqtiEJI7rNCur7eQl64XkJOHY5ClRXjQk36gULA11NCl2bTQmMh3kJii3VCwhsUQr+7t0Kc3zxCvEDXQlXFVEKOy7RCrSXnQmbFLkKogtpCe+mZQocpx0K38ZFCk6M4QkfoXELDWc1CrGH7Qp1VVkJKhlZCj0c4Qr9eGUJpplFCjiKrQpEZ3EKZhH5CThVNQsCPt0Ktt6tCYUhDQm27KkKTZixClAocQpDSJEKbuQhCqD/wQqGj50KYbttCWCtCQphFhkK1whxCi0MvQkkM4EKdaAZCuX/HQrwNYEK2CfdCwxFlQpTbCkKJbftCwMIMQoN9fkKPMu5Ch6hzQrk8QEJHZzZCpmsyQk9XHELE+QNCrXMHQsSHU0KO0PJCUYTuQreTgEKZbSJCmZKoQoA630KxyPdCl74TQpkazkKblPZCwKtZQpj9q0JhEBBCw9nqQkbLEUK0vypClIs7Qr314EK8KItCjSZ5Qo+OU0KSBNNCu5xOQsN6LUJTkiBCYqFcQopPOkJX6m5CSp4wQk8eDwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5QFTCBEQ////////////ARABEP8BEPC35rMCENC3/ecCEK+XwpIEELaxr/ACELbs1NoCELLfho4EEL7k5aAEEIiJ980DEJ7CrpMEEJz5qdoCEIPBndUCENiIqJ4CEAEQAFRbCBEQ////////////ARABEP8BEMqz5OsDEOGyje8CEIul+s0DEI/B8LICELS7xowEEJiCk9wCELHkoeoDEPDntOUDEJvDzdwCEJ6WudUDEL7ZuJUCEMWs9akCEAEQAFxjCABkaBlzCAB0eBmAAQE8QQAAAAAAAAAASAFQ1tKYwqqujLTcAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQyrbWWxDZ79tTEKGdnq8BEL/+s/cDENHNzLQBENWYw2AQtsr28AIQ+/fCqQIQtfbCbhDJzsWtAhDqqt2nAhCm/9xdELvy6qYBEPWL5vcDEKuO1SEQ0cuclQEQq8/ilAIQra7zsAMQ9o+k2wEQtq6OJxD27cvqAhDi9eXjAhC9u8O8AhDvtZMjELevtKMDEOXzxNACEMHovN8DELmZ5CoQ0feN2QIQ+vmb/QIQwqra9gMQs/CtkQMQz47q0wEQ5f3EkAIQ9onH3QMQxajLMxCy8/+UAxDWmaWhAhAAEAAQABAAEAAkKvwHROdA5UQRZHVFMlgeQnl8jEIIGTdCg2E1QqTQAEK7H85CmxC0QjocM0JbQNpCnwcoQpo/jEKRmwc+FENuQn59ZEKxMtNCvsJSQm0yK0KK9c5BoLbwQqhgm0K2JKJCAyD8QpZ7lkLFZqRCkp4iQq7JiEKjUNtBh1MTQoWUO0K6bHdCSE3zQqq+DUKAVQhCvIIZQr5ZAUKJC89CoA8KQk684EKIxDVCSXSYQnQ0LUJtbwFCuDzhQpGsnUJ9uJ5CvhCXQph/0UKh2vtCrFgpQpTYvkKMTuhCpGELQphgdELBh+hCi+C2Qr1ba0JlpcVCw0d2QqqnC0Jc/cBCjmZKQsAgE0JPyBlCxY5NQp+GR0KJkDNCgCURQrwpVEFaW5VCodmxQmpAw0KJlhlCwPvUQoeMs0Kri/1CmcgEQproJUJUdzZCj2iyQnGajEJEBGhChv7PQmLEqEKGDhNCj5FN
QrfxxUJWthJCs6bXQon7FULFMjJCmTwVQr55EkK6FRhCgJCBQlYwp0KTwKBCjBxEQpx/AEKQwb1Ckr9MQpC4K0IJQUFCo/jeQpbsF0KcH45CWnhdQsLuMkJg++NComECQrabWEK6HA5Clg4hQr1Lf0KeWtpCiSCgQlVjxELA0khCwOT6Qq82XkJWfmFClqL6QpECjkKhCeVCvBc+QsTIs0JSjbFCwhrcQsVlRkFfNj5CxAcNQpyiFkKSmSJCS00EQp2C10KSkWVCZOlVQl9JGkJoqlxCttMdQoPIakLAcINCwnObQllWKEKZYTlCk03pQrbymEKrFVJCr38FQoc8GkJn3MtCw7TTQr1wqUKaVmdCkhBFQpj3ekK8cz9CqkVYQrIVZEK3e+NCmzymQpDRykKahAJCXMkuQrpYXUKN6chCwbWZQpzC6ELCVoxCu+K2QseFFULD5i1ClK06Qr11vkJa3YNCcIXTQqSfVEKwGHRCSnRLQruql0K2KTFCidnpQoHZakKNtmdCfnGnQlXSiUKbBdRCxVR8QrN0vELCKrtCiNbAQoLH8UKiSARChcKvQpxMXUJXLvhCi5/VQpvUFUJ1/itCcV8kQsK7CkKXa81Cl9lZQr4hSkJeo+xCndT2Qk8C5UJI6wFCk/tiQpMZKEK4dZhClnruQpGOBUJmN4FCURadQrNsM0KZILZCv0KMQlcdnUKKt5NCmEjlQsXidkK+aV9CVabyQq0K/ULD5ERCv20GAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5AFTCBEQ////////////ARABEP8BEK79h9UDEPzEzukCEJj4xYsEEPyojLMCEJ/ni9wCELiH6ukCEJjhkpUCENHg+80DEMOp2OQDEIqnnqIEELa2kZUCEJ3tpdoCEAAQAFRbCBEQ////////////ARABEP8BEL3siqMEEKuQufACEKiG6KcEELayjKoEELar09wCEKaguKkEEPHmkpUCEIyTgKMEEIHG/9YDEK6oy+cCELLgmJUCEPfi+pUCEAAQAFxjCABkaBpzCAB0eBqAAQE8QQAAAAAAAAAASAFQzJOM/arZxJKpAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ1pbWXRCvvpWZAxCu9vXoARC91aKaAhDOt4edAxDx9+KjAxDa3ufYARD2ktPfAxCuiProARDJzeajAxD7mJc3ENrrja0BENKM3qABEMu03d0BEP7coz0QtsnaYRCntP1aEOOYvWUQxYu71QEQ/6jzrgMQss7F/AIQ4s7e4AMQu4rV9QIQ0s79qQIQ2pvbvAEQx8/lIRDfn5alARC3/fIXEN/SxBQQ9b6/3wMQ+9LL7gEQ8ZTyrQEQ8u/CuAEQzu2T7wMQzovFuQEQ0Yir0QEQstbNfxDljc4IEAAQABAAEAAQACQq/AdFB1TWRHPdyUUxM8pCNFi4QmOctEKiqHdBzAnFQpQLpEKPHLZCvTprQrk0SkKK6kVCuV8HQhMet0LCMLRCq/K0QcjeA0KrL8VCgC43QpJt2EKv1TRCq+CFQOzwTEKNehpCwyMXQr3KcUK8NntCIym8Qr8LdUKEsdJCp/JBQnO26UK5UxpCesurQloxxEKzfwRCrIV6Qr8fhkIFzPk/qEDOQquXyEKC9iRCqGHyQmkU70LBYfdClZQGQmbxM0KYdeZCvMfyQqQGSUKTw0BCoefJQptSekKyH95Ci8/RQrarzkKULYtCSy9XQmZQokKmkQFChEyWQluIHEJPJ/lCW+r5QsV5V0JzK7BCu9p2QsOAkEKo7hFCuEK+QrqLIUK8KwhCq9AHQrQr
JUKL/IlCVlR9Qp5hO0K4ZtxCZ2PIQsFPnkK3ZAJCxfDEQkkiD0KcouJCt+NLQkmt20JM3iBClHhrQrg+l0KEE65CtvwCQo/7mUKpkLZCf9MqQmVcP0KoRoBCTVujQp6S+kK1JbtCt2RiQqDxpkKZw9tCXyI2QmCx60JiBGNCwOjrQrRKxEK0kChCoNBjQr6j4kKZ46JCmEPcQr52dUKsfF9CT81mQnqsyEKkvT9CleezQoFapkK3K0lCstGvQpO1DUKiALRCvv6WQpK5u0JGPJ1CwEPkQlzvkUKWH6BCZy9CQpEUTUK9TRBCVNdWQpsgNkKRZ/hCwheZQm6UfkKATX1CrrR1QsEmokKTUd9CvNsdQnbTZkLBjq1CvVwwQraqlkKDCM9CiTkiQp2oyEJnuYRCRvEdQsCW50Kc+rFCvFARQm2r/UK/Hg5CSyoZQqeTW0JGs2NCTzPyQr5G4kK7SjBCTM41Qpp57EJjZ/RCpkL/QoUNd0KibmZChYN9QqwSlEKgdPFCqPtVQpUSJUKEuA5Ci9zAQnu8QUJyYJRCcCOQQsGe4kKQmd5CT+RgQkm0MULCyjRClm3jQpvD0UKyYYBCiaySQlKa50Ku/0hCk7BqQk0O10LAtZRCwj/TQmGTZkJ7odtCiI6XQo6SZkJV6RdCwiIHQruGrkKBFf9CjLCTQsQgdkK9RixCvo0EQsUpUEKNjJRCv/bPQm9AY0J2ecJCiEyCQr8g4kKW+BpCkR0rQrb0CkKPC4pCwygcQpqWU0LCbSZCsNxRQlq+zkJWZSxCkX1pQlRsqkKCSpdCUZP3QpdB0QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjjAVMIERD///////////8BEAEQ/wEQ9tepngIQo7/L7AMQxaqn5wIQ6MC80AMQrd/Z1QMQn8yPqgQQhpaD1QMQl4Kg4wMQscye5wIQjMu04wMQlbLQ2gIQgaLUXhAAEABUWwgREP///////////wEQARD/ARDf8prxAhDy86zxAhCHuOCpAhCEoYbVAhC62eeUBBDf7efTAhC+wOeOBBCugIWOBBDy49aLBBCopcGiBBC/qfPuAhD+7P9jEAAQAFxjCABkaBtzCAB0eBuAAQE8QQAAAAAAAAAASAFQ79O+6sfc+JqdAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQwrLlsAMQqY3cFBC39JsZEM/O4uYBENHr5twBEKeLyz8Q153TpwMQ7a3slgMQrraC3QMQp6v9sgIQ8b7qrwEQs5Tu5gIQ45bWkgIQq8v66AIQ+f7L6gMQ3e7XVRCurtzrAxDpyNO2AxCv9cb1AhDniuObAxDV+q7TAhCxrLZ/EKO6/BgQ9dujowIQusn8oAIQtr/y9gIQ59PNqgEQ+83/2gMQ7/brqAEQzZebGxDz9dPgAhDWko25ARDqmvJ8EKLJ5mgQ1Z/GIhCx8NdaEKWq/JgDEL0GEAAQABAAEAAQACQq/AdFIMtDRUGS6kUloS1FIwbHQedBpkGsZQdCg4MdQpzaaEKjCttCsUkdQlQsGkKGvBJCYn4XQqGMqEKu9y1COwx9Qr73d0Kn3HBCTSq1QsN7k0KNsvBCjDMrQn7OskK9/lJCjBXEQsAMdkKVxx1CavOBQo69WkKVUSZCRHP6Qn6UjkKozYtCj5LHQllpBEJQfYJCVVlnQq4N5EJL91lCkyHVQkvNV0K5nlhCjl31QkZzeUKVUxBCotTYQp3kXEKSxG1Csje5QpLRzELBFjBCm/xaQsWApUKqQkFCBnmTQoytIEKJGZ5CtOhHQpL4HkJsaulCmjO1Qn3t
DUJ3Fb1Cl+IBQoAJm0K/ettCc4RFQpJ5+kK1PW9CReIjQpBsmUK8B6tCT/LaQoBd0ELCk0ZCUeRAQqpgmkKevr9CZTeIQoNOg0KO4VpCh9vFQoucOkLCBQ1CgBdyQkeUnkJMlgJCkrppQpfcfUK5hgVCkW5bQrysGkKI0RRCRuxlQnyFWEJ3y4JCxeh6QqQS9UKKSvVCpzJnQkaimEK9vYhCncYnQmVW30KRFJNCiSYQQoiRq0JH9QNCUIV8Qp6F5UK0RLRCSF12QmGa7kJpGN5Ca+XJQnqnzkJZHdNCmJW6Qnt7Z0KN44FCkijSQrnMnEJNmPxCRFq8QsM9t0KKHhtCmOagQoanrEKYSGhCWGH0QqQvJ0Jfci9CW+rpQo/jHEJn1GpCvzThQo51ykKXrDlCo9rOQlWcekJrPnRCkx2nQsVF2ULAHfZCurUWQoU6AkKeBzdCuUaUQpdYXEKocYJCwJSUQpuJwkJbVABCkkMoQpVpmkJTEKBCfTQ3Qm2grUK+rjhCvdBwQopl/0LF8shCVKtJQl8h5UKiQfRCTXTTQo4+aUKWQcRCZRskQlOi8UK/5LlCjUZsQnpmSkK2DAtCm4NDQq+0z0LC5uBCnUXBQrVJTUKPHpxCR1nhQmE0NUK87o5CliyVQpC5XULF77VCwtJlQrsOYEKzgVJCs4sSQrzFNUJFefRCxP65Qmiaq0JG2iVCpMlFQpDaAkJTO4ZCvul5Qo/l3UKU34lCVs8WQo/RcEK/9UpCic2uQsOxzkJUnplCTxvfQpPxfkK8uPtCmF9kQpK2tULCNkpCU7AtQpnhWUKUxBtCka6AQo1lGEKYOJRCuYJpQpYjvEKUDiZClZFEQp4RjAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjgAVMIERD///////////8BEAEQ/wEQx/LL7AMQ75O78QIQmaqBswIQwpjRpwQQheTuswIQytKs6gIQn9+1lQQQ2oCOlQIQ1pTpsAIQjtWJnwIQzsmdlgIQifbBAxAAEABUWwgREP///////////wEQARD/ARC28rWpBBC9jvrNAxDZ28fnAhC21t/lAxDr2IOsAhDKw7HxAhCTytnOAxCm8uLSAhDY96eeAhC+g7HaAhCAk5qWAhCdz5cEEAAQAFxjCABkaB5zCAB0eB6AAQE8QQAAAAAAAAAASAFQjZrg17izj9b6AVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ1rPUuQIQ2u/jrAMQu47qlQIQwr+F5wMQucvzkwIQ44zO6gEQ35qyrQEQ5/qsrwIQoZzt9gEQ3uy+swMQ06qc1wEQwvzb8gEQ5qqbHxDF2fZlEOXJjd0BENKr3/4DEN2p1zoQ9d7DYxDZ6/X3AhCq6dPmAhDNvrOTARDNk83+AxDZ6MqnAhD5ubOXAxCh+uf0ARDD+NLsARDi++4cELWSlZECELXdrOkBEPPd1hQQyd7SpQMQ2/6+8wEQx5Pf9QIQoemG6wIQ/8zdsQIQ3sjloQIQ+onH3wMQ5peSmQIQqY4BEAAQABAAEAAkKvwHRIlvhUUeBT5EAYbOQsBcAUKJyBhCitQgRLOTd0Kg1aRCkZb1Qo+Sp0KtZdVCqNdzQlCioEJ8bcxCluzOQh9OeEIO6NJCoDtDQrOCV0K6l35CjsIKQrtivEKq34lCwJL8Qoy5ckKKAR5Ci7y9QpxXuUIH9oZCkq7mQlFoJEEc3C5Ck04hQljiBkKSVYJCuKvzQldo60K0DYlCUwcvQolw4EK5bnpCuSbdQmdoEUHiNBRCR9NOQrn8JEKrtJNC
swIZQpJ+BEKFgf1CRU5zQr0vbkKGqTxCTKAEQra+uEKzQi1CmyA8Qlt+rkLCuhtCSQskQlaZhUK9h8dCmxhmQqXthULD/HRCnAHXQrtd1EK45VJCg43cQp5wdkKDJzRCBBlsQqS4+kJt1SFCihC8Qpmhs0K7kFhCkjcQQqrMbEK8kqFCXn7PQkl6rUJqB/tCv3P0QpqpfEJZbs9CmfGVQsE2KkLAAAVClvUHQrvK3kK/qCRCms3gQoanBUIabYpCT9qtQoxSwkK/BRpCnnHEQm3b4EKT0jJCUXfoQocae0JSkdBCUDt2QoYCDEJJcENCv/h/QpNvv0KsCERCmHi5Qr7nw0JXwGNCTifLQrky80KGFGFCk3s7QpK+z0KVdNxCup10QpHHuUKx6whCT9QsQoHZsEK1Qj1CoUYFQo6TRkK91mhCuZmHQryfuUKBq4ZCTwWFQoVsbkLDS09Ck3DpQoieO0JEDhpCu+bwQpEO2kJrEOFCTJeSQo/4zkK8f0pCm0/+Qo8HakKZdVhCTOZsQlJ5sUKbSItCZoaJQmV0GkK/vzxCUQOHQpXkkUKReQFCwaCSQq69GUJK0XVCW2klQl2Yq0KMiK1CqxY4Qp/xSkKMVUdCvsKrQrRUxkK4rtFCjn2wQpaEckKaO2lCXbY1QrrojULDh0lCpnD8Qlq3vEJVNi1CWmRGQm6o0UKRO2RCkoD+Qo1D/0LB86xCTVGHQpF7YkJe0eFCumUOQkWY9UK478ZCUcqtQmu/mUK9bx9CYbN9Ql2xGUK/yFtCuH2GQkXnekJOOzdCdVJPQpcMr0KaHqJCj2+/QlTjb0LDc2xCsJwcQkmFVUJN6y5CiTfqQqZ/n0JfSOFCTdpBQr1oi0LFdC5ClQViQqsx6EJVSCdCvvtrQq/ONUJZKeFCmNitQlt3BUJRV/ZClzlKQr/oxkJH/itCwXf/QpzycELCfIRCnDr7Qpaf3UKnGldCm/7gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5wFTCBEQ////////////ARABEP8BEOfN24sEEKeT+akEELDMjqoEEM+NmqAEEPLY+6wCEIGss6oCEIiAvPECEPbJw+sDEL3OnpwCEJTM+ZsCEK+GhJwCEMHfgtMCEA0QAFRbCBEQ////////////ARABEP8BELvzkacEEIbnmqoEEO7mmqoEEMO4zJUEEPvK3+UDEJqEodEDEKihjKoEEJmhmM4DEK206dwCEJfM3rACEMDOjpUCELfc3JcCEA0QAFxjCABkaBdzCAB0eBeAAQE8QQAAAAAAAAAASAFQkKnAvc6WqpvNAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ2rP26AIQy7rFuAIQ2pj6chD5093wAhDNzLaXARD21I24AxCxyPeTARChs8q7AhCyzcTzAxD5mt/RAxD9z/vbAhDOz/auAhDqqvbVAhDprtswENKrzBwQ9rHvvAIQsf7u2AMQ6tidlwMQ5pv87QEQ/t6DswEQ87rVnwMQvb+V8wMQt+/jYxDX7auXAhCn0/P1AxDr7sOwAhC+ye2gAxD32JX5AhC1ju2xAxDu6Y4bEO3avq8CEPvwi9sDEPK597cDEMeu8joQub28KxD9vNtyEPrXiusDEMm/19oBENEBEAAQABAAEAAkKvwHRKVJ3UR5JGZFVPalQevm7kKG29tDddpFQlPsN0UGiYJBXawNQsANhEKMbx1CarEEQrIbw0K3PkJCfJZKQoh55EKIE91CXbmaQn96lkJfTP9CwNo0Qq+N9UKR80lCf5RyQo96UUKsk2lCi7lNQkWWqUK74+pCjByJQsI53kJ/69dCqSw4
QrGY1EACPFBCR5knQpwWH0J8fo1CiSeyQoq5gEJn0CpCmS4EQpl040H/MpZCrxuCQorwgkJJ5IJCkdCVQqDAfEKH8QdCsCkSQrRb7EJOM0xCS3AIQpGMkUJknEZCaGwvQglmtEJJYVtCkykVQpgqbEJM4DZCe4UCQpBzFkJ4qq9Ck8YvQrzGUUKzgyBCjkYMQnIRuEKSEUdCp+CVQsX26UIJiKdCiK/HQkTPjUJhn8lCigyDQpMxn0JLrNdCk+ufQp+uP0KYAJVCXBjLQsTtaELHzeRCe1vzQsNvQEKdJetCnU/VQrqgpkJFMkdCvVSQQlf//0KWe3VCTp74QoV63UKO5YFCRDKwQmbDjUKfCG1CkCz2Qr84TEJvGd5CfEzSQrO3KkK6JidCnUUJQsCkoEJgO+dCY5ohQq0j3kJ5J7RCwLrXQrer5kJQEfpCTjnPQppwd0K6Zl1Cbr0rQllZMkJPrVtCTggvQsEmz0JeApdCi4aRQpXv40KdRhNCStPmQsUQr0LE76BCk2tcQmJOPkKbYsdCjSfLQo6uW0JmOb9Co5EeQkzTzkK2+ANCnQ2qQpRwCELA9ZNCnA/pQmVsckKJDaZCwhDWQo3oB0JRb0NCRBMMQmsl2UJbWLBClECnQpFyKEKN5OZCRrE8Qr3LskKI4tRCmC2nQr/lC0KUn6hCr7bcQowCmEJG5HhCUOgoQrt0C0K7NcdCSr6OQpx1h0KYImtChlgCQqmD2kJJK/tCWlvmQrVKrkJLl25CljVAQneZ8ELD7NdCjN0HQpGnzEJhZ15CeZ3LQhWngELE8t9CxXxWQlbwNUJFSsVCrVIIQpucT0KypYhCoaKYQsczzkJb86ZCTh1HQlEXuEI2k6JCU38SQl7YAULEVTdCk0i0QqAgQ0KXFvdCXRswQomMy0KULXBCa8Z8Qrq6LkK/u7ZCvS1kQn5AAUJGLp5CZmKvQprYgEKKBpxCSBaBQrU2GUJX8llCul7KQpTLvULC85VCwAnSQpNOy0KzOPtCTNarQls+AUKQCg5CliIaQpaaf0LDNGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5gFTCBEQ////////////ARABEP8BEJr4jNYCEJyz15UEEPy4qJUEEMCe3dcDEI78+c4DEMOunNcDEMqy0NoCEOby0ZQEEIjR29ICEIy2iZ8CEOiB6dMCEMbHjM4DEAQQAFRbCBEQ////////////ARABEP8BEPbOmqoEEJHIodEDEKrNu44EEOGKy44EEJ76hp0CEKvLn9cDELybqp8CEJT68ZQEEPXGktEDEPeO1KoCEOXYsuIDEKre680DEAQQAFxjCABkaBhzCAB0eBiAAQE8QQAAAAAAAAAASAFQiZjRjoL0g7LqAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQxpDDnwEQrejX2AEQqsrnmQIQps/rYhC3yr/7AxDp1cyiARD+qO2yAxC7uPKgARDO/KRXEP2yhukCEL/cn/0BEKLZl98DENOXjNsBEP3yk5UDEL6S9ucDEPXS164CENvy3bgDEP+XtO0BEN6Z5RMQ/ciqlwEQzcrHpgMQ45LFYBCt0Ju9AxDPncrXAhDb669ZEOna4lUQxYzWmgEQqt+OcRD1zr4rEOu04loQocvCpgEQvrznXRCy2+d1EPOo7nsQubrTpQIQu7TS5gMQ4er/nQMQABAAEAAQABAAEAAkKvwHRWp3EEPXAtdFHoFIQqq9rkJgUANCkPDOQoyvX0JJ3WxCiLofQkZTZkJ9A3xBnkf8QrhvS0KdhepBzXHRQlcIvUJNlYlChr7iQsCwnEKGfk1CS1YI
QqN2g0KgoVVCs43AQZu4W0KCMlNCkpaSQiCHq0K5M81CfNJBQo/7EEJ0AHhCvJyOQrgQU0KUA3hCs8Z8QrPIF0JX7+1ChG6yQsPLaEKNUnhCYFSKQmHtOUKCnElCRxHOQpF150KXjOdCsjcNQrnoFUK0b6VCV7x8QsSOyUK7pmhCqDZtQouv+0JOmA1CstuvQm07l0K1TT1Ct/D+QncfzUKNYWpCelPkQlR+K0Kj9xxCO66rQq1cSkKnyn5CT4ovQmaOV0KyOk9CkZpxQm/oZEK1hY9CtpeqQrr57EKzPxNCgXsLQpmvAUJ125VCw26TQpDshUKkdS1Ci+5XQrueSUKLshBCxYWsQktyI0KDt7VCsORvQpyf6kKSc2dCxVsbQlFJnEKIBstCu45nQmKcD0K8S1BCw2EIQrXsNkKIJqFCTY0UQkh5jUJO7vlCvujeQrFVpUK9ItFCuZ8HQqoPgkK9kStChFL6QrBIeEJOPfVCjwlJQpWXAEJKlUFCnPvBQpEBv0KaR+1CgW1QQpk3bkKm0CNCmNwuQl3kNUKZNsNCwkcfQmZvLkJGqdxCxFmmQsft9EKdGcxCt8O5QoGAjkKSaVRCvx3hQpw3DkKq2vlCUUeGQklcyEKn+9pBoKDOQqYdxUJKq9FCkwPzQkxjL0LDEW9CoL+7QlN2AEK+Ux5CltDKQoxTK0Jn+yFB9fGPQod17UJSwvNCi4Q8Qpad0ULFwiBCj2ZZQnI1gkKfwOpCVVQyQsNG+kKPBstCYJvpQlVnE0K850NCVdFKQo58X0JZ6eVCkaYUQlWYAkKzWmNCsZX5QmJ8b0JWOPZCuJg5QpG/1UKPI5hCnWaaQopH2EKT0xJCmZBHQok6PEKcN99CwMztQr3QeUKOj3BCU0D4QncMQEJ6GSBCpSG4QrzoJUKhtHpCUG48Qklc0EJN9WpCVl3HQnBFEEJRehxCi8e9QkWE20JSHMlCW7ZjQpEPj0KYK3NCUgJKQou+n0JuASdCsongQljzIkKWDGlCxGPNQpCyikKYa3xCtjstQp4cDEJcY9pChuEIQkQ/NUJnd15ClDYfAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI3gFTCBEQ////////////ARABEP8BEI292tUDEMKMgKEEEKymy5UEEJDd5qcEELOd1vACEOTi/NYDEOD6ip0CEIy/r5UCEN2D8qcEEJjk05UEEK3Tj9ADEKWHMRAAEABUWwgREP///////////wEQARD/ARD2ptDsAxCV69+gBBD78+mNBBDHnNygBBCr/8rpAhDX853RAxCKgN3TAhD1hLqVAhDAx/WUBBD+5LyOBBDKwbGXAhC42zAQABAAXGMIAGRoIHMIAHR4IIABATxBAAAAAAAAAABIAVD14IK80uX8hqcBWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDVsJTnAxDxktK0AhDpvOLVAhCm6sw3EOu8xdsBEKWg1RMQ9qjDMRC/tdf9AhCl3p2lAxDN+O28AxDvs77XARDr2vr5AxD1s/5QEIPXrjcQ7b721gMQzpOkWxDX3tajAxDmzcVcEL3r0t0DEO+crHsQwtm+lwEQpbL3kgIQ+d3dbBCh08PQARCtjorlARDJsot3EN3Jzh4Q99u3+QEQ0fDzrQIQ/e7NYRDNzM7dAhC3mLpjEKbN/+0BEOm/ytADEOL7rOkBEK+u9ewDELrM/noQ2cmFDRAAEAAQABAAEAAkKvwHQ66lZERaHDdDeF7yQ9Xz5UOMg5xCrYbcQoakKUGEmKRCuxbmQo4K
uEHr4oRCjSUDQp2pckJFwZ1CY3eGQqEKtUJjJAtCfvgZQrvGW0KS4SlCloIzQrw7d0KAGsNCYDWjQmGqWEJdRYRCeJciQrpvqUJvZIZCnfwvQoV62kKcxxxBVIyMQqHkoEEK5N1CleINQre7dUJXqcRCi7NtQpRR1UEgSItCTMeVQnQG+UKXD/xCwp18QkfV1UJPr+pBrNgmQpK2qEKjz+ZCc7gLQqH7gELBT9dCrMZgQochskKvfRJCT0Q5QrsYpUKNPW9Cgk0RQlF8TEJonVdCvMhZQna2NEK44ARCnnOzQmLYf0KP/Z9CgL6nQpUfj0J7ONVCaCMmQpNcgkJYtEVCsBCjQlMoe0KUmB5Cd/IZQlpwC0CTXlhCwcIdQlQJqUKnY3hCdWQVQpGvvUJYDAFCbgJYQnOZDUJ7MvpCikQVQr0fBkLCIntCvqsKQrLEN0K6RlpCj+BvQlxDj0K3+ZVCc7RxQkltJUJfJU5CtrlaQsO8ZEJEXqhCmpKoQrYxkkKHfV1Ck/48QqDrlUKb5NlCl64DQoh/dkJKWRxClrCgQmGb6UJOc5RCUOyaQq2GeEKpkmFCZYfKQrKUoUKingtCRaiVP7vFBEKt8EhCjcz1Qqdp80KhdEdCvaW/QlQ2s0KZlqZCrGUGQpoqBEJlyEdCTikmQq+oJEKSzXBCuKmqQpCqlUKnXfJCudHlQp1i2kKZnLpCmqz1Qqj69EKoU1BCmablQpuHz0K/dbZCtWsLQpR88EK41VhCv3feQpAIN0K3O2RCSjVFQpkfcELADCZCh5DfQlOdsEKdkj9CnbNcQlE20kJX+axCTag+QmeB/ULAlhBCRSpEQp5pnkLEU6BCxYNqQpzu0EJPde1CwCT7QpHkV0JWfgZCpEZ3Qro9t0JNKjBCwL1+QpfS10K5AixCoNoKQlwN+0JPwpNCjnGYQk5lJUKA4GFCXdGUQp6aDEK3GNZCsdx8Qr2oUEKOmNpCmxMuQn0Er0JLLlpCvNeiQpaTjUJPqetCVJG3Qp9qeEKUG7BChcJDQsNF1kJt4ChCXdmGQsOGv0LC4NFCwO9IQlyEGkKJinRCkfgcQsVEc0KOSLtCuzd/Qrcb2EKNxf1CnmA0QkaVAUKOt5hCR+s2QpIuiULEgw1CimzWQr07u0LBksEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI4wFTCBEQ////////////ARABEP8BEK3NlaoEEOermIwEEPzCvtADEOCv+qkEEKWwnJUCEPuCo58CELHj89wCEMj/ktwCEMrswpgCENi3wpIEEPm9lqwCENHfnXEQABAAVFsIERD///////////8BEAEQ/wEQnP6LqgQQofnRlAQQzcTr1wMQ0dmI6gMQzKO3mAIQ0+/QkgQQzc2dlQIQz9uq8AIQjtKr1wMQgrSb6QMQhNWzmAIQ6bbaXBAAEABcYwgAZGgbcwgAdHgbgAEBPEEAAAAAAAAAAEgBUPWTxdKeuIK4pwFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BENLJ/acBEPG929QBEN/M1uIDEOOaopUDEKfplvUCEPPwg+wCEJ+Zq9kDEPbU6mYQp9HVZxDZsbKpAhDl1PpyEPrez60BEK/dx60BEMnp7ysQsayHGxDJz/P4AhDX6M23AhD16++bAxCm+rcREP6u1WMQ5czstwEQr5WKnQEQ0s68MxDa37fhARD67Y+5AhD79oqrARC/vN7jAxDl67yZARCi/Ja1ARCm2r6RARDh3uzQAhD71PtaEOOc29IBEPnSrDkQrbS/nwEQ/5L+lwMQvuj1exCj/IOxAhAO
EAAQABAAEAAkKvwHROWLpkVh9DJDx+tXRJpNBEEZjXhDCAuMQoxedEJ5TNtCixNOQoJizEKLr9hCkatYQoLju0Ko+cdCngtKQduj/EJgKpRCr7GUQe4fMEKCZYRA5hRqQoirKkK+7MNCp2HYQkwoNEKYaUdCmfTcQoLFJ0LEpWpCecJ3QlIWCUJQCX9CvaAcQqGw1UDvUTlCwsX1QlLRoEERAWxCZZ/XQp5400K2uSRCi2SsQrmYLEIDBVlCtmynQpl1FEKURutCvlDUQkQZzUKSjelCwEReQr6Ih0Jtq31CvORLQqj3lkLEgTpCwsKwQp+WBELCYrdCsXOfQpt67EJw/99Ctxi2QqB9UUKT5NdCVsm+QrQo2UJvKTJCaA5bQmI+MUJ8LdlCug8EQk0pkkKj+hRCY94BQmQDzUJNjDhCtR4tQoLCI0KwxAhChJP5QlZwikJOId9CvIShQqgaGkKYTSZCjBcuQk4aJEK33uxCgiyaQqpd2ULEXdFCjNfJQpqKbEKfl3VCSrHlP8Hj50KvsP5CketWQrxz00JJPwdCc1SVQoFpqkJcpsdCjt6GQliSAUDIHSpCmAHXQp+y/EKJ4p5CTmlFQm7sh0LE3bhCkP8PQsdGgEJyP2FCkYtjQq3o10JcbRNCvh6+QpUDAEJNytRCjGTxQrmXw0JXc2JCShgbQklvxUKNpsZCqdwzQoukPUKiopJCmHJCQrjJNULAhdNClYnBQrHNVUKl/BZCdJ5pQrbnoUKynXdCRWTpQns3oEK0wDtCwq69Qrr0EkJhnalCm2XPQmcAEkK/145CTNcUQmA2Q0JXfXZCuyJaQogABEK14IxCvgXbQmQ0vUKgIJhCYVGpQmCxXUJ/W1BCryfVQp0bzkJf1DdCkbrkQrQp2kK6ITpClJXeQsNL2UKbdL1CZrMyQlLoyEK/03VCcHbnQrbXt0KgCX9CYKxzQln5gUK6SwZCldaUQp/jxUJXhTNCSR9HQsC/3kKPmApCgmOrQkqxckJayRRCugMRQqQNm0KbpnRCnIuiQmiYOkJUwk9CY8nbQp9YvkKDOxpCoHYcQoZUo0JO0XFCv1xAQrYoZkK+v+pCV3PMQoj7mkKVFKZCqnHmQnmdfELAFw5ClB7UQnEfC0JPkONCsKUWQm2kzUJdAKZCh5HnQsJ2FUKPInRClZ0tQq1PZUJljiRCVlnKQkzDU0KjOfpCUy+PQpOn+EKx04pCVj54QrqUgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5QFTCBEQ////////////ARABEP8BEPKW3ZcCEIfA76kEEMq/mvECEOm7wOMDEIiav+kCEPuS/Z8EEKudyNkCEPHprqMEENe/v+cCEMnsx6cEEMW2ypQEEKG719QDEAEQAFRbCBEQ////////////ARABEP8BEMuKuakEEKSHpeoCEPr77rICELrMr5UCENS79qwCELP/244EEM6XrPACEJPD5OUDEM348qkCEKLa/dUCEOjihbMCEPmeqNoCEAEQAFxjCABkaBlzCAB0eBmAAQE8QQAAAAAAAAAASAFQxaCOve+7mPE7WABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDK8da2AxDNsZydAhDz9OayAxDP0sT4AxCxiq3hARDZyoUXENmV9FcQ0vmH6QMQp7Kb/QMQ7crC/wIQu+3FmAIQ+ez38wIQocvjaxDbtdSdAxC/k6//ARC5l9/nARDhzKSvARCuzOtfEOnV5toDEP7x7dwCEMXy/q4DELf1tdsBEKao4qMCELLc26EDELHT09cCEOPy+2gQ0tvenwIQ/Y7vYhDvjebX
AxCtq+awAxDD1Nz1AxDFiua1ARDGqoKlARC7lfo/EO3I/ekCELXIqpkBEMOpxLUDEMPzLxAAEAAQABAAEAAkKvwHRFNZJUVSzgVCx9a9Qkvyw0KYnqJCQDpDQpzYm0LDx9VCu5QhQquAxUK0x3tChnr6Ql2v10JpF0FCpcp7QJCsckKTteZCcRhxQlWUTUK1G/RCri3/Qr040kKISIpCalz4Qb/ye0KYtXhCl5sbQrtF60LA4LdCsrMjQpOwS0LG5qBCj71FQrBW+0K/vs9CiRUXQoc6IEKtxAVCtXXWQqghXkJ6GRxCkweqQrMKpUK26r1CWrjbQl+XOULChxpCvWiuQm2QgUKSs9VCvt+aQoY7FkK/sjRCSKWYQjG/MUJXgZBCkpsAQovj2UJHOSFCce3OQlqm1EKZVj9CjHseQrh0/0KVs2xCmDb8QoAbm0JL1T1CkrGjQmq+ckJY58ZCRqcYQoiEn0KSYv9ClvAJQpDUCkJyJutCoH1DQmtHXEKv5jdCxBQJQsQxl0JhJxBCjjJpQkTc7EKEyCdCu8XyQnNc70K+mHZCRcEaQqNVzEKXXghCvacFQmWTN0JsrxJCxUiqQqS+ZkJaK8lCmofOQsNz3ELCIpBCueDDQr/sGEKYmoVCnabVQqIiOEJp2rFCmxWgQo8/eUJOcyRCvImzQlG72EKJKn9ChcR7QrTdyUJoEAFCswnhQrzNekKgK1lCiEn+QoscUUKyI2xCwWQ4QkcYcEKV3QpCvT1NQeuh+kKTvjdCuSTgQsE8MkK6ZhFCmGg5QrccPkIv+K9CnKFiQqFRXUJcDUdCvsp6QsFNMkKdrp9CXEqGQpOK5UJMc0NCudwVQpFH+EKQHJ9CwcQpQonJ5kJZTU1Ck/AIQk0NKUJkrqhCwqLWQpGIbEKMqbZCq7lyQr+VBUK/N7hCXS70QlJSN0JRkAhCpmGSQpZ8j0JQY9JClkeTQmOlx0Kes6lCw9/jQlTFmEJbjBlCkdCDQlaEiUFY/RJCfsvJQoZMKEKU/NtCpaHdQnOHMUKkyWZCYEVSQmD/YkKouExCvR7jQsC6UEKKlwdCUJ9vQpzI60K5KaVCjiyoQmeMH0JFPvxCVqKIQr58zUK+o6pCjJuyQpkhZkKxjo5Ctsy7QlUQ4kKnFiRCvOUTQpHhmkJWnetCSHv4QpxN5kJjYZdCj9r3QsDS5kJGB1NCrsjqQos1WUKL71BCsT1NQqyxYkKhzj9CiD5IQkxiK0KwKytClNYvQsJFqkJVZ0pCWqFYQmY5LULCfLBCvsfKQln/FgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI4gFTCBEQ////////////ARABEP8BEM+9zJUEEK6c0pcCEMPJ99wCEI6mpZIEEPLxgtUDEMOol9wCEIO3yNMCEKasj5YCEJnrp+gCENCZ3tMCELKo3Y0EEKWElB8QABAAVFsIERD///////////8BEAEQ/wEQ2/+PqgQQ+fCa8QIQluXqlAQQ2+3ppgQQoJevlQIQ19mH3AIQ+dWunwIQ9OuAqgIQjZGD1QMQw57glwIQ8ZuYlQIQq7bmHhAAEABcYwgAZGgccwgAdHgcgAEBPEEAAAAAAAAAAEgBUJevyL6t/9rxV1gAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ0pPt2QIQuoum0QIQzrLjkQIQ+e6nuwEQrf76+QEQpu/7LxDrt9yeAhD7lb6tARCl3MP4ARDH18PVAhCm+vfvARC9/Lw3EP2ordEDEMK6xdMBENXe9/MCEOGO2/wBEPXrujsQqp38mwEQocyy2wEQxZS0
mwEQ4djqmQIQt5HK4QEQ1q3NZRC1/evsAxDG7dsfEKL673sQu6q7vwEQvra33wIQ5cuu0QEQ7Zuz9QEQ8s6L6QIQr8vTmAMQwavk9AEQ75nC5AMQwsuvsQEQt7ea7QIQxumN/QIQ0czf1wMQ2Y2H9QMQFxAAEAAQACQq/AdCuzLcRPGwtERF0gJEjXMDQlgv/kKVxDtCvXtpQpigtkKVqApCbNopQr1mz0KMwG9CwLiTQrHSSEK8wBxCl0VFQkdXVEKRCSRChn5aQlGV90KeHDNCTmntQsQrIEJ3sMVCn9vQQpAnTkJ8WsNCo5IFQk4fTUJg8rpCr/O2Qp3k2EKWUfpCoRtoQlmF/kLFzkRCXx6SQmgo9ELBPjZCrTLKQoYZPEKTzotCYOqaQmiNWkK2P8RCTxonQrrCxkKw3xZCk0NEQoY8FkJUMe9CjKzMQjnqd0IuCxJCRBqGQrrwnELCi0JCjpGAQk3uC0KDTmVCwERkQoo570JUHeBCZaYCQSsKhkKxP5BCpFtUQoeO6EJZN/FCtG7KQsEg1UJMJp9Cn3dWQlOHMkKf84JCvH6yQr5lqkKcLVNCrMdfQrZkn0JZk01Cs6uDQpB51kKNNm9ChF+gQsOWtUJu+3dChfeKQnZOSUJGM2VCmt2jQltIpUKexJVCjnLkQpfzGkJTwrVChqU2QkjEtUKXpYxCiGECQrLlv0JT+EJCumUTQpNCj0JK6MRCvxSTQmwM2kKS2TFCkXbnQp2gsUKIlz5ClSHdQsO0HUKP/hFCjYa1QrZ6EkK9j5pCxaQPQsHkH0KbawxCn6yzQm0u/EJXid9CjqBbQke3sUKNbiRCWCF2Qpkj5ELCOUBCkbgHQlvCoUK6usRCwueBQsf5SkKQGrBCrrNMQn/I4ULFCfNCju4vQoDly0JhQ3BCmdU8QpQQMEK41GBCtPf3QsH/5EKLLWdCpSU2Qk1msEKXWxlCsLeYQpY1KkJVjJlCTX2MQk6GqkJhajpCaakKQo9D10KAYy5Ckf8+QqoFGUJTsRhCv/WWQpGhM0K/wqdCWs93QrVaFEKPQ0lClY8fQlU8gUKOppFCYfcAQsEMeEKKcuVCkWUMQkr2PkJS5G1CrI2BQsONQUJXIplCpCfFQlONnUKCkVhCkLXiQsBkx0K3Eg9CgbibQo18P0KahyhCm1yuQpum8kKEBBFCkNVyQrzeGEKc1JZCwr6YQpe+akJUcbFCRRFCQkWDJUJWpShCl9hLQp6CcUK9d4VCxP57Qroh6UKOWXlCh4SWQrrmh0Jrac1CXI4hQpXgGELBldtCjLBfQrGmxkK6auBCwZpGQr443EKb959CsSVMQqRbFUJYB2tCiNW2QsKK2UKA81BCU3joQmOmZUKFG3JCi7p+QqC790KhMGlCXXjHQqNoLEJJDzVCUY/zAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjrAVMIERD///////////8BEAEQ/wEQxLTv6wMQpKa4qQQQ35SAqgIQ6J6JowQQwefn1QMQ9NiDrAIQ+YKC1gIQhu6CrQIQ7fj6nAIQ7/XopgQQ/pq8lwIQ7trWlwIQxQgQAFRbCBEQ////////////ARABEP8BELPnmqoEELWs9qcEELKlgawCEOX8yY4EEKvK/p4CENGq/Z8EELm27NsCEOSm2dMCELKvzrECEIjg/uQDEJbgv5cCEIDVrp8CEJYJEABcYwgAZGgTcwgAdHgTgAEBPEEAAAAAAAAAAEgBUN20h9zFn6rmYlgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQyayDsQEQy7P8GRC+y/f6ARDPztXdAhDf8eeCARDp9uM6ENmtqysQub6W/QIQt9fGZRDe
jYvtAhCpmtqhAhDRvvydAhDnsZvxAxCy086tAxC3/Jb9AxDFzM4WEOm/xq4BENOKpGUQs53EnAMQz4nG4gEQ3q6alwEQ2ZrWkAIQ4bbf4wEQwdiz6wIQ/bX6owMQxdrXmAIQzbDzdhDa1+MREPXN0qADEK/o1OwCEKvs17sCEOaw3eoBENfR7hUQw871mwIQ6ejavQMQ4ZrmORDmroLzAhDx9PUPEAAQABAAEAAQACQq/AdD+rK9RSJIfEKBVvxCkmI/RO8cikJKjgtCb13eRXx4ZEK2hNdCvdc4QmtiIUJr9XlCu/+4Qo1ulEItsCJCTZdhQoYc/UJUS2hCW37qQsOQ5kKT2UxCuvVSQkjyCEKnLZZCSAOUQreTq0K+fZFCfGM5QgaCckEWFM9Cn+ylQnGtD0KgtLpCpio2Qouiv0JsRQlCgXJvQsHd60KHFmVCmF5YQqp+WkK3FOtCmOPWQpLugEJGLbJCfvDjQsSTNEJGchNCf0DlQpSN40KtPXJCBOT/QkaO8EK8m3RCuh+GQr8fD0KVhNhCls+GQrQQpkLEY6RCisaQQoqK0kJUd29CnhOsQoJqS0K6xR9ChbNlQsI+gEJIQm1CwFIrQksSFUKUKtJCY8djQn/APEK9bVFCgo57Qr3fCEJ5LcpCsjMFQpoFq0K/3qdCS2TvQkQzxUK+dWJCXdXOQpWBykJvyPdCefUqQrv1aEJEOvxClBw4QsTZ60KNhT9ChrelQo/8pEKY1K5ClvIDQmcCwkJj34hCcRakQpADJ0K/vetCVucqQr4TBkKMG6RCqYquQsQj8UKmwpRCYmIvQpEATEJSVhVCq1DBQo7dhEKSMaxCZsyHQr//eUKIV1FCUNzNQqOOxUK/ft9Ct8/sQqyfzEKNve5CeVO2QrrVDUKj07hCm2KeQrfo1kJzTaJCS4EMQph1s0KW6HBCig1+QlNQc0K8WrtB1YZeQm5gF0LBL8BCh/0iQq8sZEJILrdCo8s3QrGc0EK0VmlCeqzPQneJekKtrfZCiA/WQnk4xULDQQ1Cnb9AQq7aPEJ91ZdCXAajQoNYBUKKiQZCmZMgQr6RwkKteA1CmcTBQpNkSEJWSlFCqZinQqzD10K9upFCh5aeQmS3a0KHBUJCmSjWQkeeK0KeelJCmaBwQoh3zULDouFCRT1UQp1a/kKUWqtCqczIQonYgUK/C5RCR0aWQouSEUKXA3VCVK2AQkjMr0JdvJNCvkJLQlWXJUKwSfhCuYs9QpWNmUKzpBdCV6Y9QrG8/EK5xoZCgSYGQm6KgEKHiiNCZQyiQrZmvEKVVbpCwcBmQkZb1kKMzkpCmSfiQldp6UKNnDNCjMnTQllkPUJMxhlCk1BkQky3CEJvzNdCUwDXQlI0IEJqGBFCvaigQktBy0KM8DJCk8EBQsHyfEJQjwFCjwJjQkwSVEK+0thCwUb8QmQy1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjjAVMIERD///////////8BEAEQ/wEQ5+Ht6wMQiYju6QIQprzzpgQQqtSr1QIQ0vytkgQQ5tOpnAIQwNnboAQQwoaI3AIQ5bGw4gMQ4M335AMQjtLEiwQQi4S/ZRAAEABUWwgREP///////////wEQARD/ARCH0O3rAxDhv/ygBBDM5qmjBBDFtuCOBBDm3PzUAxCI5eGXAhDM7caSBBCTtIfXAxCXt6yxAhD5htDiAxDG9qrnAhCw8KljEAAQAFxjCABkaBtzCAB0eBuAAQE8QQAAAAAAAAAASAFQ5Yf6uv2aopi1AVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ
1smF+QIQo/PnmAIQutiL4gEQ5brrbBDPvrw5EN3snqEBEPbto/0BENXw1hwQ1vb1pAMQp73a9wIQsvfX4gMQw8jN+gEQ06+6rwEQxszUqAIQ1auN6wIQws3ErwMQ//T6sAEQwdOqPRD909uQAhCt1/4kEM/P8vYDEMqp0/sCEKvs5p4BENuK8zAQtf7K3QIQ1c3znQIQ98/O2wEQtZGH/wEQwd7i/AEQppm2/wEQs/jrXhDanqanAhDWs/JlEO6p15YCEKmK8/4BEKLs59ECEMn2wpQBEGMQABAAEAAQABAAJCr8B0VDo+VFL1DtQqpHOEKkYtJCuAUoQlV5HkI3dPlCnpqHQre9zEKOMLhCiI/CQhzFxkLAh8lCiORIQi1xMUJFtDtBXqCIQsJVZEKSTABCUQMUQnqdnEJFaEpCmhAeQqx4qUI45lhCs0Q2Qk7kIUK4FMVCumB5Qke4okKRLiVCqCLBQp59fkJVOCVCuy3UQsDYP0K8BDNCb7k+QpT4FEKIN35CsHQnQk/GVEKSiPJCuzaJQr0JhUJZ0EFChZrZQqNpp0Kpdi9CsKWdQrmXpEKscQ9Cn/RoQsHNe0JZhzRCixfVQnX3iEKOiGBCgnHYQmS0IEK2ZAhCmBp4Qq8SJEJlhldCnb6tQrc6yEJP9PtCvBJ1Qor7uELAJnhClTM5QktcikJKG8JCtrkZQp9v7kKMiaxCwsYfQrEGYEKzkCVCsRK2QpUINkK7wGpCnB8tQsLVnEKMywBCwDRLQo+vP0K6q7lCxcL1QsWIK0LAAslCs0xMQpDlOEK9z2xCTeBLQsA9cEJUk6BCYgHgQqnnMkKHsJpCl9VoQl66cUKGXItCtaBtQrhoo0KOxGZCxMuVQml9YUKnc79CW8ZgQsRca0KNHcBClAg7QpCU4UKaKtpCmn73QrrnQUJWqipClHQmQrTu5kJjHrxCso1FQpO8o0KKiKlCgbw3QkpHbEKjpdZCtP26QpIBAUKO3jRCVaubQk9BnEJe21hCnVy7QqJTQ0Jjj+VCl48uQpR4nEJS48FCvEAoQp/MhkKPF5JCjJwWQkxdF0KMDQZCfFB8QmOvTkKKgY5CdKJKQoRfyEKILgpCvF+PQpsG8kKM0BtCXXN8Qo9w1EJuJT1CR0PSQodPy0JyQq1CRyk0Qo3kSEKeedxCmpfiQr9Nj0JIaZxCvPU4Qlg3e0KEnnBCu/sVQk+s1UKWSp9Cmvv/Ql1rWEK/ThlCiYdwQmBeaUJa84lCwlb4Ql5VnEJQKbVCjlrpQlob10Kf0NZCjTGzQqIx1UK97+JCtZuRQllb2EJGz79CtBeNQr+KpkK1EERCsq+JQrxGGUKcqD9CSXy3QrgwnUK5ROVCSliwQpTvVkJyWghCn9hXQoxYr0KSeCJCjA4LQqAS9kKcvHRChoBuQk7I80KySjhCm8XSQo14VEJMVO1CTZLPQpo3WUKRqgpCuBPnQrQ3M0KU6nNCm85LQpfHRkJRRstCSypkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOABUwgREP///////////wEQARD/ARDdyq2SBBCK2PanBBCYsc7OAxCq7ozcAhDL+p3qAxCstezVAhC3+LKVBBCquuWcAhDGtsqUBBDaid7VAhCv4tCpAhCX9bUDEAAQAFRbCBEQ////////////ARABEP8BEPCs1JQEENv366cEEKCEnpYCEOW+ueUDEIGXiu8CEI2g4+kCEKfPrp8CEL3LtvACEIGi2osEEMLOvo4EEPby7pUCEKH2wQMQABAAXGMIAGRoHnMIAHR4HoABATxBAAAAAAAAAABIAVDU9e+ax/nJ/vUBWABgIGgA
dQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDi2NzZAxC7lMvuAhDymOL+AhCnvsZyEPHru9kCEM+K9aEDEO7MrO0DEOXd/j0Qv6rK+QIQ/ZOltQEQubCd3wEQvZDq0AIQpdjsGxDxiva8ARC6v9fUARDF3Lf9ARCpzaNdENXQr+kCEKGbnW0Q77mUtwMQp9Tn0QIQ5oq+8QEQpby3exD7+OaqAxDXntMZEO2X0v8CEO+Q/t0BEKKW25UCEMaqotECENn24vUBEM71krMBEKvN7qMDEOK/m/kDEKnOzaEDENf9lPcDEPuKvZUBEPr+/Z0DEMXPThAAEAAQABAAEAAkKvwHQ8EF5kEjpMpBm5qCQpxuJkHbKpRCilNhQSyr6kKGe+RCpZEAQpDU6UIl7IlCvsq1QppW8kJHscZCT5V1QnWvNkKTugRCZPieQgv5hkKmPc1CZCcQQkzjtEKXOVBCfCkrQpIyYEIQ4nVCliCeQovEC0LBBxdCPuxXQnGQVUK42XZClaEvQr1bdUJdQhFCsQNkQrY2wUJ8+alChLe8QsWUt0LAJwNCo4aqQp3BxkJd3GZCaBDIQnLgEEJtGOVCg32YQl2I80KP68dChOuVQoYUb0KJCgpCSFr5QomoJEJEc/hCuryvQrqBTkLDm7FCTH+yQpRo2EKWDWtCvcFzQrOyhkKoLblCneCCQpG920KTEdJCsW1nQqHzBkKZlghCp8lHQoKiyEKSFHxCS+OkQrdQRUJUlINCjXTIQp6s70Jv8XhCjXfsQmx3pkKR/AFCSZjfQrCrjUKZh1hCbNI1QnY3ukKDeyBCr2pSQps3iEKxa05CSk6nQkf0y0K3FehCSiqWQorYbkKVApdCjor3QpYNNEK/dLtClhoXQpqQ/kLCamhCqeL9QkYgBULC34pCuroyQpZD6EKSVbdCX6EwQrNgI0K6VjVCxYeYQlClBUJNg4FCZYc3Qq9mpUK++q9CbUjTQk5HOkKNRPxCvU02QnZNz0J+zNVCo9GSQsL6mkJLpaRCnUSCQnMz6UK+YwBCYIp1Qo3xDEKN7sBCWKe5QlHOBEK/b71CZA/qQmJ8XEJujTVCaQs9QmCNHUKVJi1CqBGCQolFvkLABjxCZXebQpYZv0JQgdhCk1csQpJU1UJo33pCwcVHQpL7J0JZf7BCWaWgQlNpHEJ42jJCr9nrQoVJk0Jpu0tClzGVQriUmEKX+jJCvY6WQpPS/0JdUPJCg5kGQr6s6EK3t6JCjiCeQoh7SELB9j9Cn6FaQpCdz0LDdQBCwKIbQpAun0JWE+lCXW6xQsIagUK8leJCskHiQpiVV0KxTSJCVGY6Qne7VEKRqABCjI0wQk2SAkJZbZdCuW5tQq88tEJJq+BCb0eUQrGurULF07hCSYsZQpoK+UKTHplCl2CSQrjQNUJWtEpCtx8FQle8U0K/82JCeLYGQrp1LULByINCdxxcQlhDYkJaxbNClZZNQprA00LClE9Cmw10QsITp0JhkBtCUspWQrXv40JzdaFCmG2jQpLn8kLAIfxCmvcwQoD2HEKT6V0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI4wFTCBEQ////////////ARABEP8BEMbtzukCEJCj0OwDEKGKr58CEM26oKMEEPXu+9ADEN7P/6kCEMGyzusDEPnZr+wDEPqN1uUDELywzusDEJyZ/OkDEKWi1HEQABAAVFsIERD///////////8BEAEQ/wEQ8sSuqQQQudvQ7AMQ7qCykwQQlIiqowQQ+KK/6QMQk+akrAIQocPNzgMQgub8mwIQkcDqoAQQppyC5QMQ55rF2gIQ
j/6QXRAAEABcYwgAZGgbcwgAdHgbgAEBPEEAAAAAAAAAAEgBUN3xxta5w8bDKlgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQrpfM+gEQ4e31PBDu7YKZARD3+NtREKKZu7UDEOK352sQ4qz0rwIQ08rN5AMQrban5QMQtZKzqwIQxdq6nwEQ8ovq/gMQ7Zj83QEQx4vfcBDOsavvAxDVrfsQELauyx4QptH6vAEQ1/v2fRC16O2aARDG0POQAhC17tI4EMq/1n4Qo4zTEBC5/ue1ARCjv9xiEMOZ7RQQua//LRDznrKZARC2rbPlARCm3sJdEKfMq9sBEMm5hO8DEKG/1GYQ7fz32gMQzZyanQMQ+orrnwMQ8u6S8wEQChAAEAAQABAAJCr8B0S+IO5DeUFNRAEn6kV1yP1CrnMUQlY5/UKXd8dBtXsxQqgLykKkhbhCjyAUQjOoA0KnTSBCgKi2Qnx8fUHN80VCrLI8QqgqaEKKM2lCTPoQQlkhQUKSpchCWuaFQpvJ8EK/M8ZCqGz+Qm4aVUKR56BCoPohQolvfkKs0QRCXmNQQq4jrEGA+3pCVP6sQqIhFEKxqyNBbWnlQozVEUK59y5CUYwVQqajfEJVrCJCtjUAQppCTkKfYwNCh3thQrkS50KNuclCj3waQsDXFUJPVi5CpWNKQq5l5UKCzgNCi8yRQsDtZEKUSvJCuQfCQrZLvkKiBV1CpoklQoDkA0KbhAdCt2X6QqZpFkK7ZFZCXR6fQqtl1EKLrY1CjJ7BQnAxD0KanotCX5U8Ql/LsUK9RS9CcWeRQpkqYEJzMD9CwJPGQovch0JliD9CnFnoQmHbwkKp4YJCryCSQsNSPUKblI5CxG6jQqy7XkKi2qFCxMN7QpZ4gEKMbhtCnXFBQqVqR0K42u1CoKc0QoZ0/kKcXixClDzOQp8JgEK4KBpClfjtQrICuUKEXwFCgk/wQl6XKEJIjpZCuxF/QlE4s0JQd9BCgaqnQlk85UKYakFClhWNQp/nl0Ks+5RCoiylQo9GakKtiSdCtjYqQrL2f0KVr25CgXGhQowD0UKPETpChUsNQoU9fkKL0kVCl80qQlkDREK1VNFCwEgzQlOlSUJXY9FCk3ohQmOZyEJIfDlCljrjQoPHS0KevXVClYEFQpHvVkKREmhCmsGFQm/730JFWiVCS94pQknyu0JPMvVChtoJQl2Ex0LDdJJCi7qxQsUKJEJEtiZCuy7KQmskTkK2DfJCmzgFQpIi10KV14BCmrOXQolMfUJ6MV5CUlxeQsBS90JPYllCWw2OQlKP+UKnN59CtAsbQor+/EKsNadCkraBQpTa8kKKWJ1CsP+WQrSdHUK2sUFCoWmtQkSonkKGGRFCVM4/QpMz+EJOVAlCmX9yQpBLokKT9xZCwhlBQpj4ykKbggdCpAixQlcJYEK/8thCtUr7QrGHdUKHc9VCkoblQk3FqULB7WZCh70DQsA7pUKJ+r9CV9ONQmbpPEJRr/tCio7mQqEm6kKRz6ZCv9NkQl7MjUKWJoFCv6caQpP4ekLFa9VCXREyQpdNK0KOicNCZd22Qoy51kLE7MtCWzFRQp0tnEKQ61NCxZHcQnFJ60K7YfcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOUBUwgREP///////////wEQARD/ARD5iK/qAhC0+ImdAhD0zPvSAhDGyomjBBCu8sLlAxD7nZ2gBBD+wcGwAhDU/dKqAhDD+9LZAhDnjoOsAhCmiJznAhDr2sfnAhABEABUWwgREP///////////wEQARD/ARCT76/qAhCyrLOVBBC1tJfvAhD2
i5TjAxD22t+gBBC4tcDjAxDmveezAhD/2MDsAxCW8KCeAhCrxO7ZAhDKz7qLBBD1oK6VAhABEABcYwgAZGgZcwgAdHgZgAEBPEEAAAAAAAAAAEgBULr77pOk9c/ANFgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQwvW2cxDhzLpbELe2lmEQs9bW2AEQ057z8AEQvfycuQIQ07De9wEQ+a2tXRDx0ePxAhCn6OveAhDRspbjARDe8prrAhDHy+W3AxCqmcWsAhChmN3fAhD9lfvUARCxr+qfAhDl0NS8AhCz2PXgAxC5jd9XEN3qhpkDEOKZwqoBEKqvh78BEMO9xRoQveqX7QMQzsmecRDLmNJmELOyzJQBEKaw1PIDEN/cwnsQ29LvIBDTnpZREKHJ7PsDEMbOnzsQzorekwIQzZDc6wMQz8m1fxC/ERAAEAAQABAAEAAkKvwHRBi9IUVTfGNFd+e4Qk4xjEWEFoBCG3tSQhxdoUIXO7BCOUEJQoLMK0KdLF9ClzG1Qn2oy0KhUiRCqPVaQnHEo0Kix3tCtL9nQoDTm0JZyBNCv5nkQn1T7EKVFxZCeigmQlO91UKmjw1CX7wkQqs3XkKTCIdCUJMNQpjriEKOpp5CbfjCQr2OxEKzDstCW1/uQnEdi0LBsz5CvGKuQlL6MEKOGn5CeS4xQp1nLkJ33iFClcWFQsE6OULFCopClvSsQowdn0DtWidCsFHpQokQMkKKQo5CaOhrQlVp10KDeDdClgx+QpBe4kKN6z5ClFKmQoBfgEK4WvxCtrhOQlN8l0KuiztCxAAXQrmLvkK90uZCuKxyQpltZ0LBnSBCwlLPQkDyf0K6vEVCgMo2QrKG8kJY3NBCgZiUQq9C0kJ8n3RCUgjTQr4SvUKWvKJCt3PqQo2M40KI1PxCgL+SQrGd3EJMXdNCh//sQpRVikJ1qs9CtzznQoVehUKWc+RClYrZQpRrp0KFcHdChNSwQpeXiEJ+5GVCiWp+Qpo7p0JmQz1CxcWIQrpL5kKBHa5CgiTiQlZgKUKVCxJCRmNcQr8IM0KTS1ZCsodnQqBs1UKLLRpCiIJjQmf52EJO3sFChoAhQpKISkK/qGxClIN+QoB+oEKvm6FCgvfQQr84mEJpB8lCbAOrQpUrq0KSQyNCtH6JQr5UjUKKb8lCJPX1Qlrw8kK+MtlCYAIqQmtc2UK+xO1CRk8QQr+eJ0KSPzJCjl7GQpv600KWg3BCiMYtQlSbpkLBTNBCsdjhQr2MOkK9nKNCh8vvQkvJlkLDE4BCTtqfQmedLUK+VOZChCT4QpK0I0KXcWFCw9w4QnWMeUKM0F1CvDpRQq9nykKQRBlClttrQrLgykKKxhxCvkpwQrsJFkKc1b5CavRwQlcu50Kw0JlCSX0MQpNwTUJJTbVCc2PIQm6UlkKmZW5CvPa6QkYZokKN6v1CtMTWQm2bzEKb1otCY9EhQlSHdELBzsdCkKb1QokXnkKVI3VCkDeyQrpjfkJsoxxCXPHrQsAOkEK43TBCkNknQkoS00K6hM5CWSH2QsP+5kKwawZCgUmJQnDuC0JW81BCk8weQowkrkK4nwRCuQ83QsEglEJN4xBCw/GOQlU6kkK4WaBCksL7QsRc5kLCLfJCXUTsQk4fWEKYIaxCrZ10AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI4QFTCBEQ////////////ARABEP8BELq0n58CEJi06dUDEL/D55QEEPvTgq0CEL/0iKkEEJLPje8CEPbb2NICENLs3ZcCELrOle4CENbouNUDEMCF2c4DEPyq
pQoQABAAVFsIERD///////////8BEAEQ/wEQ6t3E6gIQ89mH1QMQwYuI0wIQusHKsQIQ74WmoAQQ8KP32wIQo4iz4gMQopHe1QIQwJyGoQQQj6iimAIQqrmc6QMQiOmNCxAAEABcYwgAZGgdcwgAdHgdgAEBPEEAAAAAAAAAAEgBUKit7onUspjqzwFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEN6UxuECELa+ur8DEK/ejZcBEPiYgxUQwcz61gEQ86v8swEQ5ZDVsAIQ3ez91wEQs5DK9AEQo+iVoQEQ86rtrgMQ5/b6qAIQ6f3t6AIQ8+7S3AIQseuz5QEQ2d2KoQIQ6dXy6AIQ+7jdrwEQr7DTuwIQsYqVYRDl947jAhD3ncu2ARC90/L5ARDd9tLmARDz29OoAxCu26chEO/Yv9kCEKPtypoCEPW1/uIDEOKsmy0Q1bX8NRC286XtARDamf5cEOH7n/sDENmN++EDEP7yot0DEPGP7pYBEO3v/fUDEOfQARAAEAAQABAAJCr8B0URjC1E1fUwRMT4MUJEnipB+vO7RXHrfELDrRpCmjMnQnlTdEJQmstCwE1aQoGZv0Jrjw9Ci1BMQnxlTkKxkvhFaG6CQqAbCEC8xHdCXN5PQmGoOEKK2fRCwEsMQpKwWkKdSqtCwXyOQpTBT0Ki1ORCjHNjQqgSvEJzhRRCce9fQoTWUUKsbkhCagxBQl1zsEKW7gBCXeG1QrRz8EKpcrBCkPQ8QlqOdUKcAEpCuH9LQpoe+EKzNLZCTi1SQow7GkJucahCldS/QsKHFkKYbmNCnO85QTea+EJZcXRCmCjSQpJaEkK4bdxCod4KQr3ykkJOl9ZCVtBPQp1F8EK0YmlCnkzXQsUYoEJO2M5CdAfCQsN6gkKlcQNClfFeQsQfAEKY7r9CcnRCQlJMLULBbGxCl/oRQsNcaEJzIrNCVzeCQpTPWUKEcWBCmUgPQpjLaUKNyLFChohQQobrNUKdzZ9CxfCCQrvPJ0HOcJVCo/ffQl1pmUKa8MBCrHa/QrtzGEKcahNCdiOEQrZizkKYzi5CohhcQsSK5EJUa8hCgHZlQnFEgEK1j5hCSghtQriPGkJqwp1CjOiIQrz7nEKpsE1CdgYaQlHJgEKZqClCjywkQpDtC0LDE0hCuiEPQrdumUKXvzdCYiwaQrgVo0JJTx9CvdVcQrkMmkJbgX1CbwFoQn86XkKTMk9ChtdhQlrFwEKsRk5CkHDDQrlaoUKIVW9CYzt+QlCjTEKUBkBCwd01QqvXH0I0VJxCla0UQrhw10JXJApCS41QQl0GfUKPVMNChzQpQsFVtkK1yntCiLjYQlwsBkKAyhdCtpSXQrvVGUJJMZ5CVd7lQlHJMEJ/Uh1CsEA/Qpsx+kKAMzpCnxw/QpOcsEI55wVCkjZpQpzOF0KFtOxCbVRxQrsRSkJ0hoFChtCNQrZYIkK4Jk5CU66vQpcC9kKR6NZCwCX4Qr8oTkKGw35Cv1TYQq5OJ0LFHfVCZGB/QmPbW0K/1ltClLEbQsQslkK6ERtCrQlQQrrWykKeqkBCu9LhQkxZjkJK2YFCnUbKQqBTZkKTD3RCVC9vQkVp5kJVvWRCuHSvQkp+K0KZ7jlCwAJNQpQvXkKVhc9CZQv5Qr1+gkLD0thCWaSTQrUbT0KZ6GJCuni5Qox9s0KTspVCcmQhQpZIcEJoqTxCkiyFQpfZvkKSVmhCZDVdQoPuv0Kyj6xCWx2NQlQ2/UJYYu1CUuSnQrc0RQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOcBUwgREP///////////wEQARD/ARCeyJmqBBDg16iT
BBChzNuOBBDtl9XaAhCRwuGqAhCD5vqfBBCI9+fXAxDyoLKqAhDJ3YiOBBD7ub/nAhDD2ZvjAxCp2+LSAhAQEABUWwgREP///////////wEQARD/ARC0g6+fAhDbit7XAxD4/ciVBBCkovuVAhCWmq2eAhCU7dmLBBD05NSpAhCg+eGwAhDV8duOBBCWr6DjAxDz49OqAhDazY6VAhANEABcYwgAZGgXcwgAdHgXgAEBPEEAAAAAAAAAAEgBUMHwpsqW7faJiAFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BENLVmz8Qpfuc/wEQ4cqmnwMQ0dj30AMQue/MUhD7v9OmARCxyJU4ELGPpakCELW3qtUDEMHXutsBELue0+kBEOWrztYBEMHP5tEDENLo77sCENLYiusBEPKJ+pEBELaf6p0BEO/d7uUCENeRlPEDEN2P8rsCELWp0pMBELrd1pQBEPL09fQDENP2zbQCENmM45kCEL32s/cDEL30xa0BEOnY8qgBEObRrNUCEM6QovkBEKeOt9sCEL6oxfwBEK6Ko9sCEKGo1p8CEKuftJcBEPPw01kQqtj25AIQw/IBEAAQABAAEAAQACQq/AdELF+mRVhub0S37E5D7CiURQIbHkCl0GVCHVt2QoDe7UGq2xZCn3neQsU2SkKHPFNClRZ9QouOzUKcaFBCVl/JQq9Va0HNclBCkeMdQr7v9kJjauNCCUTgQpXF0EKYdQFCqnWtQoe2EEKndCVCtK8hQpXQ/0KMRINCe2bGQoVvEEJad1RCnC99Qo6JAUK/dMRChv3XQqPQX0KZZOdCrqLmQKmZKkJqR5dCgX1ZQqedw0KJAdNCtgZxQsE6cEKwtxlCkHzDQj/hy0K8llNClKgtQr3bpUJ6lD1ClNTyQr59kULBqWtCmfFjQq4mTkKd/7tCUBbAQpVHm0Jtyp9CkjwNQkwnr0LD2LBChrAEQmLgB0KRBbFCcZhgQpJDqkKYtE1Ck7rgQsPbGUKWI+5CUmNdQlyXmkKRCnlCw1BjQriPvUKX5rJCSh1wQkiP3kJbIudCuP9gQsKBA0JMKMxClD2/QrqRpELA99lCvoX/Qmw56kKJmoZCn26FQkaAlkKHUBdCritRQo+p0UJSxylCjVmmQlXdiEKLMgtCZtD6QltMyUJdF+BCU3KFQn9ceUKv8sBCRGPXQsBsXEK3DbVCq2x7Qr3J9UJke55CfklEQr/twEKhhHpChThyQkaOgUJFE1JClF4BQpK330KWufNCjG5KQlBzvkKTEFFCxOW9QpNAkEJlypNCfSfZQpqKMUKheHhCwwQIQlR2X0K03sZCsM7HQpYo6UJbuGZCWKF1QqlzQUKscjFCwNisQo6T/EJEPtFCiF41QsI1m0KF/nVCpf4cQlktR0KW5HpCiCE6QpzG6kLDCP1CmNayQsU9aEKAKpxCkmRFQoaR0UKnWLtCwME7Qk3Zc0Ky+V1CllnGQk4uPUJX/ORClQQUQpHL20LF98ZCuFEfQltnikK6lK9CuW8wQq8jh0KGqxNCw3kxQqjzEEKnklNCjxhsQq/kWkJNmuNCSFrUQoiid0KKsMFCTbjYQr8pvkKb8DpCvRevQoiZ4EKKqSFCwZvGQpBKgEJqvgBCwC8+QpeLiUKXT5ZCmIR4QsL3qkKNfVZCjD51Qo2Zt0KPeXZCJiR2Qkfg6EKcy61CXSkVQo3mskJ0H11CwNLDQrwZNkKdl9NCV9QaQoP9tEK38GVCli4CQlvLlEKUDSNCvVkgQoWE+0JMxdlCTj13QoXU6UK8qG1CVjNAQrWpwkK76uMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjhAVMIERD///////////8BEAEQ/wEQ2dqdlgIQp5qXqgQQsrufnwIQ9p+4qQQQ67mOnwIQteX70AMQtqWtqQQQ0qLc1AMQubGpnAIQjJCzmAIQuaSzqgIQk9WmChAAEABUWwgREP///////////wEQARD/ARCLm6noAhDg7JbuAhCbkv+cAhD+6ZrpAxCZ2IOfAhDxop7oAhCJ9JqgBBDW9P6VAhC4somcAhCizbzTAhDbyOacAhD9oc4KEAAQAFxjCABkaB1zCAB0eB2AAQE8QQAAAAAAAAAASAFQ6Nril/G+h5kzWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDOvqV9EOn7tt8DEOCoxLUDEPGosvwDEKnolfUBEOX1zBwQyrPXngEQ5fSWqwEQv+rjbBC+iPbeAhDBvfuZARCztMonEKaoyvIDEKvxlrUCENWd+rgCEL2/m6ECEOKTxD0Q57Lf3QIQs5vMWhDHiL1bEK3I+1cQ56mL+wIQooiF+QMQ8/LcGhDiz+xuEPPbvuMDELexxa4BEMGb5G4Q19XVugIQqc6irwMQ4fDfowMQxar2fxD2r+TZARClreX+AxD/q9aiAxDFmdL4ARC/s9PTARDB6/y2ARClv58FEAAQABAAEAAkKvwHRDayfkUZCTVCBmo0QsH5RkK0LGJBMtkRQq7liEIfUYRCj5ZSQnSCA0KdhohClM9SQJ1BrUJPdmBCns+ZQryuCkJtRmpCWxgfQo3VEUJKsu5Cn9/ZQqGvakETespCSFFkQpcTUEKJRFlCraxNQryoo0K2JfdCgxmMQq0Z6UJbWARCvnqWQsQweUKJ/5hCkPabQrROBUK6XIRCwbWbQlUBKUKbYV1CJHd0QpxwxUJgNI5CuQMPQlB8UEKyeiBCueT2Qkw08UKTXIhCkxziQlP39kKSU+FCrG0UQq1K2EKRS9dCnlHxQkgEA0KSAGFCptOSQok5EEK/6aBCWeOGQpo6FEJfxHJCnj0jQmXk8kKfLcxCsO0zQqXGGUJej1FCvfF7Qr1FskKlusBCh46+QpkwBEKJp+ZCaMUtQlECYEJ8krZCsiQHQk/0f0LDbU5CWy3/QpVSukK8z35CcX4vQpyQ40KRMrxCeZ3AQp2wDUKLC8JCUCvfQpFGD0Kzx/VCq+HFQrJgQ0JoMEZCnIYiQqZT10JY3fNCR1TeQkW0Z0JLWSNCxImgQmNf+EJpBcZCmIiSQlMUY0KdTw5CRC0YQrEejEKXUOxCkSc9QlyzLUKroXxCkomgQsJPrkK1NsBCjV6EQpEaikKGENdCnQTAQorcG0JcGilCmV7xQlSdlUJO3t5CmNqhQpedfELBdT1Ccih0QsU+kUKFvflCgS3JQsJriUK985ZCa6qjQlsGoEJhILNCot/jQrtsLUKZ0WtCmUMoQrGzokJl2ENCmfl+Qp84rUKFZYVCrgW7QkgUiUJHAn1CUanqQlxydEKuQFxCvx49QmXPe0KHvcRCt4GWQsO5BEKVSdNCrb/HQpHztUKvZAJCcXcEQrVnvEKWlq9CrXetQmTNLEKta7tCvN/0QsVN3UKWTANCgOSDQpNrIkKIREpCoftJQo4GMELCz01CwD3WQomTLkJKCOdCvx1yQm3rzkJRZitCumSqQoVKmUKuKdtCKBoyQlahD0JhzzNCT5WCQrKx1kJeVi1ChNcDQrblUUJ2iEBClyjpQpvdM0KghStCj2U5QqmHVkKaUZBCbYHaQmkMIkJEfE5Cp4sZQkVJXUKiISNCwLmoQpWP40LA2FNCTFfrQo/q50KNlIBCSMkEQkhYNUKLo/lCwzuyQo2n+UJraydCkVi+Qo/2xELBv/hChfL2QrtLL0KYbttCU0MCQqL64kKeYGFCTb8/Qk6d0kLFe20AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI6QFTCBEQ////////////ARABEP8BEM/kuPECEPWxxowEEIi/i9UCEMiGwpgCEMzOyY4EENPUzaIEELDXqakEEID699ICEJ3HvLACEJCw4aoCEIuzn6MEEMn3+ZUCEIYBEABUWwgREP///////////wEQARD/ARCq0a7sAxCx6p6gBBD/19DqAhCEgcnTAhD7xa7sAxDA2dnlAxCLxODrAxCyhMvwAhCm9IfuAhCs6q/wAhCEt5GVAhCT2rWqAhB5EABcYwgAZGgVcwgAdHgVgAEBPEEAAAAAAAAAAEgBUJiTgoSd3aPgWlgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ0pX2tAEQt5nGvwMQp9D8XhC7r93UAxDPytStAhDdsd7UARC+mtV7ENaO1PsBELH6uu0CEN6Y5p4CELOLy6kBEP2cj90BELnK21gQ6tuvowEQx9LGrgIQzc772QIQrZG2YRDz8rz1AxDNtNJTENX9pXMQ5+z9rQIQtvbKpgMQpt3i+gIQ3q+r7QIQ8/WyowMQs43ElAIQ5bDdfBCnic72AxC5q8v7ARCx+9r1ARCx3e+cAxDTmcduENKazZoCEMP12vkDEN6K3+cCEPPrx7kCEKOP7JABEPaqyzMQrd3yEhAAEAAQABAAJCr8B0WHfv5ELztEQrR+OEGKn0ZDTpKNQjRhM0IX+/5Cj9CJQnAw0UGEAxRCXfJcQk7RAUI32JxCSy38QrxyMEK9g1pCn/vCQg2sZEJyFPpCjVD2QoVUwkK10Y9CmbKXQo5ZfEJYk5lCqGPIQbEqG0K0fhdCTle1QrkiH0KM2xtCxVM1QrvLVUJ4MgNCgGA0Qp1xnkKz2tpCkOttQlkLrEK9CiJCZt34Qkx5eELCevFCwgrTQolUWEK33CZCWSeoQkZFHUKCIsRCpS2AQmsIyUKVtAxCtWcyQrwtmkKgMppCmEysQkgXp0KK6/BChpJBQo/lRkJLHL9CihVuQoyJNEKAjr1CTutTQrPju0KHPBNCZKk9QmBoHEJoJfFCpnsCQpMILUKl0bdClVkjQplEAkKXVnlCivnRQpSG4EK7C3pCjiDiQlCjpUJI8n1CnWtrQrQlt0JbRKVCrkDfQqLgq0JeAoZBzcfJQrzUKEKcM3dCui4MQpvE+kKcua9BkKRiQpXprkKWLcVCkxDhQsCJp0JonT5Cvti5QsQSmD+9iiRCZbwKQr8i6UK0YlZCqp1rQkYXr0KVeOZCv2kPQsCZ90KIh/JCUtI0Qp8YN0KiqShCnOWcQmGDLEK6gSBCxY92QoHMrUJaakZCiynZQoDrXUK+xKRCR95BQsJu40KyHG5CnIkJQr53REKISKtCjoIGQr9ynELBYplCgzOOQm1PMkKSUCdCiGSiQnQ7ckK3C9ZCo4deQpRcTUKdQD9CqsymQsEtIkJkNcFCSrdUQrfg9UKAX/1CpeRRQsXM3kJWh8JCh92WQpFOPELFfoBClaUxQpROuEKWaRhCcQgWQrLhpUK3+oJCiYSTQmhSr0JrtphCiZ0ZQpOrMEJ16yRCiSyUQlifH0KNtW1ClvN0QowA0EKTxE9CXmhkQmHmGUKZPk9Cmr5EQoH0VUKdIX5CUJg6QkjaekKPOqVCmenhQltaaUJN8TVClq5cQo1AZkJN7bFCwfnLQlOhYUKC1VBCljBnQsSjKkK2H/pCvVPXQljm4kKd0xVCjUKwQoSVnEJpy3lCwp86QsWb/UKNLupCWnAuQkQX/0KyHbhCw+/BQqn3QUJtfcJCXZtTQryb60JHgIxCUIZzQpBnv0JaetxCg/tGQlKzIkKIS7tCpE19QpUCZEK+dF1ComGxQp8bvkLDPMVCWMb/QpYR
qEKMp9JCUuu5QlGfdEKUAqdCjB/1QmjcJ0KKV3FCkhTnQoxEGAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOoBUwgREP///////////wEQARD/ARD18LepBBCst8vwAhD11JrOAxDDzLvjAxDFiZ+xAhCtsf6fBBDLr/qpBBCdu57nAhDMvqOMBBD0pOqnBBDQ1JKVAhDWsaKeAhDtAhAAVFsIERD///////////8BEAEQ/wEQ4PmM2AMQ1aju3AIQqquV4wMQ+P6K6AIQsdnwswIQocqOowQQhpK78QIQhKXKlAQQrL+0qQQQ8/zn7gIQv/CengIQzOf0qQIQiAMQAFxjCABkaBRzCAB0eBSAAQE8QQAAAAAAAAAASAFQoe6xvabu/pmYAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQpfTEpwMQu7/GJBDDju5WEKa1m7ACELnM5PACEP+95tEDENW78vwDEO3Z5KkDEKeY7PoBELHJ3Z0CEOm0uhcQ6fCOlwMQypjb9wIQwrvq5QIQqa28WxClyOXwARD9lNXmAhCq6JJtELHKrOMCEN6y664BEMasxrcDEK+Jt28Qs7+dmQEQ6dqjoQIQ+ur+7QEQ2ZSW/wEQ9fjD/gIQqrWt1wEQtpXm4gMQ39yKLxDZ1eJuEP26/vUBEN30090CEOrc6n4QyYzK8QMQ346UWxDi8/ueAxCt3aznAhCnFBAAEAAQABAAJCr8B0OrJT9D3Xl+ROCyf0VIIAFCg4LwQr4L3UJlCIFCUxNTQkJ+u0JSWJhCmjxjQqz/QkJSeWxCtwvAQoZ8ckFODNlCiIUzQpJox0Kv/J9ClYm1QrZCxUIxly0+ePhiQbXzVkKkRLlCnz+BQpIsiUK+vGdCokUjQm8mZkKR5mRCUb9OQkXyKUIvrb1CVqqjQqWChUKApjBCrWIwQlS8WEKkNE9ClPI0QnKGk0KtBylCVLweQmc0S0KbJeFCUUDuQrKID0JJYXdCl4RNQN+L1EK5t19Ch7gkQlx5NkKU4gdCoyELQo3UZEK9JJZCY7E4QoZXXEKJBfNCSA4MQrY6VEKDzfpCvJgnQobEkUKiZkhCX9fbQrt4x0Jqw9RCtjg+QoZ4d0K22bBCqrWtQm5Mh0KTGWJBD/0rQmHncULAyZBCsobpQlDIo0KTFChCVw73QsDzEEKRgvBCmRXKQoOss0K5u0FCvTyAQplOeUKScRFCi932Qp4+ykLCfh9CnnblQkwl+EKV1NNCZcF6QsXGHUK2BjRCh7m6QsDO+kLCvdlCl4WJQpyX8EKNftFCpycFQqb2ykKRIURCmvzGQp7Q4kK5oLlCteK7QsIZNEK6TppCoFDSQrv/IkKNjShCn5OpQr7fq0K9LRNCuPouQo/6JkJg4ddCdiA1Qlw0MEJVj2BCi78NQqIDqkJd01ZCuHwtQr2kOUJRpTlCk7twQk7TqUK/FsNCtrPVQphdPEKfo49CWi8sQod+q0KWSo1Cq+w8QsCCpULCFANCSe96Qo6P40JbJ8xCS00vQsRFIkJvVddCxNTuQrCookJN/N1CsaOvQmxV1UKhwmtCTfgqQldcWEKW2itCjuwKQkVkgELDJppClD+4QsE1V0K4XYNCvWMCQpCwHULBqdNCkWayQrwSnkKAQFtCo7YwQqdrLUJWn3lCvFy9Qk2IukKV4BtCsIPNQsN3akKVYhNCxP2MQrcDPkKSpalCniu2QsLvT0KJP+RCZWivQlTiQEJK4LVCYfIlQl1h/0KZxm1CvbSZQr5U0kKLtUhCYjrIQpFJiEKoz0ZCah5fQn34ykKSRbxCmspcQkrSpUKSh7lCvZ64QpnxXEKaId5CT35YQlfsxEJZ
DPRCt0guQpRhQkK8AG5CuOsSQqDuzkK+fFhCSPOKQsEblkKbQI1CjUEOQpbHN0KLxMNCqbHHQmqGekK5LjNCo911QsUaxkJaAitCjMZmQsDtTAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOcBUwgREP///////////wEQARD/ARDpi9fwAhDWk9/iAxDp96eiBBCwluKsAhCy0+6zAhCz5IyjBBCjj+PkAxCA6Z/QAxCSg7uOBBD3392qAhDRy4vuAhDiuK+VAhANEABUWwgREP///////////wEQARD/ARDT2LSpBBC/+7ipBBDWzPPkAxC/gdKzAhCtk+2wAhCk+s7qAhDJ68DlAxC7x8eOBBDu16mfAhCV4/+pAhDEgNnOAxDn3OPZAhANEABcYwgAZGgXcwgAdHgXgAEBPEEAAAAAAAAAAEgBUJ+5pK/z0sf95AFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEMKVvbYBEO31+hAQrvrO4gIQ+MnnsQMQxdjM4gIQ59G9lQEQv4jnPRC18uuuAxD5/8X8ARDajf40EOm32nsQtbHuNBCxyKf7AxC10P3oAhCt8cb7AhDymdvbARDG8dtmEO2b3NwCEL7L2loQ05j7sQIQtZ7t4gEQ8rHHIRC37sRVEM+z194BELfRlPkDEMWSnJsCELqv0moQz5Xs5wMQ39HD4gIQx52tVRDGque5AhD/9bUTEOqe7e0DEL6M/tcBELuS5ZgCEKvSx7IDEOqRkpEBEPuV7v0CENMwEAAQABAAEAAkKvwHRXQ1kkU+8HVFez2hRRw+5kETEOBCRmLjQcni3EJMtLtCnYweQpy7skIxzZ5CgEFiQsC0nkJPQbNCcZqXQeGkmUJLbLdCwDT0QMoh9kJX9vhClw4UQnMPakJb/alCYex3QpWCw0KqHANCSLOeQqa6QEKavV9Cwpi8QlGoXEJVRnpCxQYRQrVzkUK7pulCpv07QlPMJkKHrlpCmvgoQnEshUJmvtNCTyWMQpQVDkKRVy9CwxaKQp1dgUKTkAlCuxCtQoF60EJRsnBCivPFQrQqt0KRzNZCZGonQrGXsULHaqRCmFGLQk7nqEKZdY9CiqZ6QoMvV0JvozNCpIQ5QpEGmEKOWctCZl+JQpOrNkKVNhBCtM1KQmOl9UKFuuNCSlA8Qg/y90Kkd+RCiwnIQllerUK6WqFCgo7xQqskyUKf6w9CwZF9QsMNB0KI4CRCuUR+QnyokEKXY7VCwVBmQoe7rkJT+CBCYek8Qrx/40JWUcZCc8reQpD1ukJW7LlChZZYQrKg7EK4cEFCpsSJQpatCkKFuwRCwacuQpUkOUJNYNJCf1GoQrBglUKHM3BClwX5QrH2JUKEvJhCmCa+QonwBkKeHU9Cd4xSQljwOUK9mlVCVvrXQpU2pUJUfK5Cd3OxQoiy7kKSAXVCV+X7Qrp1pUKUqI5Cw2SiQr9LQkJ57bJCv1WXQk3rIUJEobVCs0I8QmlvrEKa9vVCkuYtQqwh1EJuIeBCidL+QlReyEK7oIJCvpbdQlmTa0Ki5j5Ci5auQlJbDkKaEPpCxK+TQrcjXEK2PspCbnX7QpVWsELEC5BCw7J4Qr9O1UK355pCgq+3Qr6Ft0KFaLRCh6KnQoRKnUKM/rRCvP6nQkuIjEK/nNVCw2ZWQrJEnkJIJR5CtvGFQnQHrkKwmwZCsqXnQo51uUKik+BCtH03QoE3lkKrSyBCSn/eQqpD8ELD1eVCjiOrQrZGm0K386lClAO/QkrENUJolPNCZ9kKQm46MkJYbG5CwHddQqloTEKvBV5CjFwNQr3n/0JaE+5CTScSQq3fGEJU141C
prLiQsJK4UKWiFpCotOgQnwW+EJdQQ1CiDmZQlhg20Kc6UdCumBLQrlTk0KI3NdClGcTQlgOwUKJjjlCwHApQlg1d0KMqgxCVMv3Qr8sKEJKw0dCvzJrQpNSckK8LZJCkcPfQlPhfEJNS3pCxSbzQlIKf0Jm0cNCRRtCQoThEUK76yhCvLX2AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5wFTCBEQ////////////ARABEP8BELimho4EEL6j4eUDEKrYtKkEENne/J8EEOzbr+oCEL7b5pwCELKE/6AEEMiLo4wEEMbC0qoCEIjV5qcEEL38yrACEMXR9qwCEA0QAFRbCBEQ////////////ARABEP8BELSul+8CEITAiNgDEOGBi+oDEPfujKkEEOTdypQEEJv16NcDEL7qkacEEKiSqZ4CEN+5ip8CEJS82tUDEMnquJUCEN7enOgCEA0QAFxjCABkaBdzCAB0eBeAAQE8QQAAAAAAAAAASAFQivb/p8+Wnfz4AVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ3rLlkgIQzcyNugMQybPmoAIQ1rO2ERDT0/rSAxD/q+L0ARDL+PYjEMHKv/sBEMvcs5sCEPWdx2YQ5p+UFRDp7JqjAhDCjNzYAhC3j+/aAhDV/793EP3O4hIQv9T3vwIQ4cme7QEQ65nK+QIQy5P+tgEQp63DIxD52Oo+ENadnF0Quq+1nwMQ4Zvk4AEQroq05QEQs/bVFBDr8/2rAxDfz/9yENXL6uMCENqMp+cCEM2U2uQCEM+N9TQQ9vjs2wMQ3vfPuAEQoe6zmwMQxfbaVhAAEAAQABAAEAAQACQq/AdE2bLFRSenFkSTZH1FaNqEQjwn50KfgC1COpgbQrPmMUKb9J1EhlxEQOeq7kBoL9xCBHelQsWXOULG7/VCAT/DQhp400Ks1KdCwbNKQqQpn0K9EJhCel37QrRAcUIEHEdCYkEIQqA3aEK5ZKlCkBH1QpybbUJ+iUxCTK//Qk5u3UKVi0pCg5axQpgO1EJundFCYKf1QsI2vkJJAKNCU2rHQlsqq0K2939Cn68dQq6WD0Kb6kpCXQnKQsJ4ckJ1lIBChWPIQsIvqkKUhIdClrBOQsRYyEKUt+JCnDbbQmOVtEJsk+tCcYBQQpJlDkK9lHxCsvWoQn1VoUJUJFdCuG+xQrmlFUKdqv5Cn/OSQkkcskKoZSJCiygnQr+NakKv80FCxH2KQrE5R0KVKW9CvxLEQpZfZkKcdxlCUdfTQojU/kKGoldCZVYBQplIvkKeZMBCofIpQr9kBEJXPGRCa52AQqU32kJi0N9Ci55oQmw3eUKKt/NCkV8TQoi/qEKUpW5CaMMLQqa8R0LDvhVCVBvQQnH740JELQFCmSXeQkYqwUKLxdpCT1iiQsJ31EK3CAlCUEUkQn8PIkJk+rdChaPtQk/mx0JsNpdCZb/qQsD8IkLAl5tCe3IuQobOg0JMIoxCTbn+QphosUKgZYhCiXDwQnOgOEK6RFpCnqHsQk4NiEJRSEpClvstQpGh9kJiD7hCuYdcQr/GP0JoD0NCuJAvQqf/3EKPY+1Cu2f+QoMqdkKLXYBCwuu9QsK2o0KiTlJCl2yGQmqzKEJImi9Ct9xXQovwVkLDWv9CwGcxQoyXTUKGxk5CuhAsQrcSy0K9ROVCVg4dQpWgCEK36TRCwFVYQo8agkKRFVhCWIIIQmTYmUK7PvNCuE9fQmfIzUKwxt5CYcouQsP/SUKTeRtCX7bsQou+G0JF3c9CkqpZQrMR60KH+XhCiiisQl5oK0K4vjRCwb0yQr374kKKbIlCaGQVQrgRpUKz
lW5CokhlQr579kK9C5FCkA0SQotLSUK4oopCVSeEQsLXDEKGZVNCv82nQpza5UJ9IS9CvgqCQlQfWEKDf5tCwRBVQkqbwUKTgltCwJORQrOe50LAA/tCVxdrQp3NZUJXEXtChLWyQoW43kKVQvNCqof7QsKO/kKO+1NCmU18QsYWdEK+mSBCn85RQoqirEKHX7gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyCEZMT0FUXzMyOABAAUjeAVMIERD///////////8BEAEQ/wEQlJO55QMQyIfKogQQ3YOHoQQQ/YXb8AIQld3VqQIQ+Pja1QMQ8LWM8QIQyPOTpwQQlNuDrAIQ1PzeqgIQwrb2qQIQ1/4xEAAQAFRbCBEQ////////////ARABEP8BEMSuy+wDEIuxjdYCEKLQx6cEEKDoj6oEEMfawqIEEKyPsuMDEKG8+p8EEMK/2OUDEO7t780DEL/gvZcCEJaNr5UCENPVPBAAEABcYwgAZGggcwgAdHgggAEBPEEAAAAAAAAAAEgBUNTIo+WqhdbQ/wFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEL6L7BgQqZWO/wIQ8sf3kgMQudPyvwEQr7nk1wEQz6j2mAMQr8387gMQscnurwMQqv3v6AEQ2tqdZhCh19qpARDBvof9ARD+vpPzAxCr/teeARDi9d/wARCj0/LyAhDend6pAxD93fOaAxDuqt26AhDS890hEP/yo+8BEKvtm+sCENGS3vcBEM3I9VYQodPkqAEQq760WRCyreeTAhC6vvb0AxDeiufbAhCz2YubARDVyebUAhDpjuvvAxD1ueKZAhCu+c2VARDJrseUAxDpyNTxAhC1iO6wAxD90MW2AhAAEAAQABAAEAAkKvwHRO4++kPtlXJCPLRxQrKuskICObFB4qebQZRXT0KA46dCtgWRQXaSPkKrgN9Bh3gdQrV2REJylNI/5ec+QmqRHkKQZapCl+J/Qo/J3UKjmzFCuV7IQnzrakJzP4dCc0tRQnkELUKPa7dCWyTgQq8O+kJWHaNCj6ZDQmPsH0LGtpdChhcsQlidBEKDvA5ClcTgQm14FkKdvftCj+txQsR6lUKQ2whCv5oqQqAwNkKCBEJCmlA9Qmw43kJF74tCvdrrQr2840KQM0ZCiK/aQmGNx0KPudFCu0q0QqKphEK1wEtBt3AQQsQ4KD/uP0pCsLfYQo21GUKLP6JCshbgQqZCQ0JsxAJCvZqLQo8OcEK0XopCfRZKQmDkRkK4AI9CXl8fQsFr90JP08xCSQR5QplUlUK20idCSzAbQknQE0KErkZCZlHXQmhdJ0KMBu5CizT3QqevJEJfK89CtXwGQmvCVEKNkwFCWxBhQoRS5kKH8YZCuS9ZQpU9gUKHrxFCazuXQrPCkEK/2A5CVlPSQlDdKEIlqT1Cs4cfQp75D0KAL9RCTgkgQp6H20Knh7VCncx5QsIKfEKAwOhClAxlQrikNkKMEvRCVhcDQrlDpEK1fK5CwJK7QsC+rUJbLBhCpGmOQn2ByUJbGMJCt+iDQpzJKUK5SWtCw03oQlY/vUKZJRBCipMsQpf330KxMO1CtmEJQqBJu0LFQXhCqGfcQljijUJSjh1CaNunQp3XAUKz8NBCnRiLQsXg90KLPlNCnSvKQpFz20KL925Cv2xZQrL61kKef+FCuLghQou0ckKMLu5CZxGPQq2flkLCIzBCnRQbQsPTr0KL8YlClw98Qmy9qEJZg/xClecEQsDajUKc7NBCWAZNQlbqAEKglKdCXmTJQr4OHEKnPCVCiqwiQmBJ
TEJQQI5CjZgTQkseYkKXW5ZCSh/1Qor99UKuuLhCjyOtQoyPpUK3VAVCk6KYQnftJ0KLEWxCnnWqQpo4j0JgWmJCkTWCQpjAOUJWTYdCv54zQo/GxkJNmB1CSDLSQoPcD0Jb4X5CnmpRQq2NVkKM6BZCXm4pQrpBmEJecaJClc1EQpx6x0K58GlCiZztQmryN0KPn1lCixP+Qobla0Jy9f1CkAqCQrTmUEJas6dCVwo5Qo0tPkKUiMdCkuWLQm6gLEKDA2VCV6eJQpAUZkKA+2dCwwbEQriR6kKepP1CSZ76AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI5AFTCBEQ////////////ARABEP8BEMjoqakEEIjPg+UDELatn5YCEKaIl84DEOTY4dUCELv4q54CEJraw4wEEK6/u+cCEMOe8LICENyMtKkEEPaiv5gCEL/R2pwCEAAQAFRbCBEQ////////////ARABEP8BEO75kacEEK2a5qwCENzPnZUCEIbRkpUCEO7JnOkDEMableMDEOCutNoCENXh06oCEJ2Nt+4CENXnuNMCEMbd+pUCENrLnZgCEAAQAFxjCABkaBpzCAB0eBqAAQE8QQAAAAAAAAAASAFQseDq+sTxlMskWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDeq+r3AhC26e38AhDG3eGuARC9z4Q5EM7L2x8Q4u/6lgMQ+fi27wEQp5PEkQMQssz0swMQtdKFnwMQsdrv1AIQ9eik6QEQ2tPu2AMQ177VkwIQw/XCIxDir7NxELOW/W4Qvurq/AMQ7bH1nQIQveindxCrr4PnAxD+s5ZtEPG7/JsCEO2ozX0Qvdq1mwIQ/537vwEQ3cqcmQMQrfn0FRDpvtpcEN/JzuEDEOK+1mkQz46cpwIQsure0AIQ1fGrNRC5zrZbEMW0/q4DELeLqrUBEPaNARAAEAAQABAAEAAkKvwHRXC+VkVcJxxCT4c+QpgosEJOW8VCWrBbQqW9U0KKIHZCoL+tQqFHj0HB8BNCRHUOQrKj9kK2vvZCf/ioQVWmB0IYypFCw+XYQoNWPEKQdSJCkz6CQrTmYkK1BHZAzUijQq7IPUKtamBCknjZQosOgEJZ59ZChkMFQqwD/0JvDJNCns5xQppaB0KDORVCe+AnQpIFSUJ1YANBlDuMQlezH0KuH3NCxffRQnvraUKJAaRCu+GnQqvPj0JSzqZCnpqJQryxEUKfPLFCqI8qQqb2wUJ5eqZCSSItQpIbB0KVmfBCsikrQrmjiELB05RCnc4oQpp0E0KKKg9CDxqeQlPzEEKdf9VCmBgtQpnuykJWMJlCkcm4QrrOz0LBvAZCw3hRQrFBX0LEQTlCw8HZQhWFh0KPLTZCh4d2QmzPrEK5YudCVLu5Qp4Nn0JHCTpCjcLaQkVDG0K2GDhCvmRvQp+hsUJcg8FCvpNtQsHycUJkKCxChvjdQp/4YkK9qExCgVblQlQSfUKYHVxCwoCuQqwk50KPAiNCwfgcQriuMkKBDqlCkmXyQpoPQ0KdtsFCF9qAQo94uEJiJ+VCsTMkQrsAIkJX12pCnsWfQpuV90KUlcBCmT31QmLciEK3eMpCWeq+QkXaYUKI2JlCpt5GQqBr0UK9ul9Cp6q7QrIR+UJUqSNCsNhxQnbcbkKi+UdCvFuDQoykrEJ88Z5CRTnZQrYzqUJFXJJCjxBkQoxAkEJRDjFCoa0SQrPnZkJXenJCUV4AQoX4xkKEGvRCV74oQsWJLUK8Nr5Cgu9mQkhWXkJymvVCUPaQQqANmEJWxhtCTxU5Qo/9hEKwaQNCi3gNQrPA
9UK/0GNCmkv/QppTW0KdJaJCYlqpQq1AfkJNx5NCmp0XQpZwoEJ6VmFCZw41QpYlSEKVYeBCkVApQltUe0KxrN1CkO+gQlYbwUJK6FJCuPn6QsHpOUJS7EpCiwq2QliZA0JhD8lCvKpcQl1el0LAwflChOxoQruJr0K/E+xCvCzRQsNB/kKbGTtClbRkQmr7NUKIMwtCnWm8QpVd6UK/bu5CxG7LQoCpO0KhOkhCXo3iQpKzwkKmbqdCkQ+BQlAc4UK2IHZCo9RMQpB2D0KeN7VCxacqQlTqp0KQukBCxMwnQlZNHkKKjxFCkamKQouLc0KxBZNCVd1nQsC2IUJFOS9CiREOAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMghGTE9BVF8zMjgAQAFI4QFTCBEQ////////////ARABEP8BEI7lw5IEEOj85YsEEObBndUCEMDQiOgCEJjfuvECEPKOh+gCEKHEu4wEEJ6Dv44EEKDfgtMCEIbu4bACEJ7M0M4DENXxogoQABAAVFsIERD///////////8BEAEQ/wEQlt3HjAQQ8bfL6QMQ0fzw0AMQ9vfxpwQQvrqPqgQQ7fiW7wIQveKtowQQ6ZKGoQQQkO7a0wIQirSJpwQQtu260AMQz+jKDBAAEABcYwgAZGgdcwgAdHgdgAEBPEEAAAAAAAAAAEgBUK+/uqD95tXHFFgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQvtfLvAEQro37lgIQyYG3rQMQqqyGuwIQ/ZbXPRD5tuSWAhC6y9VsEMe84uwBEKOoxO4BELWw3aUDELL5010Q3/2XrwEQ5pPLlAEQ27jH3wIQr63S2AEQrqyi8wEQwvOWVRCv8dJVEPq+/pQBEKb/5CwQrvn7exCirZ3/ARDD1YSRAhC+qsPWAhDOmaenAxDh0KuxARDs7fTsAxDmzd3SAhCl1dUQEK6NzLEDEO35xqgCEKWIxJkCENev/5kDEN+Z6tEBEKny+psCEMKx3zAQ/fj2owEQ18/kogMQABAAEAAQABAAJCr8B0Sb2JxCg08KQrTjIEPR2iNCVmmAQlQ66kMBxXtCjqA3QiPypUJxX41B0YJFQobEeUKJiO5CrmqiQQwbw0JNaNpCuF7zRWS2ykK3ddVCkvgrQpCTvkJmeoRCvyycQk5gckKqfnNCbVJaQrMhS0ERc6o/3dECQnJ+JEKX+gxBJSLxQrBX7kK3dsVCk/1IQo3L4ELEvaJCkvzZQqP440KnF9hCmx4vQqLssEKCWKhCtDk6P93rekKchY1CsVRCQq4DOkHfQVxCoFFSQpjgoEKs6DlCmpoIQq0TGUKhD4RClAPaQrL0+kK+c09CXpXOQra2MELAjZFCn9JzQlMRQEKIzbRCUm/XQp3oyEJSAulCwNmyQlX20EJtDZBCtR2WQrQfmUK8eWhCS0TeQr4tb0KKTcBCmpuhQpRg3UJa+mxCpUkAQmUMsUJMHiRCXAblQp3fZEKAF1ZCkX5zQoFGp0I+beZCjzVtQoO49EK5PaNCliPzQov78EKf1YdCqD6lQmMGNUK5nWJCusQpQr1RaEJa1WlCwXa0QqF+IUJZhm1CoON1QsEDz0KcqE9CSmwyQpLSE0Km1rZCciQGQlsfK0JVH3lCl9QiQpVSF0LAxlFCh/YmQmOXH0KtAIBCmLKqQrpJL0K8ArxCj4exQllVbEKJVK5CT9TaQk/sckK4znRCg2DnQoLr+kLFgKZCs3YDQlJLe0Jhw6pCsV2pQsALHkK+ogNCxI/YQpWcJUK001JClQlJQqKFfUKQZRxClol5QquQCELEQxBCtpwzQkVu
qEJfCcVCxMjhQq7xMUKYos5CUzSaQr7oCkKLsaVCvcV3Qm29bj+BNZVCSO4zQojDM0K7ti9CiiOzQr0WHkK9VgFCTJLKQo+ev0K7x4xCjx88QpcdJkJ/ZX1CmYfXQqSEPEKyVnhCjI7IQpY1y0LCQUVCh8ZtQoM6yUK+usZCRiA1QlSTT0KLAnNCdauAQlA3ekJnSYFCjZ4mQsGov0KaEHtCixHWQpLSFUK62F5CZzzaQouxPkJdidVCvtNuQpW+u0JUUWpCcz8/Qp0Pn0JllwxCsUR1QlOHGEKWmflCaE20QpCeukKL+CpCmAZ4QrqfJkKdPxxCgeviQpJmFUKrbjhCwnv9QsVviUJsnXNCmJaLQmNoZUKVk9lCRfnRQnagGkJky7xCVGSVQsBcqUJo2D5CwJsQQp3ppULEw8dCnLBDQrSiXAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOQBUwgREP///////////wEQARD/ARD52ofcAhCz+o/xAhDa8YuzAhCS166fAhCY6cPZAhCPvK2pBBCV2uSNBBDM+Z/RAxDcgbrTAhCRwd6cAhD+zvypAhCcqeWcAhAAEABUWwgREP///////////wEQARD/ARCLpIazAhCR/K3xAhD5/7vxAhCqwMXsAxDXv4mnBBCe9bzVAxC/6JPcAhDe6LzjAxCXnamgBBD21Y6VAhCg/tGpAhDy8ryqAhAAEABcYwgAZGgacwgAdHgagAEBPEEAAAAAAAAAAEgBUPat25T26t28+wFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BELaysxkQztTDtAIQu9+LORDq6ubuAhDjy9wmEOq4tiMQxZv3ZhDKqNboARDX1uzaAhCivd6bAxDRrualARC188zcAxD+1df6AhC2/+akARC6s+w3EK321/8BEKHx5nIQyrOapwIQ7svDYRDRyt7iAhDxiONiELWX69UCENXv/aoBEMHU59oDEKna1u0BENqw5PkCEMvy/9kDEL7/6/ADEL2Mg5ECELO4zewCELXoj+8CEOqp/PsDENHSpv0CELnbs7MDEKfRrrUBEKvuxZ0BEMb137wCEKnsz74BELmqARAAEAAQABAAJCr8B0VMJg5CM93HQq0ksEO+Zp5EsIC8Qoh5TEKqEWFCrhNSQsaueUKL+LdCpJ+NQmW2AUJeYHBCik1BQh9Tf0KLMElCwX6jQVpyS0Ky13FCcri3QqSZhkJu9h5CmygvQrKUc0JSMd9CY9cOQqC8kULF2XRCdVNgQp70x0KdGgZCZE4oQlFb1kJWGm5CsR1aQrAltkKdkLxCqFcTQg0QE0JEBltCkY2eQsXZCkKwoKFCxGVNQpQjYkJ3F/BCd/BhQsKKVkIXkkpCvivUQqhjIkK9ehJCl1fJQovFj0K5XspCife9QmZbDkIuU4NCabNcQpS/m0KoHEdCvHI4Qou3iUJIqwlCbAr2QrGSREKNibZCi6YGQr7Q40KsQ81ChlvSQqUid0KzV8pCRdEFQrnM0EJfuUNCnV7NQmIltkK6I1pCjZPhQmmkH0KAB4tCh9tdQr1AgkK4hnxCh2kTQsQYyUKvFCpCUVqRQlvKCEKQqdpCo9KuQsIQ3kJuyUtCU1mSQlqbJEKXNq1CqqvIQrC6nUJhTbVCnRkqQlFbIkKyR4xCusb9QsXrkUKbeQtCwWWOQsFNV0K1/3xCRYZNQpAU+0KWEdxCbI2EQrjoqkKbdN1CwSwOQo8ki0JoNNRCloqHQr3T6EKSGzxCYluvQowkCkKOkSJCmLUdQsQs1EJ9z7BChoiwQsQuAEKBWLZCVAGJQo6fBkKV
ERtCvE8rQpCQskLAG9FCgYAHQq6vMUKZzN1CuqFzQr2a1EJQOKlChbTyQpMRrUKZ6s9Cla9MQlvxk0JdzmpCR1etQr+NmEKwYT1CxGYmQruvCkLGzU9CRijqQmCouEJfzGlCwW0PQrkl5UJSLz1CVDosQpJAZUK51gVCkvsoQnOnWkKTjzhCmSteQl6NLUKJ11hCmO1SQp7OwkKbIuRCwY5xQotCAkJLEkRCjpj3QlqfpkLFGTBCkgzQQsFd+EKZhGlCmpWpQpiY4EJwy4hCt/iBQqlyFULDBLJCgE6hQppbf0LBafFCSXF8QlQ3lUKQZdBCqxe3QsNsD0JeXZVCthOMQoSRw0KgXJlCoUwVQmglEkKT+dVCwTArQlVo6UJE2mhCn4p9QrZSUkJV/29CvqWeQk/i1kJRZDtCjxnRQpS6IUK6G8ZCRLwFQpnhO0K20OVCqfoMQsABykJIVvtCknM5Ql/3V0KWxuJCjtX3QoSLw0JQhy5Cl5j3QkXQk0KMwoJCmw+6QpwCCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIIRkxPQVRfMzI4AEABSOcBUwgREP///////////wEQARD/ARCI2OKqAhCtzJWsAhDBm5znAhCirsLrAxD4oMraAhDTgp+cAhCRxr2XAhCyofzQAxD2xfvWAxCZhOmgBBDdh6GYAhDF/4PTAhANEABUWwgREP///////////wEQARD/ARDS+/mpBBCWgovXAxD+tqzuAhDImY3vAhC6j8OYAhDc2fnUAxCA3vycAhCMs93VAxCR36ixAhCj37GVBBDk4L7rAxDJnNaLBBANEABcYwgAZGgXcwgAdHgXgAEBPEEAAAAAAAAAAEgBUNWBtq/rureVGlgAYCBoAHUAAAAAgAEAnAGjAQgAEACkAagBAbABAbgBARQbCAAQABl7FK5H4XqEPyAAKNkDMAo7CYAUrkfhenQ/EXUHIZSo2WVAGXm+OzIPJ0JAIfHDd1QmO1NAKNkDPEMJgBSuR+F6dD8RdQchlKjZZUAZeb47Mg8nQkAh8cN3VCY7U0Ao2QNESwmAFK5H4XpkPxHg+luEZEdtQBlMeLjAHMzXPyFMeLjAHMzXPyjZA0xRAAAAAAAAAEBZAAAAAAAA8D9hAAAAAAAA8D9oAHEAAAAAAAD4P3kAAAAAAAAEQIEBAAAAAAAAFECJAQAAAAAAAOA/kQEAAAAAAADgPxwjCAETCgMyLjERAAAAAAAA4D8aCFBSRVZJT1VTIghTVEFOREFSRCoETk9ORTMIIREAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPxEAAAAAAADwPzQ7CCARAAAAAAAAAAARZmZmZmZGVEARAAAAAACAWEARAAAAAAAAS0ARAAAAAAAAAAARAAAAAAA4UkARAAAAAAAAV0ARAAAAAACASkARAAAAAAAAAAARMzMzMzMTUEARAAAAAAAAVUARAAAAAAAASUARAAAAAAAAAAARmpmZmZk5VUARAAAAAABAWEARAAAAAACAS0ARAAAAAAAAAAARchzHcRwH
UkARAAAAAAAAV0ARAAAAAACASUARAAAAAAAAAAARF1100UWXUUARAAAAAAAAVUARAAAAAACASUARAAAAAAAAAAARq6qqqqpqVUARAAAAAAAAWEARAAAAAABAUEARAAAAAAAAAAARAAAAAAD4UkARAAAAAABAWEARAAAAAACASkA8QwggEQAAAAAAAAAAEWZmZmZmRlRAEQAAAAAAgFhAEQAAAAAAAEtAEQAAAAAAAAAAEQAAAAAAOFJAEQAAAAAAAFdAEQAAAAAAgEpAEQAAAAAAAAAAETMzMzMzE1BAEQAAAAAAAFVAEQAAAAAAAElAEQAAAAAAAAAAEZqZmZmZOVVAEQAAAAAAQFhAEQAAAAAAgEtAEQAAAAAAAAAAEXIcx3EcB1JAEQAAAAAAAFdAEQAAAAAAgElAEQAAAAAAAAAAERdddNFFl1FAEQAAAAAAAFVAEQAAAAAAgElAEQAAAAAAAAAAEauqqqqqalVAEQAAAAAAAFhAEQAAAAAAQFBAEQAAAAAAAAAAEQAAAAAA+FJAEQAAAAAAQFhAEQAAAAAAgEpARFEAAAAAAAAAAFgKYP////8HaAhwIHgggQEAAAAAAAAkQIgBAKMBCAgQABAAEAAQABAAEAAQABAApAGoAfkDsAH5A7sBCS1DHOviNho/Ed1Kbs/vr29AGd1Kbs/vr29AId1Kbs/vr29AKPkDvAHDAQktQxzr4jYaPxHdSm7P769vQBkAAAAAAAAAACEAAAAAAAAAACj5A8QBFCQpMzMzMzMz0z8xAAAAAAAADEA4L0H+Jv7c0lcVQEsLCCARAAAAAAAAAAARGwIE8yxn4z0Rntx4JLyO2TwR96cbyLGnRT4RAAAAAAAAAAARsoYHYInlAD4RAAAAAAAAAAARpsoixR54Ez4RAAAAAAAAAAAR/au9fVjb2zwRgMMqBU8sDj0Rb5YAXgbbQT4RAAAAAAAAAAARUeIt/pvYqz0RwJ5alopT8j0RjZxztKxgQT0RAAAAAAAAAAARC/DaIdG0mz4Rex0tlws/JT4Rq31ncjShBD4RAAAAAAAAAAAREhzsj/Qn0jwRsiT9ZehrTj4RAAAAAAAAAAARAAAAAAAAAAARwo6AkXBAQj4RzeuKJG46Dj4RAAAAAAAAAAARAAAAAAAAAAARXp/788293j8RkK5aPcFqE0ARAAAAAAAAAAAMEwggEQAAAAAAAAAAEYSzRH0IZzA+EQxqU1Uv7UA+ETv9wwVsWKc9EQAAAAAAAAAAEab3WaHbI0Q+EUMzp+XT41I/EVCBnFWuTEE+EQAAAAAAAAAAEZBg+Kr9hn0+EW32sw61Vb8+EW291akzmDI+EQAAAAAAAAAAEb3t3mndIsQ+ETMGIIQiWxc+EXCVG6CWTyA+EQAAAAAAAAAAEQt3M0lnnBU+EQvrW80FbSI+EUjfBCKaR5E+EQAAAAAAAAAAEQFncavNteo+ER3w8U87rgs+EVG5rCl5dlc+EQAAAAAAAAAAEVu+b0Vv1Ps9Eek8dvWZSMw+ERMX/155T7w+EQAAAAAAAAAAEQAAAAAAAAAAEQAAAAAAAAAAEaJTv064zb8+FExRAAAAAAAAAABbCCARAAAAAAAAAAARAAAAAABAUkARAAAAAAAAV0ARAAAAAAAATUARAAAAAAAAAAAR5DiO4ziOUUARAAAAAACAVUARAAAAAAAAS0ARAAAAAAAAAAARjuM4juN4UUARAAAAAADAVUARAAAAAACAS0ARAAAAAAAAAAAROY7jOI7jUUARAAAAAAAAWEARAAAAAAAASUARAAAAAAAAAAARzczMzMwMVEARAAAAAACAWEARAAAAAACASkARAAAAAAAAAAARchzHcRzHUEARAAAAAABAWEARAAAAAACASEARAAAAAAAAAAAR0UUXXXQRU0ARAAAAAABAV0ARAAAAAACASEARAAAAAAAAAAARzczMzMx8f0ARAAAAAAD3sEARAAAAAACASEBcaABwAHgAgAEF
iAEAkgEIU1RBTkRBUkSaAQROT05FoAEAqAEA"
  },
  {
    "path": "Java/parkservices/src/test/resources/com/amazon/randomcutforest/parkservices/state/byte_base64_2.txt",
    "content": "CgMyLjETCgMyLjAQmQsZLUMc6+I2Gj8gHiiAAjAIOCBAIEgBUAFZAAAAAAAAAABgAGgBcAB4AIIBCEZMT0FUXzMyiwEKAzIuMBAgGIE8IAgqCEZMT0FUXzMyMIAtOoC0AUHYAABCRAAAQdgAAEJEAABB4AAAQjgAAEHgAABCOAAAQegAAEI0AABB6AAAQjQAAEHwAABCPAAAQfAAAEI8AABB+AAAQjQAAEH4AABCNAAAQfAAAEI4AABB8AAAQjgAAEIEAABCRAAAQgQAAEJEAABB8AAAQjgAAEHwAABCOAAAQgQAAEJAAABCBAAAQkAAAEH4AABCTAAAQfgAAEJMAABB+AAAQkAAAEH4AABCQAAAQfAAAEJAAABB8AAAQkAAAEHgAABCSAAAQeAAAEJIAABB2AAAQlQAAEHYAABCVAAAQdAAAEJYAABB0AAAQlgAAEHgAABCTAAAQeAAAEJMAABB+AAAQkgAAEH4AABCSAAAQgQAAEJAAABCBAAAQkAAAEIMAABCSAAAQgwAAEJIAABCBAAAQkgAAEIEAABCSAAAQgwAAEJIAABCDAAAQkgAAEIAAABCTAAAQgAAAEJMAABB8AAAQkgAAEHwAABCSAAAQeAAAEJAAABB4AAAQkAAAEHQAABCPAAAQdAAAEI8AABB6AAAQkgAAEHoAABCSAAAQdAAAEI8AABB0AAAQjwAAEHIAABCSAAAQcgAAEJIAABB4AAAQkQAAEHgAABCRAAAQfgAAEJAAABB+AAAQkAAAEIAAABCSAAAQgAAAEJIAABCCAAAQkAAAEIIAABCQAAAQfgAAEJEAABB+AAAQkQAAEIIAABCRAAAQggAAEJEAABB+AAAQkwAAEH4AABCTAAAQgAAAEJQAABCAAAAQlAAAEIEAABCTAAAQgQAAEJMAABCDAAAQlQAAEIMAABCVAAAQgQAAEJMAABCBAAAQkwAAEIMAABCWAAAQgwAAEJYAABCCAAAQlgAAEIIAABCWAAAQfgAAEJQAABB+AAAQlAAAEIEAABCUAAAQgQAAEJQAABCCAAAQlgAAEIIAABCWAAAQgwAAEJcAABCDAAAQlwAAEIEAABCXAAAQgQAAEJcAABCAAAAQlwAAEIAAABCXAAAQegAAEJYAABB6AAAQlgAAEH4AABCUAAAQfgAAEJQAABCBAAAQlgAAEIEAABCWAAAQfgAAEJcAABB+AAAQlwAAEIEAABCWAAAQgQAAEJYAABB+AAAQlQAAEH4AABCVAAAQfAAAEJQAABB8AAAQlAAAEH4AABCUAAAQfgAAEJQAABB6AAAQlwAAEHoAABCXAAAQfAAAEJUAABB8AAAQlQAAEH4AABCVAAAQfgAAEJUAABB+AAAQlgAAEH4AABCWAAAQggAAEJcAABCCAAAQlwAAEIAAABCXAAAQgAAAEJcAABB8AAAQlQAAEHwAABCVAAAQgQAAEJQAABCBAAAQlAAAEIEAABCXAAAQgQAAEJcAABCCAAAQlgAAEIIAABCWAAAQggAAEJUAABCCAAAQlQAAEIIAABCSAAAQggAAEJIAABCAAAAQkwAAEIAAABCTAAAQggAAEJQAABCCAAAQlAAAEIAAABCSAAAQgAAAEJIAABB8AAAQlQAAEHwAABCVAAAQdgAAEJIAABB2AAAQkgAAEHQAABCTAAAQdAAAEJMAABB0AAAQkQAAEHQAABCRAAAQdgAAEJAAABB2AAAQkAAAEHQAABCPAAAQdAAAEI8AABB2AAAQjwAAEHYAABCPAAAQeAAAEJEAABB4AAAQkQAAEHYAABCOAAAQdgAAEI4AABB8AAAQjwAAEHwAABCPAAAQgAAAEI0AABCAAAAQjQAAEHwAABCOAAAQfAAAEI4AABB2AAAQjgAAEHYAABCOAAAQcgAAEI4AABByAAAQjgAAEHYAABCNAAAQdgAAEI0AABB0AAAQkAAAEHQAABCQAAAQeAAAEI4AABB4AAAQjgA
AEHYAABCOAAAQdgAAEI4AABB0AAAQjwAAEHQAABCPAAAQdAAAEI0AABB0AAAQjQAAEHYAABCOAAAQdgAAEI4AABB6AAAQkAAAEHoAABCQAAAQeAAAEI0AABB4AAAQjQAAEHwAABCOAAAQfAAAEI4AABB4AAAQjwAAEHgAABCPAAAQeAAAEJEAABB4AAAQkQAAEHQAABCRAAAQdAAAEJEAABB0AAAQjwAAEHQAABCPAAAQegAAEI0AABB6AAAQjQAAEHYAABCPAAAQdgAAEI8AABB2AAAQjQAAEHYAABCNAAAQeAAAEJAAABB4AAAQkAAAEHwAABCPAAAQfAAAEI8AABB8AAAQjQAAEHwAABCNAAAQgQAAEJAAABCBAAAQkAAAEIIAABCPAAAQggAAEI8AABCBAAAQjQAAEIEAABCNAAAQgQAAEI0AABCBAAAQjQAAEH4AABCNAAAQfgAAEI0AABCBAAAQkAAAEIEAABCQAAAQfgAAEJAAABB+AAAQkAAAEHwAABCQAAAQfAAAEJAAABCAAAAQjQAAEIAAABCNAAAQfAAAEJAAABB8AAAQkAAAEIAAABCQAAAQgAAAEJAAABB6AAAQkAAAEHoAABCQAAAQfAAAEJEAABB8AAAQkQAAEHgAABCUAAAQeAAAEJQAABByAAAQkQAAEHIAABCRAAAQcgAAEI8AABByAAAQjwAAEHQAABCNAAAQdAAAEI0AABB6AAAQjwAAEHoAABCPAAAQdAAAEI0AABB0AAAQjQAAEHYAABCQAAAQdgAAEJAAABB6AAAQkAAAEHoAABCQAAAQfgAAEI8AABB+AAAQjwAAEHwAABCSAAAQfAAAEJIAABCAAAAQkAAAEIAAABCQAAAQegAAEJEAABB6AAAQkQAAEHYAABCQAAAQdgAAEJAAABB6AAAQjwAAEHoAABCPAAAQdAAAEI8AABB0AAAQjwAAEHQAABCRAAAQdAAAEJEAABB2AAAQlAAAEHYAABCUAAAQcgAAEJcAABByAAAQlwAAEHIAABCWAAAQcgAAEJYAABB0AAAQlgAAEHQAABCWAAAQdgAAEJMAABB2AAAQkwAAEHgAABCWAAAQeAAAEJYAABB4AAAQlAAAEHgAABCUAAAQegAAEJMAABB6AAAQkwAAEIAAABCQAAAQgAAAEJAAABCBAAAQkgAAEIEAABCSAAAQgQAAEJQAABCBAAAQlAAAEIAAABCRAAAQgAAAEJEAABB+AAAQkQAAEH4AABCRAAAQegAAEJAAABB6AAAQkAAAEHwAABCNAAAQfAAAEI0AABB+AAAQjgAAEH4AABCOAAAQfAAAEI8AABB8AAAQjwAAEIEAABCQAAAQgQAAEJAAABCAAAAQjgAAEIAAABCOAAAQegAAEI4AABB6AAAQjgAAEHwAABCNAAAQfAAAEI0AABB6AAAQkAAAEHoAABCQAAAQeAAAEI8AABB4AAAQjwAAEHQAABCOAAAQdAAAEI4AABB0AAAQjwAAEHQAABCPAAAQdgAAEJEAABB2AAAQkQAAEHQAABCRAAAQdAAAEJEAABB6AAAQjwAAEHoAABCPAAAQdAAAEI0AABB0AAAQjQAAEHgAABCPAAAQeAAAEI8AABB6AAAQjgAAEHoAABCOAAAQdgAAEI8AABB2AAAQjwAAEHIAABCNAAAQcgAAEI0AABB0AAAQjQAAEHQAABCNAAAQdgAAEI8AABB2AAAQjwAAEHoAABCPAAAQegAAEI8AABB8AAAQkAAAEHwAABCQAAAQfgAAEJMAABB+AAAQkwAAEIIAABCRAAAQggAAEJEAABCBAAAQjwAAEIEAABCPAAAQfgAAEI8AABB+AAAQjwAAEHwAABCNAAAQfAAAEI0AABCAAAAQjgAAEIAAABCOAAAQggAAEI0AABCCAAAQjQAAEH4AABCNAAAQfgAAEI0AABB+AAAQjQAAEH4AABCNAAAQgAAAEI4AABCAAAAQjgAAEIMAABCPAAAQgwA
AEI8AABCBAAAQjwAAEIEAABCPAAAQgwAAEJIAABCDAAAQkgAAEIEAABCQAAAQgQAAEJAAABCAAAAQjwAAEIAAABCPAAAQfgAAEJIAABB+AAAQkgAAEIEAABCPAAAQgQAAEI8AABCBAAAQjgAAEIEAABCOAAAQggAAEI0AABCCAAAQjQAAEH4AABCNAAAQfgAAEI0AABB+AAAQjQAAEH4AABCNAAAQgAAAEI4AABCAAAAQjgAAEIAAABCOAAAQgAAAEI4AABB+AAAQkAAAEH4AABCQAAAQeAAAEI8AABB4AAAQjwAAEH4AABCPAAAQfgAAEI8AABB6AAAQjgAAEHoAABCOAAAQeAAAEI8AABB4AAAQjwAAEHwAABCOAAAQfAAAEI4AABB4AAAQjgAAEHgAABCOAAAQdgAAEI4AABB2AAAQjgAAEHIAABCNAAAQcgAAEI0AABB0AAAQjQAAEHQAABCNAAAQeAAAEI0AABB4AAAQjQAAEHgAABCNAAAQeAAAEI0AABB2AAAQjgAAEHYAABCOAAAQfAAAEI4AABB8AAAQjgAAEHoAABCRAAAQegAAEJEAABB0AAAQjgAAEHQAABCOAAAQdAAAEI8AABB0AAAQjwAAEHQAABCRAAAQdAAAEJEAABB0AAAQkAAAEHQAABCQAAAQdgAAEI4AABB2AAAQjgAAEHoAABCRAAAQegAAEJEAABB+AAAQkAAAEH4AABCQAAAQgAAAEJEAABCAAAAQkQAAEH4AABCSAAAQfgAAEJIAABB4AAAQlQAAEHgAABCVAAAQfAAAEJYAABB8AAAQlgAAEHoAABCVAAAQegAAEJUAABB2AAAQlgAAEHYAABCWAAAQeAAAEJUAABB4AAAQlQAAEHgAABCXAAAQeAAAEJcAABB2AAAQlQAAEHYAABCVAAAQcgAAEJMAABByAAAQkwAAEHYAABCUAAAQdgAAEJQAABB8AAAQkwAAEHwAABCTAAAQfAAAEJMAABB8AAAQkwAAEHwAABCVAAAQfAAAEJUAABCAAAAQkgAAEIAAABCSAAAQggAAEJIAABCCAAAQkgAAEIMAABCRAAAQgwAAEJEAABCAAAAQkwAAEIAAABCTAAAQfgAAEJMAABB+AAAQkwAAEHoAABCQAAAQegAAEJAAABB0AAAQkgAAEHQAABCSAAAQdAAAEJIAABB0AAAQkgAAEHIAABCQAAAQcgAAEJAAABB2AAAQkQAAEHYAABCRAAAQcgAAEJQAABByAAAQlAAAEHIAABCUAAAQcgAAEJQAABByAAAQkQAAEHIAABCRAAAQcgAAEJMAABByAAAQkwAAEHIAABCWAAAQcgAAEJYAABByAAAQlgAAEHIAABCWAAAQdAAAEJMAABB0AAAQkwAAEHIAABCUAAAQcgAAEJQAABB2AAAQlAAAEHYAABCUAAAQcgAAEJEAABByAAAQkQAAEHIAABCUAAAQcgAAEJQAABB4AAAQlAAAEHgAABCUAAAQegAAEJMAABB6AAAQkwAAEHgAABCQAAAQeAAAEJAAABB6AAAQkwAAEHoAABCTAAAQfAAAEJIAABB8AAAQkgAAEH4AABCTAAAQfgAAEJMAABB6AAAQlAAAEHoAABCUAAAQgAAAEJQAABCAAAAQlAAAEH4AABCUAAAQfgAAEJQAABCAAAAQkwAAEIAAABCTAAAQgwAAEJUAABCDAAAQlQAAEIMAABCXAAAQgwAAEJcAABCCAAAQlwAAEIIAABCXAAAQgwAAEJUAABCDAAAQlQAAEIMAABCVAAAQgwAAEJUAABCCAAAQlAAAEIIAABCUAAAQgQAAEJUAABCBAAAQlQAAEIEAABCSAAAQgQAAEJIAABCCAAAQkQAAEIIAABCRAAAQfgAAEJMAABB+AAAQkwAAEHoAABCQAAAQegAAEJAAABB2AAAQjwAAEHYAABCPAAAQfAAAEI0AABB8AAAQjQAAEIAAABCNAAAQgAAAEI0AABB+AAAQjQA
AEH4AABCNAAAQgQAAEI0AABCBAAAQjQAAEIAAABCQAAAQgAAAEJAAABCBAAAQkgAAEIEAABCSAAAQggAAEI8AABCCAAAQjwAAEH4AABCNAAAQfgAAEI0AABB6AAAQjgAAEHoAABCOAAAQfgAAEI4AABB+AAAQjgAAEHwAABCQAAAQfAAAEJAAABB2AAAQkwAAEHYAABCTAAAQegAAEJAAABB6AAAQkAAAEHQAABCPAAAQdAAAEI8AABB4AAAQjQAAEHgAABCNAAAQfAAAEJAAABB8AAAQkAAAEIAAABCRAAAQgAAAEJEAABCAAAAQjgAAEIAAABCOAAAQgQAAEJAAABCBAAAQkAAAEIMAABCTAAAQgwAAEJMAABCCAAAQlAAAEIIAABCUAAAQgwAAEJUAABCDAAAQlQAAEIAAABCTAAAQgAAAEJMAABCBAAAQkQAAEIEAABCRAAAQggAAEJEAABCCAAAQkQAAEIIAABCPAAAQggAAEI8AABCAAAAQjgAAEIAAABCOAAAQggAAEJAAABCCAAAQkAAAEIIAABCOAAAQggAAEI4AABB+AAAQjwAAEH4AABCPAAAQgQAAEJAAABCBAAAQkAAAEHwAABCQAAAQfAAAEJAAABB2AAAQkQAAEHYAABCRAAAQfAAAEI4AABB8AAAQjgAAEHYAABCPAAAQdgAAEI8AABB8AAAQjQAAEHwAABCNAAAQgQAAEJAAABCBAAAQkAAAEHwAABCRAAAQfAAAEJEAABB8AAAQlAAAEHwAABCUAAAQegAAEJcAABB6AAAQlwAAEHQAABCXAAAQdAAAEJcAABB2AAAQlwAAEHYAABCXAAAQcgAAEJYAABByAAAQlgAAEHQAABCUAAAQdAAAEJQAABByAAAQlgAAEHIAABCWAAAQeAAAEJYAABB4AAAQlgAAEHoAABCUAAAQegAAEJQAABB6AAAQlwAAEHoAABCXAAAQeAAAEJYAABB4AAAQlgAAEH4AABCTAAAQfgAAEJMAABCBAAAQlAAAEIEAABCUAAAQggAAEJMAABCCAAAQkwAAEIMAABCUAAAQgwAAEJQAABCBAAAQkgAAEIEAABCSAAAQfAAAEJAAABB8AAAQkAAAEHYAABCNAAAQdgAAEI0AABB8AAAQjgAAEHwAABCOAAAQfgAAEI0AABB+AAAQjQAAEHoAABCPAAAQegAAEI8AABB+AAAQkAAAEH4AABCQAAAQfgAAEI8AABB+AAAQjwAAEHoAABCPAAAQegAAEI8AABB8AAAQjQAAEHwAABCNAAAQgAAAEJAAABCAAAAQkAAAEHoAABCQAAAQegAAEJAAABB6AAAQkAAAEHoAABCQAAAQdAAAEJEAABB0AAAQkQAAEHQAABCOAAAQdAAAEI4AABB0AAAQjQAAEHQAABCNAAAQdAAAEI4AABB0AAAQjgAAEHIAABCQAAAQcgAAEJAAABB4AAAQjwAAEHgAABCPAAAQegAAEJEAABB6AAAQkQAAEHoAABCUAAAQegAAEJQAABB+AAAQlwAAEH4AABCXAAAQggAAEJYAABCCAAAQlgAAEIAAABCWAAAQgAAAEJYAABB+AAAQkwAAEH4AABCTAAAQegAAEJYAABB6AAAQlgAAEHwAABCVAAAQfAAAEJUAABB2AAAQlgAAEHYAABCWAAAQegAAEJYAABB6AAAQlgAAEHoAABCXAAAQegAAEJcAABB2AAAQlQAAEHYAABCVAAAQfAAAEJMAABB8AAAQkwAAEIEAABCUAAAQgQAAEJQAABCDAAAQkwAAEIMAABCTAAAQgQAAEJQAABCBAAAQlAAAEIIAABCUAAAQggAAEJQAABCDAAAQlgAAEIMAABCWAAAQgQAAEJYAABCBAAAQlgAAEIAAABCTAAAQgAAAEJMAABCBAAAQkQAAEIEAABCRAAAQgQAAEI4AABCBAAAQjgAAEH4AABCOAAAQfgAAEI4AABB6AAAQjQAAEHoAABCNAAAQfgA
AEJAAABB+AAAQkAAAEH4AABCSAAAQfgAAEJIAABB8AAAQkQAAEHwAABCRAAAQdgAAEJQAABB2AAAQlAAAEHYAABCXAAAQdgAAEJcAABB4AAAQlAAAEHgAABCUAAAQfAAAEJQAABB8AAAQlAAAEHoAABCUAAAQegAAEJQAABB2AAAQlgAAEHYAABCWAAAQdgAAEJcAABB2AAAQlwAAEHgAABCUAAAQeAAAEJQAABByAAAQlgAAEHIAABCWAAAQdgAAEJUAABB2AAAQlQAAEHYAABCSAAAQdgAAEJIAABB2AAAQkgAAEHYAABCSAAAQdgAAEJAAABB2AAAQkAAAEHIAABCOAAAQcgAAEI4AABB4AAAQjgAAEHgAABCOAAAQdAAAEI4AABB0AAAQjgAAEHYAABCNAAAQdgAAEI0AABB0AAAQjwAAEHQAABCPAAAQdgAAEI0AABB2AAAQjQAAEHYAABCNAAAQdgAAEI0AABB6AAAQjgAAEHoAABCOAAAQfgAAEJAAABB+AAAQkAAAEHwAABCOAAAQfAAAEI4AABCBAAAQkAAAEIEAABCQAAAQgAAAEI8AABCAAAAQjwAAEIAAABCQAAAQgAAAEJAAABB+AAAQjgAAEH4AABCOAAAQeAAAEI4AABB4AAAQjgAAEH4AABCPAAAQfgAAEI8AABB8AAAQjgAAEHwAABCOAAAQeAAAEI4AABB4AAAQjgAAEHgAABCPAAAQeAAAEI8AABB0AAAQkgAAEHQAABCSAAAQeAAAEI8AABB4AAAQjwAAEHoAABCRAAAQegAAEJEAABB4AAAQkQAAEHgAABCRAAAQfgAAEJAAABB+AAAQkAAAEHgAABCRAAAQeAAAEJEAABB8AAAQlAAAEHwAABCUAAAQfgAAEJcAABB+AAAQlwAAEIEAABCXAAAQgQAAEJcAABCCAAAQlgAAEIIAABCWAAAQfgAAEJQAABB+AAAQlAAAEH4AABCWAAAQfgAAEJYAABB6AAAQlwAAEHoAABCXAAAQgAAAEJYAABCAAAAQlgAAEHoAABCTAAAQegAAEJMAABCAAAAQkQAAEIAAABCRAAAQfgAAEJIAABB+AAAQkgAAEHoAABCUAAAQegAAEJQAABB8AAAQkQAAEHwAABCRAAAQeAAAEI4AABB4AAAQjgAAEHQAABCPAAAQdAAAEI8AABB0AAAQkQAAEHQAABCRAAAQdAAAEI8AABB0AAAQjwAAEHgAABCRAAAQeAAAEJEAABB4AAAQlAAAEHgAABCUAAAQfAAAEJEAABB8AAAQkQAAEH4AABCOAAAQfgAAEI4AABB6AAAQjQAAEHoAABCNAAAQdAAAEI4AABB0AAAQjgAAEHgAABCPAAAQeAAAEI8AABByAAAQkgAAEHIAABCSAAAQdgAAEJAAABB2AAAQkAAAEHoAABCNAAAQegAAEI0AABB4AAAQjgAAEHgAABCOAAAQcgAAEI4AABByAAAQjgAAEHQAABCNAAAQdAAAEI0AABByAAAQjgAAEHIAABCOAAAQeAAAEI8AABB4AAAQjwAAEHYAABCQAAAQdgAAEJAAABB0AAAQkgAAEHQAABCSAAAQdgAAEJUAABB2AAAQlQAAEHgAABCWAAAQeAAAEJYAABB+AAAQlwAAEH4AABCXAAAQggAAEJcAABCCAAAQlwAAEIAAABCVAAAQgAAAEJUAABB6AAAQlAAAEHoAABCUAAAQdgAAEJMAABB2AAAQkwAAEHwAABCWAAAQfAAAEJYAABCAAAAQlAAAEIAAABCUAAAQegAAEJMAABB6AAAQkwAAEHoAABCUAAAQegAAEJQAABB+AAAQlQAAEH4AABCVAAAQfgAAEJUAABB+AAAQlQAAEIEAABCXAAAQgQAAEJcAABCBAAAQlAAAEIEAABCUAAAQgQAAEJQAABCBAAAQlAAAEIMAABCRAAAQgwAAEJEAABCDAAAQlAAAEIMAABCUAAAQgQAAEJQAABCBAAAQlAA
AEH4AABCTAAAQfgAAEJMAABB+AAAQlgAAEH4AABCWAAAQfAAAEJUAABB8AAAQlQAAEHwAABCWAAAQfAAAEJYAABB6AAAQlgAAEHoAABCWAAAQegAAEJcAABB6AAAQlwAAEHQAABCVAAAQdAAAEJUAABByAAAQlAAAEHIAABCUAAAQdgAAEJMAABB2AAAQkwAAEHQAABCVAAAQdAAAEJUAABByAAAQlAAAEHIAABCUAAAQdAAAEJYAABB0AAAQlgAAEHYAABCWAAAQdgAAEJYAABB2AAAQlgAAEHYAABCWAAAQeAAAEJUAABB4AAAQlQAAEHYAABCWAAAQdgAAEJYAABB8AAAQlQAAEHwAABCVAAAQegAAEJcAABB6AAAQlwAAEHQAABCVAAAQdAAAEJUAABB2AAAQlwAAEHYAABCXAAAQdAAAEJUAABB0AAAQlQAAEHYAABCWAAAQdgAAEJYAABB2AAAQkwAAEHYAABCTAAAQdAAAEJEAABB0AAAQkQAAEHYAABCSAAAQdgAAEJIAABB0AAAQjwAAEHQAABCPAAAQdgAAEJAAABB2AAAQkAAAEHwAABCSAAAQfAAAEJIAABB6AAAQjwAAEHoAABCPAAAQeAAAEI8AABB4AAAQjwAAEHYAABCOAAAQdgAAEI4AABB4AAAQjQAAEHgAABCNAAAQegAAEI8AABB6AAAQjwAAEHgAABCOAAAQeAAAEI4AABByAAAQjwAAEHIAABCPAAAQdgAAEJAAABB2AAAQkAAAEHgAABCTAAAQeAAAEJMAABB2AAAQkAAAEHYAABCQAAAQdAAAEJEAABB0AAAQkQAAEHQAABCQAAAQdAAAEJAAABB0AAAQkAAAEHQAABCQAAAQcgAAEI8AABByAAAQjwAAEHIAABCNAAAQcgAAEI0AABB4AAAQjQAAEHgAABCNAAAQdgAAEJAAABB2AAAQkAAAEHgAABCSAAAQeAAAEJIAABB4AAAQkAAAEHgAABCQAAAQfAAAEI0AABB8AAAQjQAAEIEAABCPAAAQgQAAEI8AABCCAAAQkAAAEIIAABCQAAAQgwAAEI8AABCDAAAQjwAAEIAAABCOAAAQgAAAEI4AABCDAAAQkQAAEIMAABCRAAAQggAAEJQAABCCAAAQlAAAEH4AABCXAAAQfgAAEJcAABCCAAAQlAAAEIIAABCUAAAQggAAEJYAABCCAAAQlgAAEIEAABCXAAAQgQAAEJcAABB8AAAQlAAAEHwAABCUAAAQegAAEJUAABB6AAAQlQAAEH4AABCXAAAQfgAAEJcAABB+AAAQlQAAEH4AABCVAAAQegAAEJUAABB6AAAQlQAAEH4AABCWAAAQfgAAEJYAABCAAAAQkwAAEIAAABCTAAAQgwAAEJMAABCDAAAQkwAAEIIAABCTAAAQggAAEJMAABCCAAAQkQAAEIIAABCRAAAQggAAEJIAABCCAAAQkgAAEIMAABCTAAAQgwAAEJMAABCDAAAQlgAAEIMAABCWAAAQgwAAEJcAABCDAAAQlwAAEIAAABCXAAAQgAAAEJcAABB+AAAQlgAAEH4AABCWAAAQegAAEJYAABB6AAAQlgAAEHwAABCVAAAQfAAAEJUAABCAAAAQlQAAEIAAABCVAAAQfAAAEJMAABB8AAAQkwAAEIEAABCRAAAQgQAAEJEAABCDAAAQjwAAEIMAABCPAAAQggAAEJEAABCCAAAQkQAAEIMAABCRAAAQgwAAEJEAABCBAAAQlAAAEIEAABCUAAAQfgAAEJQAABB+AAAQlAAAEHoAABCSAAAQegAAEJIAABB6AAAQlQAAEHoAABCVAAAQdgAAEJYAABB2AAAQlgAAEHIAABCXAAAQcgAAEJcAABB4AAAQlwAAEHgAABCXAAAQeAAAEJQAABB4AAAQlAAAEHgAABCVAAAQeAAAEJUAABByAAAQlgAAEHIAABCWAAAQdAAAEJQAABB0AAAQlAAAEHQAABCRAAAQdAA
AEJEAABB0AAAQkwAAEHQAABCTAAAQdAAAEJUAABB0AAAQlQAAEHYAABCTAAAQdgAAEJMAABB6AAAQlQAAEHoAABCVAAAQeAAAEJMAABB4AAAQkwAAEH4AABCRAAAQfgAAEJEAABCAAAAQlAAAEIAAABCUAAAQggAAEJcAABCCAAAQlwAAEIIAABCVAAAQggAAEJUAABCDAAAQlAAAEIMAABCUAAAQgwAAEJcAABCDAAAQlwAAEIMAABCXAAAQgwAAEJcAABCBAAAQlgAAEIEAABCWAAAQfAAAEJcAABB8AAAQlwAAEHoAABCVAAAQegAAEJUAABB8AAAQkgAAEHwAABCSAAAQegAAEI8AABB6AAAQjwAAEHQAABCSAAAQdAAAEJIAABByAAAQkwAAEHIAABCTAAAQdAAAEJUAABB0AAAQlQAAEHQAABCUAAAQdAAAEJQAABB4AAAQkgAAEHgAABCSAAAQegAAEJAAABB6AAAQkAAAEHQAABCQAAAQdAAAEJAAABB2AAAQjQAAEHYAABCNAAAQdAAAEI4AABB0AAAQjgAAEHYAABCNAAAQdgAAEI0AABByAAAQjgAAEHIAABCOAAAQcgAAEI4AABByAAAQjgAAEHIAABCNAAAQcgAAEI0AABB2AAAQjQAAEHYAABCNAAAQdAAAEI0AABB0AAAQjQAAEHYAABCPAAAQdgAAEI8AABB8AAAQjQAAEHwAABCNAAAQeAAAEI4AABB4AAAQjgAAEHoAABCNAAAQegAAEI0AABB+AAAQkAAAEH4AABCQAAAQggAAEJEAABCCAAAQkQAAEIEAABCOAAAQgQAAEI4AABCCAAAQjwAAEIIAABCPAAAQgQAAEI4AABCBAAAQjgAAEIMAABCNAAAQgwAAEI0AABCBAAAQjgAAEIEAABCOAAAQgQAAEI4AABCBAAAQjgAAEHwAABCNAAAQfAAAEI0AABB4AAAQjgAAEHgAABCOAAAQfAAAEI4AABB8AAAQjgAAEHYAABCPAAAQdgAAEI8AABB6AAAQjQAAEHoAABCNAAAQdgAAEI8AABB2AAAQjwAAEHwAABCNAAAQfAAAEI0AABB8AAAQjgAAEHwAABCOAAAQfgAAEI0AABB+AAAQjQAAEIIAABCPAAAQggAAEI8AABCCAAAQkQAAEIIAABCRAAAQfgAAEJEAABB+AAAQkQAAEHoAABCUAAAQegAAEJQAABB4AAAQlwAAEHgAABCXAAAQegAAEJYAABB6AAAQlgAAEHoAABCVAAAQegAAEJUAABB4AAAQlgAAEHgAABCWAAAQdgAAEJUAABB2AAAQlQAAEHgAABCXAAAQeAAAEJcAABB6AAAQlAAAEHoAABCUAAAQfAAAEJcAABB8AAAQlwAAEHgAABCWAAAQeAAAEJYAABByAAAQlwAAEHIAABCXAAAQdgAAEJcAABB2AAAQlwAAEHYAABCWAAAQdgAAEJYAABB0AAAQlwAAEHQAABCXAAAQcgAAEJYAABByAAAQlgAAEHgAABCUAAAQeAAAEJQAABB0AAAQlQAAEHQAABCVAAAQdAAAEJQAABB0AAAQlAAAEHIAABCTAAAQcgAAEJMAABByAAAQkQAAEHIAABCRAAAQdgAAEJEAABB2AAAQkQAAEHIAABCUAAAQcgAAEJQAABB2AAAQlQAAEHYAABCVAAAQdAAAEJMAABB0AAAQkwAAEHQAABCQAAAQdAAAEJAAABB0AAAQkQAAEHQAABCRAAAQdgAAEJAAABB2AAAQkAAAEHgAABCRAAAQeAAAEJEAABB0AAAQkAAAEHQAABCQAAAQdAAAEI0AABB0AAAQjQAAEHYAABCQAAAQdgAAEJAAABB8AAAQjgAAEHwAABCOAAAQfgAAEI0AABB+AAAQjQAAEHgAABCNAAAQeAAAEI0AABB6AAAQjQAAEHoAABCNAAAQfgAAEI4AABB+AAAQjgAAEIEAABCQAAAQgQAAEJAAABB8AAAQkgA
AEHwAABCSAAAQegAAEJIAABB6AAAQkgAAEHoAABCPAAAQegAAEI8AABB0AAAQkgAAEHQAABCSAAAQdAAAEJIAABB0AAAQkgAAEHYAABCRAAAQdgAAEJEAABB0AAAQkAAAEHQAABCQAAAQdAAAEJAAABB0AAAQkAAAEHYAABCOAAAQdgAAEI4AABB2AAAQjwAAEHYAABCPAAAQeAAAEI0AABB4AAAQjQAAEHIAABCNAAAQcgAAEI0AABB0AAAQjgAAEHQAABCOAAAQdAAAEJEAABB0AAAQkQAAEHYAABCRAAAQdgAAEJEAABB6AAAQkAAAEHoAABCQAAAQdgAAEJMAABB2AAAQkwAAEHgAABCQAAAQeAAAEJAAABB6AAAQkQAAEHoAABCRAAAQegAAEI4AABB6AAAQjgAAEIAAABCOAAAQgAAAEI4AABB6AAAQjQAAEHoAABCNAAAQgAAAEI8AABCAAAAQjwAAEHwAABCQAAAQfAAAEJAAABB2AAAQjQAAEHYAABCNAAAQdgAAEJAAABB2AAAQkAAAEHoAABCRAAAQegAAEJEAABB6AAAQlAAAEHoAABCUAAAQegAAEJcAABB6AAAQlwAAEIAAABCVAAAQgAAAEJUAABB6AAAQlQAAEHoAABCVAAAQdgAAEJUAABB2AAAQlQAAEHgAABCSAAAQeAAAEJIAABB6AAAQkgAAEHoAABCSAAAQfAAAEJAAABB8AAAQkAAAEHYAABCOAAAQdgAAEI4AABB6AAAQjgAAEHoAABCOAAAQeAAAEJEAABB4AAAQkQAAEHIAABCQAAAQcgAAEJAAABB0AAAQkQAAEHQAABCRAAAQdAAAEJEAABB0AAAQkQAAEHoAABCOAAAQegAAEI4AABB+AAAQjgAAEH4AABCOAAAQeAAAEJEAABB4AAAQkQAAEHoAABCPAAAQegAAEI8AABB6AAAQjwAAEHoAABCPAAAQgAAAEJIAABCAAAAQkgAAEIEAABCRAAAQgQAAEJEAABCBAAAQjwAAEIEAABCPAAAQgQAAEJEAABCBAAAQkQAAEIEAABCPAAAQgQAAEI8AABCDAAAQkgAAEIMAABCSAAAQgAAAEJMAABCAAAAQkwAAEIAAABCQAAAQgAAAEJAAABCBAAAQjQAAEIEAABCNAAAQggAAEI0AABCCAAAQjQAAEH4AABCPAAAQfgAAEI8AABB6AAAQkAAAEHoAABCQAAAQdAAAEI8AABB0AAAQjwAAEHIAABCOAAAQcgAAEI4AABB2AAAQjwAAEHYAABCPAAAQcgAAEJAAABByAAAQkAAAEHIAABCSAAAQcgAAEJIAABB0AAAQlQAAEHQAABCVAAAQcgAAEJIAABByAAAQkgAAEHYAABCTAAAQdgAAEJMAABB2AAAQkgAAEHYAABCSAAAQdAAAEI8AABB0AAAQjwAAEHoAABCOAAAQegAAEI4AABB4AAAQjgAAEHgAABCOAAAQfAAAEJAAABB8AAAQkAAAEH4AABCOAAAQfgAAEI4AABCCAAAQjgAAEIIAABCOAAAQgQAAEI4AABCBAAAQjgAAEH4AABCPAAAQfgAAEI8AABB6AAAQjgAAEHoAABCOAAAQfgAAEJAAABB+AAAQkAAAEHwAABCSAAAQfAAAEJIAABCAAAAQkwAAEIAAABCTAAAQfgAAEJMAABB+AAAQkwAAEHoAABCTAAAQegAAEJMAABB4AAAQkQAAEHgAABCRAAAQfgAAEI8AABB+AAAQjwAAEHgAABCOAAAQeAAAEI4AABB8AAAQkQAAEHwAABCRAAAQfAAAEJAAABB8AAAQkAAAEHoAABCNAAAQegAAEI0AABB4AAAQjgAAEHgAABCOAAAQdgAAEI0AABB2AAAQjQAAEHwAABCQAAAQfAAAEJAAABB+AAAQkAAAEH4AABCQAAAQegAAEJEAABB6AAAQkQAAEHoAABCOAAAQegAAEI4AABB6AAAQkAAAEHoAABCQAAAQeAA
AEI0AABB4AAAQjQAAEHgAABCNAAAQeAAAEI0AABB2AAAQjQAAEHYAABCNAAAQeAAAEI4AABB4AAAQjgAAEHwAABCQAAAQfAAAEJAAABB+AAAQkQAAEH4AABCRAAAQgAAAEI4AABCAAAAQjgAAEHoAABCRAAAQegAAEJEAABB4AAAQjgAAEHgAABCOAAAQfAAAEI4AABB8AAAQjgAAEHgAABCOAAAQeAAAEI4AABByAAAQjwAAEHIAABCPAAAQcgAAEI0AABByAAAQjQAAEHIAABCOAAAQcgAAEI4AABB2AAAQjgAAEHYAABCOAAAQcgAAEI8AABByAAAQjwAAEHIAABCNAAAQcgAAEI0AABB4AAAQkAAAEHgAABCQAAAQdAAAEJIAABB0AAAQkgAAEHoAABCQAAAQegAAEJAAABB8AAAQkAAAEHwAABCQAAAQfgAAEI8AABB+AAAQjwAAEHgAABCSAAAQeAAAEJIAABB0AAAQkwAAEHQAABCTAAAQdAAAEJAAABB0AAAQkAAAEHYAABCRAAAQdgAAEJEAABB6AAAQkwAAEHoAABCTAAAQdAAAEJYAABB0AAAQlgAAEHgAABCVAAAQeAAAEJUAABB2AAAQkgAAEHYAABCSAAAQcgAAEJIAABByAAAQkgAAEHYAABCSAAAQdgAAEJIAABB4AAAQlQAAEHgAABCVAAAQcgAAEJQAABByAAAQlAAAEHYAABCUAAAQdgAAEJQAABB0AAAQkgAAEHQAABCSAAAQeAAAEJAAABB4AAAQkAAAEHYAABCOAAAQdgAAEI4AABB2AAAQjgAAEHYAABCOAAAQeAAAEI0AABB4AAAQjQAAEHQAABCNAAAQdAAAEI0AABByAAAQjQAAEHIAABCNAAAQdgAAEI8AABB2AAAQjwAAEHwAABCNAAAQfAAAEI0AABCBAAAQkAAAEIEAABCQAAAQgQAAEI8AABCBAAAQjwAAEH4AABCRAAAQfgAAEJEAABCCAAAQkAAAEIIAABCQAAAQgwAAEJMAABCDAAAQkwAAEIMAABCTAAAQgwAAEJMAABCCAAAQkAAAEIIAABCQAAAQfgAAEI4AABB+AAAQjgAAEHoAABCNAAAQegAAEI0AABB6AAAQjQAAEHoAABCNAAAQdgAAEI8AABB2AAAQjwAAEHIAABCRAAAQcgAAEJEAABB2AAAQkgAAEHYAABCSAAAQcgAAEJEAABByAAAQkQAAEHYAABCTAAAQdgAAEJMAABB2AAAQlQAAEHYAABCVAAAQegAAEJIAABB6AAAQkgAAEHwAABCVAAAQfAAAEJUAABB+AAAQlwAAEH4AABCXAAAQegAAEJcAABB6AAAQlwAAEHgAABCXAAAQeAAAEJcAABB2AAAQlwAAEHYAABCXAAAQdgAAEJUAABB2AAAQlQAAEHgAABCSAAAQeAAAEJIAABB2AAAQkwAAEHYAABCTAAAQdgAAEJEAABB2AAAQkQAAEHgAABCUAAAQeAAAEJQAABB2AAAQlAAAEHYAABCUAAAQeAAAEJQAABB4AAAQlAAAEHIAABCRAAAQcgAAEJEAABB2AAAQjwAAEHYAABCPAAAQdgAAEI8AABB2AAAQjwAAEHIAABCQAAAQcgAAEJAAABB2AAAQjQAAEHYAABCNAAAQdgAAEI0AABB2AAAQjQAAEHIAABCQAAAQcgAAEJAAABB4AAAQjwAAEHgAABCPAAAQfgAAEI4AABB+AAAQjgAAEHwAABCNAAAQfAAAEI0AABB+AAAQjwAAEH4AABCPAAAQgQAAEI0AABCBAAAQjQAAEIEAABCOAAAQgQAAEI4AABCDAAAQjQAAEIMAABCNAAAQggAAEI0AABCCAAAQjQAAEIAAABCQAAAQgAAAEJAAABCBAAAQkQAAEIEAABCRAAAQgQAAEJAAABCBAAAQkAAAEIIAABCOAAAQggAAEI4AABCDAAAQjwAAEIMAABCPAAAQgwAAEI4AABCDAAAQjgA
AEIMAABCRAAAQgwAAEJEAABCBAAAQkAAAEIEAABCQAAAQggAAEI4AABCCAAAQjgAAEIIAABCPAAAQggAAEI8AABCBAAAQjwAAEIEAABCPAAAQfgAAEJAAABB+AAAQkAAAEHwAABCPAAAQfAAAEI8AABCAAAAQjgAAEIAAABCOAAAQgQAAEI8AABCBAAAQjwAAEIAAABCOAAAQgAAAEI4AABB8AAAQjgAAEHwAABCOAAAQgAAAEJAAABCAAAAQkAAAEH4AABCQAAAQfgAAEJAAABCCAAAQjwAAEIIAABCPAAAQggAAEJIAABCCAAAQkgAAEIEAABCRAAAQgQAAEJEAABCCAAAQkAAAEIIAABCQAAAQgwAAEJAAABCDAAAQkAAAEIAAABCQAAAQgAAAEJAAABB8AAAQjgAAEHwAABCOAAAQdgAAEJAAABB2AAAQkAAAEHQAABCTAAAQdAAAEJMAABB4AAAQlQAAEHgAABCVAAAQfAAAEJQAABB8AAAQlAAAEHoAABCWAAAQegAAEJYAABB+AAAQlgAAEH4AABCWAAAQfAAAEJUAABB8AAAQlQAAEHoAABCWAAAQegAAEJYAABB8AAAQlwAAEHwAABCXAAAQgQAAEJYAABCBAAAQlgAAEH4AABCXAAAQfgAAEJcAABB+AAAQlgAAEH4AABCWAAAQfAAAEJMAABB8AAAQkwAAEHgAABCVAAAQeAAAEJUAABB8AAAQkwAAEHwAABCTAAAQgAAAEJIAABCAAAAQkgAAEIAAABCSAAAQgAAAEJIAABCCAAAQkQAAEIIAABCRAAAQgwAAEJAAABCDAAAQkAAAEIEAABCPAAAQgQAAEI8AABCAAAAQkAAAEIAAABCQAAAQfAAAEI0AABB8AAAQjQAAEHwAABCOAAAQfAAAEI4AABCAAAAQjwAAEIAAABCPAAAQgAAAEI8AABCAAAAQjwAAEIIAABCQAAAQggAAEJAAABCDAAAQkAAAEIMAABCQAAAQgAAAEI4AABCAAAAQjgAAEIMAABCPAAAQgwAAEI8AABCBAAAQkQAAEIEAABCRAAAQggAAEJMAABCCAAAQkwAAEIAAABCTAAAQgAAAEJMAABCCAAAQlAAAEIIAABCUAAAQgwAAEJIAABCDAAAQkgAAEIMAABCTAAAQgwAAEJMAABCDAAAQkQAAEIMAABCRAAAQggAAEJEAABCCAAAQkQAAEIIAABCUAAAQggAAEJQAABCCAAAQlQAAEIIAABCVAAAQgQAAEJMAABCBAAAQkwAAEIAAABCSAAAQgAAAEJIAABCBAAAQkwAAEIEAABCTAAAQgwAAEJQAABCDAAAQlAAAEIEAABCRAAAQgQAAEJEAABCBAAAQkwAAEIEAABCTAAAQgwAAEJEAABCDAAAQkQAAEIMAABCQAAAQgwAAEJAAABCAAAAQjwAAEIAAABCPAAAQgwAAEI4AABCDAAAQjgAAEIEAABCPAAAQgQAAEI8AABCDAAAQjgAAEIMAABCOAAAQggAAEI0AABCCAAAQjQAAEIMAABCNAAAQgwAAEI0AABCDAAAQjgAAEIMAABCOAAAQgwAAEI0AABCDAAAQjQAAEIIAABCNAAAQggAAEI0AABCDAAAQjQAAEIMAABCNAAAQgQAAEI0AABCBAAAQjQAAEIEAABCOAAAQgQAAEI4AABB8AAAQkAAAEHwAABCQAAAQeAAAEJAAABB4AAAQkAAAEHQAABCNAAAQdAAAEI0AABB0AAAQjwAAEHQAABCPAAAQdAAAEJIAABB0AAAQkgAAEHQAABCSAAAQdAAAEJIAABByAAAQlQAAEHIAABCVAAAQcgAAEJUAABByAAAQlQAAEHYAABCXAAAQdgAAEJcAABB6AAAQlAAAEHoAABCUAAAQfAAAEJQAABB8AAAQlAAAEH4AABCUAAAQfgAAEJQAABB6AAAQlQAAEHoAABCVAAAQfAAAEJUAABB8AAAQlQAAEHwAABCXAAAQfAA
AEJcAABB8AAAQlwAAEHwAABCXAAAQgAAAEJYAABCAAAAQlgAAEH4AABCVAAAQfgAAEJUAABB4AAAQkwAAEHgAABCTAAAQdgAAEJYAABB2AAAQlgAAEHgAABCUAAAQeAAAEJQAABB2AAAQkgAAEHYAABCSAAAQeAAAEJIAABB4AAAQkgAAEHwAABCVAAAQfAAAEJUAABB2AAAQlwAAEHYAABCXAAAQcgAAEJQAABByAAAQlAAAEHYAABCTAAAQdgAAEJMAABB0AAAQkwAAEHQAABCTAAAQdgAAEJMAABB2AAAQkwAAEHYAABCTAAAQdgAAEJMAABB6AAAQkAAAEHoAABCQAAAQegAAEI8AABB6AAAQjwAAEHgAABCOAAAQeAAAEI4AABB8AAAQjwAAEHwAABCPAAAQfgAAEI8AABB+AAAQjwAAEHwAABCPAAAQfAAAEI8AABB2AAAQkQAAEHYAABCRAAAQfAAAEI8AABB8AAAQjwAAEIEAABCRAAAQgQAAEJEAABB+AAAQkAAAEH4AABCQAAAQggAAEI8AABCCAAAQjwAAEIMAABCQAAAQgwAAEJAAABCAAAAQjQAAEIAAABCNAAAQgQAAEI8AABCBAAAQjwAAEIAAABCNAAAQgAAAEI0AABCAAAAQjwAAEIAAABCPAAAQggAAEI0AABCCAAAQjQAAEH4AABCOAAAQfgAAEI4AABB6AAAQjQAAEHoAABCNAAAQdAAAEI0AABB0AAAQjQAAEHgAABCPAAAQeAAAEI8AABB0AAAQjgAAEHQAABCOAAAQegAAEI4AABB6AAAQjgAAEIAAABCOAAAQgAAAEI4AABCAAAAQjQAAEIAAABCNAAAQgAAAEI4AABCAAAAQjgAAEH4AABCRAAAQfgAAEJEAABCBAAAQjgAAEIEAABCOAAAQgAAAEI4AABCAAAAQjgAAEH4AABCNAAAQfgAAEI0AABB+AAAQjgAAEH4AABCOAAAQgQAAEI8AABCBAAAQjwAAEIIAABCRAAAQggAAEJEAABCBAAAQkQAAEIEAABCRAAAQfAAAEJAAABB8AAAQkAAAEIEAABCRAAAQgQAAEJEAABCBAAAQkQAAEIEAABCRAAAQfgAAEJMAABB+AAAQkwAAEIAAABCQAAAQgAAAEJAAABCCAAAQkAAAEIIAABCQAAAQgAAAEJEAABCAAAAQkQAAEH4AABCTAAAQfgAAEJMAABB+AAAQkQAAEH4AABCRAAAQfAAAEJMAABB8AAAQkwAAEHoAABCVAAAQegAAEJUAABB2AAAQlgAAEHYAABCWAAAQdgAAEJQAABB2AAAQlAAAEHYAABCVAAAQdgAAEJUAABB2AAAQlQAAEHYAABCVAAAQdAAAEJIAABB0AAAQkgAAEHYAABCRAAAQdgAAEJEAABB8AAAQkAAAEHwAABCQAAAQfAAAEI8AABB8AAAQjwAAEHoAABCRAAAQegAAEJEAABB6AAAQjwAAEHoAABCPAAAQfgAAEJEAABB+AAAQkQAAEIAAABCQAAAQgAAAEJAAABCDAAAQjQAAEIMAABCNAAAQgwAAEI8AABCDAAAQjwAAEIIAABCPAAAQggAAEI8AABCBAAAQjQAAEIEAABCNAAAQgwAAEI4AABCDAAAQjgAAEIEAABCNAAAQgQAAEI0AABB+AAAQjgAAEH4AABCOAAAQgAAAEI8AABCAAAAQjwAAEIEAABCPAAAQgQAAEI8AABCBAAAQjQAAEIEAABCNAAAQfAAAEI0AABB8AAAQjQAAEHgAABCNAAAQeAAAEI0AABB0AAAQjgAAEHQAABCOAAAQdAAAEI8AABB0AAAQjwAAEHQAABCOAAAQdAAAEI4AABB0AAAQjQAAEHQAABCNAAAQdgAAEI8AABB2AAAQjwAAEHgAABCSAAAQeAAAEJIAABB4AAAQlAAAEHgAABCUAAAQegAAEJEAABB6AAAQkQAAEH4AABCSAAAQfgAAEJIAABB+AAAQjwA
AEH4AABCPAAAQfgAAEI4AABB+AAAQjgAAEHgAABCRAAAQeAAAEJEAABB8AAAQkAAAEHwAABCQAAAQfgAAEJMAABB+AAAQkwAAEIIAABCWAAAQggAAEJYAABCDAAAQlgAAEIMAABCWAAAQgwAAEJYAABCDAAAQlgAAEIEAABCVAAAQgQAAEJUAABB+AAAQlwAAEH4AABCXAAAQeAAAEJUAABB4AAAQlQAAEHgAABCXAAAQeAAAEJcAABByAAAQlQAAEHIAABCVAAAQdAAAEJQAABB0AAAQlAAAEHYAABCUAAAQdgAAEJQAABB6AAAQkgAAEHoAABCSAAAQegAAEJAAABB6AAAQkAAAEHQAABCQAAAQdAAAEJAAABB0AAAQjgAAEHQAABCOAAAQdAAAEI8AABB0AAAQjwAAEHoAABCSAAAQegAAEJIAABB0AAAQkgAAEHQAABCSAAAQdgAAEJMAABB2AAAQkwAAEHgAABCSAAAQeAAAEJIAABB8AAAQkgAAEHwAABCSAAAQegAAEJUAABB6AAAQlQAAEH4AABCXAAAQfgAAEJcAABCCAAAQlAAAEIIAABCUAAAQgwAAEJIAABCDAAAQkgAAEIMAABCSAAAQgwAAEJIAABCCAAAQkQAAEIIAABCRAAAQfgAAEJAAABB+AAAQkAAAEIEAABCNAAAQgQAAEI0AABCDAAAQjgAAEIMAABCOAAAQggAAEI4AABCCAAAQjgAAEIIAABCNAAAQggAAEI0AABCAAAAQkAAAEIAAABCQAAAQfAAAEI0AABB8AAAQjQAAEHoAABCNAAAQegAAEI0AABB0AAAQjwAAEHQAABCPAAAQcgAAEI8AABByAAAQjwAAEHIAABCNAAAQcgAAEI0AABB2AAAQjQAAEHYAABCNAAAQegAAEI8AABB6AAAQjwAAEHQAABCPAAAQdAAAEI8AABB2AAAQjwAAEHYAABCPAAAQcgAAEJEAABByAAAQkQAAEHIAABCOAAAQcgAAEI4AABB2AAAQkAAAEHYAABCQAAAQegAAEJAAABB6AAAQkAAAEH4AABCSAAAQfgAAEJIAABB4AAAQkQAAEHgAABCRAAAQegAAEJEAABB6AAAQkQAAEH4AABCSAAAQfgAAEJIAABCCAAAQlAAAEIIAABCUAAAQgwAAEJQAABCDAAAQlAAAEIEAABCTAAAQgQAAEJMAABB8AAAQkwAAEHwAABCTAAAQfgAAEJIAABB+AAAQkgAAEHwAABCRAAAQfAAAEJEAABB8AAAQkQAAEHwAABCRAAAQgAAAEJIAABCAAAAQkgAAEHoAABCRAAAQegAAEJEAABB2AAAQjwAAEHYAABCPAAAQdgAAEI4AABB2AAAQjgAAEHoAABCOAAAQegAAEI4AABB2AAAQkAAAEHYAABCQAAAQcgAAEJAAABByAAAQkAAAEHYAABCNAAAQdgAAEI0AABB4AAAQkAAAEHgAABCQAAAQdAAAEJAAABB0AAAQkAAAEHgAABCOAAAQeAAAEI4AABB8AAAQkAAAEHwAABCQAAAQgQAAEI8AABCBAAAQjwAAEIIAABCSAAAQggAAEJIAABCBAAAQlAAAEIEAABCUAAAQgwAAEJUAABCDAAAQlQAAEIEAABCSAAAQgQAAEJIAABCAAAAQkQAAEIAAABCRAAAQfgAAEJEAABB+AAAQkQAAEHwAABCTAAAQfAAAEJMAABB6AAAQkAAAEHoAABCQAAAQdgAAEJMAABB2AAAQkwAAEHQAABCWAAAQdAAAEJYAABB4AAAQlAAAEHgAABCUAAAQfgAAEJMAABB+AAAQkwAAEIEAABCQAAAQgQAAEJAAABCAAAAQjwAAEIAAABCPAAAQggAAEI8AABCCAAAQjwAAEIIAABCSAAAQggAAEJIAABCAAAAQlAAAEIAAABCUAAAQegAAEJIAABB6AAAQkgAAEHoAABCRAAAQegAAEJEAABB6AAAQjgAAEHoAABCOAAAQfAA
AEI0AABB8AAAQjQAAEH4AABCQAAAQfgAAEJAAABB8AAAQkgAAEHwAABCSAAAQegAAEJAAABB6AAAQkAAAEHYAABCTAAAQdgAAEJMAABB6AAAQkAAAEHoAABCQAAAQdAAAEJIAABB0AAAQkgAAEHYAABCTAAAQdgAAEJMAABB4AAAQkQAAEHgAABCRAAAQegAAEJMAABB6AAAQkwAAEHYAABCSAAAQdgAAEJIAABB8AAAQjwAAEHwAABCPAAAQfgAAEJAAABB+AAAQkAAAEH4AABCQAAAQfgAAEJAAABCBAAAQkQAAEIEAABCRAAAQgAAAEJMAABCAAAAQkwAAEHoAABCQAAAQegAAEJAAABB8AAAQkAAAEHwAABCQAAAQdgAAEI4AABB2AAAQjgAAEHQAABCPAAAQdAAAEI8AABB6AAAQjQAAEHoAABCNAAAQdAAAEI0AABB0AAAQjQAAEHIAABCNAAAQcgAAEI0AABB2AAAQjQAAEHYAABCNAAAQeAAAEI0AABB4AAAQjQAAEH4AABCQAAAQfgAAEJAAABB+AAAQjwAAEH4AABCPAAAQggAAEI8AABCCAAAQjwAAEIMAABCPAAAQgwAAEI8AABCBAAAQjgAAEIEAABCOAAAQggAAEJEAABCCAAAQkQAAEH4AABCSAAAQfgAAEJIAABCBAAAQjwAAEIEAABCPAAAQgAAAEI4AABCAAAAQjgAAEIIAABCOAAAQggAAEI4AABCAAAAQjgAAEIAAABCOAAAQgAAAEJEAABCAAAAQkQAAEIMAABCUAAAQgwAAEJQAABCBAAAQlgAAEIEAABCWAAAQgQAAEJYAABCBAAAQlgAAEIIAABCXAAAQggAAEJcAABCDAAAQlgAAEIMAABCWAAAQgAAAEJcAABCAAAAQlwAAEIMAABCXAAAQgwAAEJcAABCDAAAQlQAAEIMAABCVAAAQgAAAEJcAABCAAAAQlwAAEIIAABCXAAAQggAAEJcAABCBAAAQlgAAEIEAABCWAAAQfAAAEJQAABB8AAAQlAAAEH4AABCWAAAQfgAAEJYAABB6AAAQlQAAEHoAABCVAAAQfAAAEJIAABB8AAAQkgAAEHoAABCQAAAQegAAEJAAABB0AAAQjQAAEHQAABCNAAAQdgAAEI0AABB2AAAQjQAAEHoAABCPAAAQegAAEI8AABB+AAAQjgAAEH4AABCOAAAQgQAAEI4AABCBAAAQjgAAEIIAABCOAAAQggAAEI4AABCDAAAQjQAAEIMAABCNAAAQgwAAEI4AABCDAAAQjgAAEIAAABCNAAAQgAAAEI0AABCBAAAQjwAAEIEAABCPAAAQgAAAEJAAABCAAAAQkAAAEH4AABCNAAAQfgAAEI0AABB6AAAQjgAAEHoAABCOAAAQdAAAEI8AABB0AAAQjwAAEHYAABCOAAAQdgAAEI4AABByAAAQkQAAEHIAABCRAAAQcgAAEJQAABByAAAQlAAAEHgAABCSAAAQeAAAEJIAABB4AAAQlQAAEHgAABCVAAAQfAAAEJUAABB8AAAQlQAAEIEAABCXAAAQgQAAEJcAABB8AAAQlwAAEHwAABCXAAAQdgAAEJcAABB2AAAQlwAAEHgAABCXAAAQeAAAEJcAABB4AAAQlgAAEHgAABCWAAAQdgAAEJUAABB2AAAQlQAAEHIAABCTAAAQcgAAEJMAABByAAAQkAAAEHIAABCQAAAQeAAAEI8AABB4AAAQjwAAEHwAABCQAAAQfAAAEJAAABB6AAAQjgAAEHoAABCOAAAQdgAAEJAAABB2AAAQkAAAEHwAABCTAAAQfAAAEJMAABB+AAAQlgAAEH4AABCWAAAQggAAEJUAABCCAAAQlQAAEIMAABCSAAAQgwAAEJIAABCDAAAQkgAAEIMAABCSAAAQgQAAEI8AABCBAAAQjwAAEIIAABCOAAAQggAAEI4AABCCAAAQjwAAEIIAABCPAAAQgAAAEI0AABCAAAAQjQA
AEIAAABCNAAAQgAAAEI0AABCAAAAQjwAAEIAAABCPAAAQegAAEI8AABB6AAAQjwAAEHgAABCQAAAQeAAAEJAAABB6AAAQkQAAEHoAABCRAAAQdgAAEI8AABB2AAAQjwAAEHoAABCOAAAQegAAEI4AABB0AAAQjwAAEHQAABCPAAAQdAAAEI4AABB0AAAQjgAAEHQAABCOAAAQdAAAEI4AABB2AAAQjQAAEHYAABCNAAAQcgAAEJAAABByAAAQkAAAEHIAABCSAAAQcgAAEJIAABB0AAAQkwAAEHQAABCTAAAQdgAAEJMAABB2AAAQkwAAEHoAABCRAAAQegAAEJEAABB0AAAQkwAAEHQAABCTAAAQcgAAEJYAABByAAAQlgAAEHIAABCWAAAQcgAAEJYAABByAAAQlgAAEHIAABCWAAAQdAAAEJcAABB0AAAQlwAAEHIAABCXAAAQcgAAEJcAABB0AAAQlQAAEHQAABCVAAAQcgAAEJUAABByAAAQlQAAEHIAABCTAAAQcgAAEJMAABB0AAAQkAAAEHQAABCQAAAQeAAAEI8AABB4AAAQjwAAEHoAABCQAAAQegAAEJAAABB6AAAQkgAAEHoAABCSAAAQegAAEI8AABB6AAAQjwAAEHYAABCQAAAQdgAAEJAAABB6AAAQjgAAEHoAABCOAAAQdAAAEJEAABB0AAAQkQAAEHoAABCPAAAQegAAEI8AABB4AAAQjgAAEHgAABCOAAAQfAAAEJEAABB8AAAQkQAAEH4AABCQAAAQfgAAEJAAABB4AAAQkAAAEHgAABCQAAAQdgAAEJAAABB2AAAQkAAAEHwAABCQAAAQfAAAEJAAABB8AAAQkgAAEHwAABCSAAAQdgAAEJUAABB2AAAQlQAAEHIAABCUAAAQcgAAEJQAABB4AAAQlQAAEHgAABCVAAAQegAAEJQAABB6AAAQlAAAEIAAABCRAAAQgAAAEJEAABCCAAAQkwAAEIIAABCTAAAQgQAAEJYAABCBAAAQlgAAEH4AABCXAAAQfgAAEJcAABB6AAAQlgAAEHoAABCWAAAQdAAAEJcAABB0AAAQlwAAEHQAABCVAAAQdAAAEJUAABByAAAQkgAAEHIAABCSAAAQdAAAEI8AABB0AAAQjwAAEHIAABCRAAAQcgAAEJEAABB0AAAQkwAAEHQAABCTAAAQcgAAEJYAABByAAAQlgAAEHIAABCUAAAQcgAAEJQAABB0AAAQlgAAEHQAABCWAAAQcgAAEJMAABByAAAQkwAAEHYAABCSAAAQdgAAEJIAABB6AAAQkwAAEHoAABCTAAAQdgAAEJIAABB2AAAQkgAAEHIAABCQAAAQcgAAEJAAABByAAAQjQAAEHIAABCNAAAQdAAAEI0AABB0AAAQjQAAEHQAABCPAAAQdAAAEI8AABB6AAAQkgAAEHoAABCSAAAQegAAEJIAABB6AAAQkgAAEHQAABCRAAAQdAAAEJEAABB4AAAQjgAAEHgAABCOAAAQcgAAEI0AABByAAAQjQAAEHYAABCPAAAQdgAAEI8AABB6AAAQkgAAEHoAABCSAAAQdgAAEJAAABB2AAAQkAAAEHYAABCOAAAQdgAAEI4AABByAAAQkAAAEHIAABCQAAAQdAAAEJEAABB0AAAQkQAAEHIAABCQAAAQcgAAEJAAABB0AAAQjgAAEHQAABCOAAAQdgAAEI8AABB2AAAQjwAAEHYAABCSAAAQdgAAEJIAABB8AAAQkwAAEHwAABCTAAAQfAAAEJMAABB8AAAQkwAAEIEAABCTAAAQgQAAEJMAABCCAAAQlgAAEIIAABCWAAAQggAAEJUAABCCAAAQlQAAEIEAABCTAAAQgQAAEJMAABCBAAAQkQAAEIEAABCRAAAQfAAAEJAAABB8AAAQkAAAEHgAABCNAAAQeAAAEI0AABB+AAAQjgAAEH4AABCOAAAQegAAEI8AABB6AAAQjwAAEHoAABCQAAAQegA
AEJAAABB8AAAQjQAAEHwAABCNAAAQgAAAEI0AABCAAAAQjQAAEIMAABCPAAAQgwAAEI8AABCCAAAQjQAAEIIAABCNAAAQgwAAEI0AABCDAAAQjQAAEIAAABCPAAAQgAAAEI8AABB+AAAQjwAAEH4AABCPAAAQfgAAEJEAABB+AAAQkQAAEHwAABCRAAAQfAAAEJEAABB6AAAQlAAAEHoAABCUAAAQeAAAEJQAABB4AAAQlAAAEHIAABCUAAAQcgAAEJQAABB0AAAQkQAAEHQAABCRAAAQdAAAEJAAABB0AAAQkAAAEHYAABCTAAAQdgAAEJMAABB6AAAQlgAAEHoAABCWAAAQdgAAEJcAABB2AAAQlwAAEHwAABCWAAAQfAAAEJYAABB8AAAQlQAAEHwAABCVAAAQegAAEJIAABB6AAAQkgAAEHoAABCQAAAQegAAEJAAABB4AAAQjQAAEHgAABCNAAAQcgAAEI0AABByAAAQjQAAEHgAABCQAAAQeAAAEJAAABB4AAAQjQAAEHgAABCNAAAQcgAAEI8AABByAAAQjwAAEHYAABCSAAAQdgAAEJIAABB0AAAQkAAAEHQAABCQAAAQcgAAEI0AABByAAAQjQAAEHIAABCPAAAQcgAAEI8AABB0AAAQjQAAEHQAABCNAAAQdAAAEI4AABB0AAAQjgAAEHYAABCPAAAQdgAAEI8AABB4AAAQjQAAEHgAABCNAAAQdgAAEI4AABB2AAAQjgAAEHYAABCPAAAQdgAAEI8AABB4AAAQkQAAEHgAABCRAAAQdgAAEI4AABB2AAAQjgAAEHwAABCOAAAQfAAAEI4AABB8AAAQkQAAEHwAABCRAAAQdgAAEJMAABB2AAAQkwAAEHIAABCQAAAQcgAAEJAAABB2AAAQkQAAEHYAABCRAAAQdAAAEJMAABB0AAAQkwAAEHYAABCTAAAQdgAAEJMAABB0AAAQlgAAEHQAABCWAAAQegAAEJQAABB6AAAQlAAAEHQAABCVAAAQdAAAEJUAABB6AAAQlwAAEHoAABCXAAAQgAAAEJUAABCAAAAQlQAAEIEAABCVAAAQgQAAEJUAABB8AAAQlgAAEHwAABCWAAAQfAAAEJYAABB8AAAQlgAAEIEAABCVAAAQgQAAEJUAABCDAAAQlwAAEIMAABCXAAAQgQAAEJUAABCBAAAQlQAAEIMAABCVAAAQgwAAEJUAABCBAAAQlwAAEIEAABCXAAAQgwAAEJUAABCDAAAQlQAAEIIAABCTAAAQggAAEJMAABCBAAAQlAAAEIEAABCUAAAQgQAAEJEAABCBAAAQkQAAEIMAABCPAAAQgwAAEI8AABCCAAAQjgAAEIIAABCOAAAQggAAEI0AABCCAAAQjQAAEIIAABCNAAAQggAAEI0AABCAAAAQjgAAEIAAABCOAAAQegAAEI4AABB6AAAQjgAAEHYAABCNAAAQdgAAEI0AABB8AAAQjQAAEHwAABCNAAAQeAAAEI8AABB4AAAQjwAAEHgAABCOAAAQeAAAEI4AABB4AAAQjwAAEHgAABCPAAAQegAAEJAAABB6AAAQkAAAEIAAABCNAAAQgAAAEI0AABCAAAAQjQAAEIAAABCNAAAQgQAAEJAAABCBAAAQkAAAEIEAABCOAAAQgQAAEI4AABCBAAAQjgAAEIEAABCOAAAQfAAAEJAAABB8AAAQkAAAEHgAABCTAAAQeAAAEJMAABB+AAAQlQAAEH4AABCVAAAQegAAEJIAABB6AAAQkgAAEHQAABCPAAAQdAAAEI8AABB0AAAQkQAAEHQAABCRAAAQdAAAEJEAABB0AAAQkQAAEHgAABCOAAAQeAAAEI4AABB0AAAQjwAAEHQAABCPAAAQcgAAEJEAABByAAAQkQAAEHQAABCOAAAQdAAAEI4AABB2AAAQkQAAEHYAABCRAAAQeAAAEJQAABB4AAAQlAAAEHoAABCSAAAQegAAEJIAABB8AAAQjwA
AEHwAABCPAAAQegAAEJAAABB6AAAQkAAAEH4AABCOAAAQfgAAEI4AABB6AAAQkQAAEHoAABCRAAAQgAAAEJAAABCAAAAQkAAAEIMAABCPAAAQgwAAEI8AABCBAAAQjQAAEIEAABCNAAAQfAAAEI8AABB8AAAQjwAAEHgAABCOAAAQeAAAEI4AABB8AAAQkQAAEHwAABCRAAAQgQAAEJMAABCBAAAQkwAAEH4AABCVAAAQfgAAEJUAABCAAAAQlwAAEIAAABCXAAAQgwAAEJcAABCDAAAQlwAAEIEAABCWAAAQgQAAEJYAABB+AAAQlwAAEH4AABCXAAAQeAAAEJcAABB4AAAQlwAAEHgAABCWAAAQeAAAEJYAABB2AAAQkwAAEHYAABCTAAAQegAAEJMAABB6AAAQkwAAEHYAABCWAAAQdgAAEJYAABB0AAAQkwAAEHQAABCTAAAQdgAAEJEAABB2AAAQkQAAEHgAABCQAAAQeAAAEJAAABB2AAAQkQAAEHYAABCRAAAQdAAAEJIAABB0AAAQkgAAEHQAABCQAAAQdAAAEJAAABB4AAAQkQAAEHgAABCRAAAQfgAAEJIAABB+AAAQkgAAEH4AABCQAAAQfgAAEJAAABB4AAAQkgAAEHgAABCSAAAQeAAAEJQAABB4AAAQlAAAEHwAABCXAAAQfAAAEJcAABB2AAAQlQAAEHYAABCVAAAQdAAAEJQAABB0AAAQlAAAEHYAABCVAAAQdgAAEJUAABB2AAAQkwAAEHYAABCTAAAQfAAAEJUAABB8AAAQlQAAEHYAABCVAAAQdgAAEJUAABB6AAAQkgAAEHoAABCSAAAQfgAAEI8AABB+AAAQjwAAEABSwi2ARABEA0QlQsQ9b6eZBCl2I3lARDo4YLXARCQhPJ/EMj/67oBENTc7L0BEKHWteQBELbBhEYQ2/z1xAEQ8aWjgQEQpZmVaBDgsveaARDYopp6ENm5q6kBEPD62bQBELv43poBEJTD59YBEOG9wqQBEOOykwoQjonBmAEQ5+O/tAEQuOj55AEQsMXcYBC4l9lJELvm8JwBELDum2MQsIzzfRCs++AlEK6wxUMQ1MSEQRCdo/ChARDu283bARDl2ftsEIGPjaIBEMjjuaIBEIW3pF8Qj4fwfBDP05e7ARD+o+pcENP316YBEIyfnWYQ2uqbYxCH7Yy9ARDEv8KbARDp2aovEMy912UQ5ZrinwEQ4ISj3wEQlIS9hgEQ6YfkRxCG19lEEJSCgUYQ7oa2fRDA1KmgARDk8ouaAhCnvrW/ARCCwsVBENS4lEMQ0qT91gEQ1rqQahCToa6bARDM5vRHEID0+KgBEPP2tMQBEJfgxcYBEKSAwEMQ/ti3fRDVhZtPEO61+9sBELThwUwQqJCinQEQ0JCbnwEQos+/2QEQ+abYKRDSs8qFARCg/OP9ARCTpePdARCB2bOfARCJnuq6ARDl3JwqEK6z0b0BELnb9XoQwdaluQEQ57Dl3QEQpoGtuAEQv4Ge3gEQ/L6aYxDF97QlEPmdyd0BEJKpgbsBEKPokEgQ2fPIuAEQ7seomAEQ2+zpowEQ84iXgQEQ596RtwEQ64mbtgEQ6J2xfxC24dhoELLIgksQ07H0LBDj2tGjARDE3olIEKqB5IMBEJ7b96gBEN2GnrkBELml2acBENzD5TAQj//ApgEQ1ubTfBD71YqiARDmoo/1ARCJ2rSDARCa5eOIARDc993dARD9gcdlELLJqSwQs/SUYxCHvazcARD6q61/ELuNuIYBEN+WlaQBEPqs0oEBEK3jvoQBENO2wmUQvMWNZBCYttj2ARD+jO2mARCEhKtPELuhuXgQjJuR2QEQ5aWZuwEQ8bmuTBDgpLC7ARC4ub2PARCTtd6IARCa2t9jEKzE6oQBEJaJ8WYQk5PUahDplqL1ARDPvaXZARCz0K99ENzw9v0BEKGI+kUQ9NSYfxCR6PteEOju6aMBEMiqxI0BEL/0qWMQ
l6DthwEQ0v/qigEQxdXwwgEQya2WggEQ36K9mwEQyMe6SBDLj6GGARDRqqFhEILH5DAQ27DpbBDMutl8EJ/n5k8Ql7SraBDG/chMEPLSxeIBEOSG4bgBEJHyjG8Qm+X8tgEQiZ3QRRCAnvgiELT7liYQ4ILQahC05vteEPKxrH8Q+ujhiAEQi7/hxAEQr82VqQEQw53E+QEQ478MTFAAWwjOBRAAEOAsEJULEISzARCUmQQQpP8GELTlCRDEywwQ1LEPEOSXEhD0/RQQhOQXEJTKGhCksB0QtJYgEMT8IhDU4iUQ5MgoEPSuKxCElS4QlPswEKThMxC0xzYQxK05ENSTPBDk+T4Q9N9BEITGRBCUrEcQpJJKELT4TBDE3k8Q1MRSEOSqVRD0kFgQhPdaEJTdXRCkw2AQtKljEMSPZhDU9WgQ5NtrEPTBbhCEqHEQlI50EKT0dhC02nkQxMB8ENSmfxDkjIIBEPTyhAEQhNmHARCUv4oBEKSljQEQtIuQARDE8ZIBENTXlQEQ5L2YARD0o5sBEISKngEQlPCgARCk1qMBELS8pgEQxKKpARDUiKwBEOTurgEQ9NSxARCEu7QBEJShtwEQpIe6ARC07bwBEMTTvwEQ1LnCARDkn8UBEPSFyAEQhOzKARCU0s0BEKS40AEQtJ7TARDEhNYBENTq2AEQ5NDbARD0tt4BEISd4QEQlIPkARCk6eYBELTP6QEQxLXsARDUm+8BEOSB8gEQ9Of0ARCEzvcBEJS0+gEQpJr9ARC0gIACEMTmggIQ1MyFAhDksogCEPSYiwIQhP+NAhCU5ZACEKTLkwIQtLGWAhDEl5kCENT9mwIQ5OOeAhD0yaECEISwpAIQlJanAhCk/KkCELTirAIQxMivAhDUrrICEOSUtQIQ9Pq3AhCE4boCEJTHvQIQpK3AAhC0k8MCEMT5xQIQ1N/IAhDkxcsCEPSrzgIQhJLRAhCU+NMCEKTe1gIQtMTZAhDEqtwCENSQ3wIQ5PbhAhD03OQCEITD5wIQlKnqAhCkj+0CELT17wIQxNvyAhDUwfUCEOSn+AIQ9I37AhCE9P0CEJTagAMQpMCDAxC0poYDEMSMiQMQ1PKLAxDk2I4DEPS+kQMQhKWUAxCUi5cDEKTxmQMQtNecAxDEvZ8DENSjogMQ5ImlAxD076cDEITWqgMQlLytAxCkorADELSIswMQxO61AxDU1LgDEOS6uwMQ9KC+AxCEh8EDEJTtwwMQpNPGAxC0uckDEMSfzAMQ1IXPAxDk69EDEPTR1AMQhLjXAxCUntoDEKSE3QMQtOrfAxDE0OIDENS25QMQ5JzoAxD0gusDEITp7QMQ5O3wAxCktfMDELSb9gMQxIH5AxDU5/sDEOTN/gMQ9LOBBBCEmoQEEJSAhwQQpOaJBBC0zIwEEMSyjwQQ1JiSBBDk/pQEEPTklwQQhMuaBBCUsZ0EEKSXoAQQtP2iBBDE46UEENTJqAQQ5K+rBBD0la4EEIT8sAQQlOKzBBCkyLYEELSuuQQQxJS8BBDU+r4EEOTgwQQQ9MbEBBCErccEEJSTygQQpPnMBBC0388EEMTF0gQQ1KvVBBDkkdgEEPT32gQQhN7dBBCUxOAEEKSq4wQQtJDmBBDE9ugEEOT36wQQ5MLuBBD0qPEEEISP9AQQlPX2BBCk2/kEELTB/AQQxKf/BBDUjYIFEOTzhAUQ9NmHBRCEwIoFEJSmjQUQpIyQBRC08pIFEMTYlQUQ1L6YBRDkpJsFEPSKngUQhPGgBRCU16MFEKS9pgUQtKOpBRDEiawFENTvrgUQ5NWxBRD0u7QFEISitwUQlIi6BRCk7rwFELTUvwUQxLrCBRDUoMUFEOSGyAUQ9OzKBRCE080FEJS50AUQpJ/TBRC0hdYFEMTr2AUQ1NHbBRDkt94FEPSd4QUQhITkBRCU6uYFEKTQ6QUQtLbsBRDEnO8FENSC8gUQ5Oj0BRD0zvcFEIS1+gUQlJv9BRCkgYAG
ELTnggYQxM2FBhDUs4gGEOSZiwYQ9P+NBhCE5pAGEJTMkwYQpLKWBhC0mJkGEMT+mwYQ1OSeBhDkyqEGEPSwpAYQhJenBhCU/akGEKTjrAYQtMmvBhDEr7IGENSVtQYQ5Pu3BhD04boGEITIvQYQlK7ABhCklMMGELT6xQYQxODIBhDUxssGEOSszgYQ9JLRBhCE+dMGEJTf1gYQpMXZBhC0q9wGEMSR3wYQ1PfhBhDk3eQGEPTD5wYQhKrqBhCUkO0GEKT27wYQtNzyBhDEwvUGENSo+AYQ5I77BhD09P0GEITbgAcQlMGDBxCkp4YHELSNiQcQxPOLBxDU2Y4HEOS/kQcQ9KWUBxCEjJcHEJTymQcQpNicBxC0vp8HEMSkogcQ1IqlBxDk8KcHEPTWqgcQhL2tBxCUo7AHEKSJswcQtO+1BxDE1bgHENS7uwcQ5KG+BxD0h8EHEITuwwcQlNTGBxCkuskHELSgzAcQxIbPBxDU7NEHEOTS1AcQ9LjXBxCEn9oHEJSF3QcQpOvfBxC00eIHEMS35QcQ1J3oBxDkg+sHEPTp7QcQhNDwBxCUtvMHEKSc9gcQtIL5BxDE6PsHENTO/gcQ5LSBCBD0moQIEISBhwgQlOeJCBCkzYwIELSzjwgQxJmSCBDU/5QIEOTllwgQ9MuaCBCEsp0IEJSYoAgQpP6iCBC05KUIEMTKqAgQ1LCrCBDklq4IEPT8sAgQhOOzCBCUybYIEKSvuQgQtJW8CBDE+74IENThwQgQ5MfECBD0rccIEISUyggQlPrMCBCk4M8IELTG0ggQxKzVCBDUktgIEOT42ggQ9N7dCBCExeAIEJSr4wgQpJHmCBC09+gIEMTd6wgQ1MPuCBDkqfEIEPSP9AgQhPb2CBCU3PkIEKTC/AgQtKj/CBDEjoIJENT0hAkQ5NqHCRD0wIoJEISnjQkQlI2QCRCk85IJELTZlQkQxL+YCRDUpZsJEOSLngkQ9PGgCRCE2KMJEJS+pgkQpKSpCRC0iqwJEMTwrgkQ1NaxCRDkvLQJEPSitwkQhIm6CRCU77wJEKTVvwkQtLvCCRDEocUJENSHyAkQ5O3KCRD0080JEIS60AkQlKDTCRCkhtYJELTs2AkQxNLbCRDUuN4JEOSe4QkQ9ITkCRCE6+YJEJTR6QkQpLfsCRC0ne8JEMSD8gkQ1On0CRDkz/cJEPS1+gkQhJz9CRCUgoAKEKToggoQtM6FChDEtIgKENSaiwoQ5ICOChD05pAKEITNkwoQlLOWChCkmZkKELT/mwoQxOWeChDUy6EKEOSxpAoQ9JenChCE/qkKEJTkrAoQpMqvChC0sLIKEMSWtQoQ1Py3ChDk4roKEPTIvQoQhK/AChCUlcMKEKT7xQoQtOHIChDEx8sKENStzgoQ5JPRChD0+dMKEITg1goQlMbZChCkrNwKELSS3woQxPjhChDU3uQKEOTE5woQ9KrqChCEke0KEJT37woQkN7yChC0w/UKEMSp+AoQ1I/7ChDk9f0KEPTbgAsQhMKDCxCUqIYLEKSOiQsQtPSLCxDE2o4LENTAkQsQ5KaUCxD4v5gLEIymmwsQnIyeCxCs8qALELzYowsQzL6mCxDcpKkLEOyKrAsQ/PCuCxCM17ELEJy9tAsQrKO3CxC8iboLEMzvvAsQ3NW/CxDsu8ILEPyhxQsQjIjICxCc7soLEKzUzQsQvLrQCxDMoNMLENyG1gsQ7OzYCxD80tsLEIy53gsQnJ/hCxCsheQLELzr5gsQzNHpCxDct+wLEOyd7wsQ/IPyCxCM6vQLEJzQ9wsQrLb6CxC8nP0LEMyCgAwQ3OiCDBDszoUMEPy0iAwQjJuLDBCcgY4MEKznkAwQvM2TDBDMs5YMENyZmQwQ7P+bDBD85Z4MEIzMoQwQnLKkDBCsmKcMELz+qQwQzOSsDBDcyq8MEOywsgwQ/Ja1DBCM/bcMEJzjugwQrMm9DBC8r8AMEMyVwwwQ3PvFDBD46cgM
EPzHywwQjK7ODBCclNEMEKz60wwQvODWDBDMxtkMENys3AwQ7JLfDBD8+OEMEIzf5AwQnMXnDBCsq+oMELyR7QwQzPfvDBDc3fIMEOzD9QwQ/Kn4DBCMkPsMEJz2/QwQrNyADRC8woMNEMyohg0Q3I6JDRDs9IsNEPzajg0QjMGRDRCcp5QNEKyNlw0QvPOZDRDM2ZwNENy/nw0Q7KWiDRD8i6UNEIzypw0QnNiqDRCsvq0NELyksA0QzIqzDRDc8LUNEOzWuA0Q/Ly7DRCMo74NEJyJwQ0QrO/DDRC81cYNEMy7yQ0Q3KHMDRDsh88NEPzt0Q0QjNTUDRCcutcNEKyg2g0QvIbdDRDM7N8NENzS4g0Q7LjlDRD8nugNEIyF6w0QnOvtDRCs0fANELy38w0QzJ32DRDcg/kNEOzp+w0Q/M/+DRCMtoEOEJychA4QrIKHDhC86IkOEMzOjA4Q3LSPDhDsmpIOEPyAlQ4QjOeXDhCczZoOEKyznQ4QvJmgDhDM/6IOENzlpQ4Q8P6pDhCE5awOEJTLrw4QpLGyDhC0l7UOEMT9tw4Q1OO6DhDkyb0OEPSvwA4QhJbDDhCU/MUOEKTiyA4QvPvMDhDM4c8OENzH0g4Q7K3VDhD8k9gOEIz62g4QnODdDhCsxuAOELys4w4QzJLmDhDc+OgOEOze6w4Q/MTuDhCMq/EOEJyR9A4QrPf2DhC83fkOEMzD/A4Q3Kn/DhDsj4IPEPz1hA8QjNyHDxCcwooPEKyojQ8QvI6QDxDM9JIPENzalQ8Q7MCYDxD8ppsPEIyNng8QnPOgDxCs2aMPELy/pg8QzKWpDxDci6wPEOzxrg8QgIuzDxCU8bUPEKTXuA8QtL27DxDEo74PENSJwQ8Q5O/DDxD01cYPEIS8yQ8QlKLMDxCkiM8PELTu0Q8Q4CxcaABwAIABmQuIAQCQAQGYAYAEoAGAEIwBkwEKAzIuMBMIgAIVgijmvxV2qee/FZLG6b8Vss3pvxXAGem/Ff4n8L8VIEPrvxXH8O2/FfjQ6r8VRyDsvxVfJO6/FYcc9b8Vyv/yvxXaW+y/FRX1678VNvbtvxV0i/e/Fd0P/r8VxzbyvxWHhu6/FUjP+L8VTpr1vxXa4fa/FbG0AcAVtHr3vxV/bAfAFdHT9b8VnfH2vxW0qe2/FRuV8b8Vtp/vvxUWbO+/FRKd/L8VdZgGwBV7r/i/FTskAMAVP/0AwBVRGfi/FZUlAMAVOigAwBXEEQ/AFeZIAcAVxfEFwBXtePa/FUXoNMAVYu73vxUoEiXAFXY2CsAVD2EKwBWoMgrAFS/LCMAVWUEcwBXBEAzAFRIW978VaYP2vxUhJgbAFSW+/b8VtpP2vxVofPG/FbsbAsAVXyD5vxXMZR7AFf7ZA8AVn7jzvxUKB1vAFWidA8AV1Wo9wBWv+wjAFUsKSsAVZFwHwBVmeRDAFQW2CsAVx44hwBXNIh7AFaTpCcAVsVc2wBWvdk7AFTiMGcAVkJMbwBWrERrAFR21A8AVFDw/wBUBhBTAFaM6CcAVULwGwBWNRyTAFR39FMAV1TgswBXKnQTAFdz2O8AV7O5pwBXG+gnAFSSFAMAVjL4lwBXSJi7AFXe4GsAVuqEnwBXJcAzAFQZCTcAVjTEhwBV4NC/AFW51JMAVAFAfwBUvp0zAFYOzMsAV+Y0uwBXKrw3AFc9EGsAVN7MKwBUmfybAFSS2DsAVBtgKwBVSNw/AFatGBcAV5xQNwBUi9AjAFV+KDMAVBkYEwBXeLQrAFckgBsAV66cUwBUjnADAFaBbHcAVnvIjwBWHa1HAFYdFHcAViGMpwBUWbQvAFUgzR8AVG6GBwBX6WbDAFQ4KVcAVxTIpwBWAnl/AFbdah8AVh/YjwBW9Og3AFSBPkcAV3+CfwBWtJR/AFZRqOsAVZ2MgwBVckxHAFY4/DsAVXS0TwBWcr3rAFXCRN8AVrnOGwBWx/oPAFRn6ocAVNhJFwBVVO3jAFWbgasAV
h6VRwBXoUX3AFRichMAV52wrwBWmIWbAFdG9HcAVj4Y7wBXDUh7AFSftgMAVvYsFwBVqwUvAFSWFisAVxKYVwBVvIWfAFfWWGMAVaMcYwBUhaqLAFXQICMAVuuNpwBULyT3AFcfPzsAV9sJ8wBU4bTjAFee9bsAVuQmKwBXBCDLAFfSgQMAV4E1IwBUEtZ3AFfweisAVtHJCwBVvNCDAFfPOH8AV3XFcwBXaTnDAFUMwdcAV9C5IwBWlikTAFVy2PMAVpLhTwBW63XXAFe7TSsAVSxU5wBX1UonAFRnhCMEVRoJRwBUODUHAFfopS8AVJ0xYwBVs14XAFbunV8AV5p0ywBX1AzbAFd+CVMAVsyjawBX8wJDAFUDvRsAVnB5LwBVSp1bAFbZMNMAVgBg0wBUyt0DAFe4ua8AVFkyiwBUeSwzAFa/UNcAVoH6GwBUktzjAFVEP2MAVZWAgwBVe/Q/AFVs6IMAVxcE8wBVE2zbAFeo8JMAV4sExwBVtD8DAFRwRh8AVarc+wBWT0o7AFfsPOsAVpgghwBWzK57AFe5PHcAVDdsTwBXBWxbAFTrfFsAV2cEawBXntiXAFTmUHsAV6MMMwBVvTb/AFZJnL8AVMpwkwBWuQ5rAFcxdJMAVZTOUwBXgbFjAFXPCPcAVy2zIwBWNa4rAFe84UcAVeNMmwBQbCIMBEAIQkQsQgAIQguY1EL+LSRDwpgoQ+OQHEIW6SRDv1lMQ1albEMjdexDm1wYQot8bEOvTIBCE2EgQv5EiEJy8ZxCmmz4Qyo4NEJCVKRD7kHAQ5IohELPDFRDXtxwQkeBQEKrgVxCdtDwQ46sbEO6iQhDR408Q77cbEP6bZxCTkjkQxMI/EJOmeRC7iSAQurA+EMXjKRCYkCEQ5ItPEKqoARDTzxcQh5tJEIORFRCwuUgQ5YoWENS4FhCPtSUQ/cgZEOvDbRCTtDEQkKhGENeOXBDikmoQj/BZELyZYRD+3iUQ3+FgEJjBVBDMj3IQ4JRaEOnYMxCL0XAQ55Y7EI3JSxCJ2REQk797EMzLOhD45QkQ1+MNEPq3bxCTkB0Q195OEMHSExCo9DYQpJZNEIaAWRDAinYQzasQEKKJBBCM7gYQkcFPEP6LDBCc2DYQtIZmEJ/kLhDmmSwQmpsvEOf+XRDUigUQo51EEKTPGBDuiDYQu7UIEPXgARDp7FoQk+4IEOCdYBCvsDQQ2KIJEILUBBDhqgkQrotjEIv6ThDeuVwQucYhEPWBIhDjuXYQ0KFwENHjQBDp5RAQ6P5GENHHXBCSxC4Q6uopENKhNRDyjkoQntRcEPOmQxDb2jAQhvMMEPvIUhDZtjsQl8pMEOzwOhDxn2UQu+BvEJKTRRClt3oQ88NkENKZehwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFow6L+gLCbmf4TlAGTAQoDMi4wEwj/ARXDQOO/FT9B5b8VTAbmvxXoYOW/FRB56L8VUIvmvxXfyue/FSe66r8VGZjlvxXztOq/FWVX7L8VUyrpvxVexei/Faw56r8Vq+TnvxVfjfG/FUKDAcAV7ZHqvxXnZu2/FUGK+78VcpzxvxXzX+y/FUVx8b8VElPpvxXFxeu/FX1z7b8VfGbwvxWP0QLAFUKTB8AVafD1vxXixum/FYy98r8VSan5vxVm7A3AFdh8A8AVLRXvvxWaiOy/FYW/878V/Cv9vxVKrfy/FaTyCcAVPnkDwBUK4/6/FfEUC8AVCZnuvxW3D/u/FTDxCsAV4W39vxVwXvO/FQANEsAVWu4CwBX2SwfAFYQn778V+j35vxXtkwvAFcQ6A8AVeaYPwBXfUwjAFZD/CMAVeNAFwBWh+0jAFc0DHsAVF/jwvxXOfg/AFT+1FcAVuR8swBWeDAHAFQKII8AV1JFAwBWHODzAFY3sCMAVTUz9vxWqZw/AFap69r8VMmkEwBUkjwnAFd5mAsAVlf4uwBUbIC3AFTxtA8AVmk4m
wBWXVhLAFdudJ8AV2XUNwBUWGiDAFYbSEMAVVnsRwBW7xCnAFX9KQMAV1CsIwBXdZADAFT34JcAVKIf/vxUFHQ7AFea6DcAVascYwBXO1xLAFbbBCMAVRU32vxUpBRrAFbNeOMAVsisbwBXdLFDAFcfuCsAVENwIwBVnOQPAFdkRAsAVN7kKwBVFDPu/FUr6NcAV6UMMwBXTFQfAFYK8DsAVkfZPwBXsOi/AFYh8CcAVA89mwBWEkhHAFatTCcAVf1wbwBW5hh/AFR0IY8AV6JBTwBUzQyPAFdudbMAVeusrwBUl//e/FRatJ8AVa38VwBWtGiDAFcknMcAVlrhgwBUclTzAFUnJBcAVs8ZRwBXhsprAFU1cesAVy9xiwBUjU47AFQdOPcAVPSRpwBUQoLXAFYU/OsAV+ShUwBVtnCXAFcCs6MAVaeBPwBVtoDTAFU1XHcAVxyEvwBWbTyDAFXX1FMAVCXodwBVNjGDAFTBxBsAVOpVVwBWjunjAFcDYYsAV/jk4wBW0cxHAFbCAQ8AVoF2YwBWm9CzAFTQItMAV9eIZwBXq+SzAFcAaMMAVd0wWwBUM6iXAFeGFI8AVQ8QhwBUc2hPAFTn8V8AVl1WpwBW5BoDAFcaMjMAVHbU1wBUDqkTAFS/DUMAVzMWzwBVSxXjAFZV4BMAVStarwBXuSjjAFRhwpMAVFqnhwBUKJzDAFeYDksAVzeUfwBVIm1jAFdALHcAV8PZQwBVJnUPAFbwoysAVPqdBwBXj+jPAFTPDesAVEcSNwBUubGDAFdPtNcAVKITGwBV966DAFdLUVcAVXZ+ywBUNA8TAFb1cjcAVu+BvwBU+v0zAFSV6MMAVW4hKwBWCOwvAFZe2EsAV+u87wBWu/Q7AFdr+TMAVIS03wBXX5TbAFZIEE8AVowlowBWvdKjAFemAf8AVj3gVwBXr/UrAFV1UCMAVIIoNwBVpVyHAFcNPN8AVCfbWwBVAylnAFQdxucAVZIY3wBVvTQzAFf5MNcAV5i51wBWgnYbAFVAE08AV4/M6wBWNvBHAFdLHGcAVoWk4wBX2CJrAFaspLcAVG6EowBXEOWrAFXC4ecAV2sSBwBXgo8rAFbVjQ8AVt3czwBVmF7XAFTkOlcAVFrSNwBVHqGHAFQS5tsAVCVYCwBQbCIMBEAMQjwsQ/wEQztUYENe7DxDfoy0Q0JtGEMi0bhCt6FEQxuRdEJ+RQRDTvkwQv4EOEJnkNxC35RoQy70hENuwBRDYqDoQg/dMEMfTAhDntVkQ/ulGEPnEPRCmgEgQ9f4VEPrlOBDYmBoQv+kBEO7XahDI+yEQocM7EJ7QLBCNxjIQ2MZXEPnmehCkyQIQ84V4EK6nahDusg4QivdxELDGCBCAwl4QguMLEMm1ThCX2GMQvpZ6EJb3XxDEpjMQuOxPEOXlFxCcoRkQipgzENGPXRC34wMQxe0dEKm5NxCl2mkQ1MF2EIO6KBD3kGMQ7r5FEL+mLxCa60EQvrx5EJapQBDFj1kQhdoNEN+VdxDfkgwQk5ptEK+RbhCFonMQ+KxhEM30XhDUuXMQoIU1EPmGERCYigIQqohTENmuZBCRym4Q1YhXEPLoEhC7qCwQ1tlFEOL6WxCz7FcQvaI8EKDCXRCP+wkQ+M0ZEPOPSBC0mhcQ3b0IEJDfXxDgzwUQpex4EL+CIhDUnAkQjr9vENLHRBC5kHQQ/eFQEI6/PhDipFcQjoNhEJjIJhCClh4Qso9rEOnlMhDKqG4Qv94iEIadLRDa2iYQlNJWELqCKRDsn3gQtbkZEIiNURCxw0kQirg9EILAaRC5vWAQj5tTEM6aehCb2zsQq+dBEK2qaxCzi1oQ67NsEPAKHCAAMP8BOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWis8KCFhZbWggGUAZMBCgMyLjATCIACFSij3L8VafjdvxV6i+C/FU7K378VSAvfvxUwHeG/Fa6H4b8Vbg7hvxWJOuS/FTui
6L8VR6bgvxWtte6/Fe/t478VkBnmvxUk6+G/FUyN5r8Vgp4AwBWo2uW/FdR6678VvdTzvxXNmOy/FTzh4r8VDQnyvxWQi/a/FXe7978VFrvrvxUz++a/FQxO9b8VzCr0vxWkzvC/FR9m/b8VAe7pvxXdKATAFaP3AMAVqHEDwBX9xwHAFR1X6L8V6SoEwBUEK/y/Fb1DA8AVQ2kBwBXZFPm/FZXs7L8Vou3yvxVPKvK/FZ0x878VmTIDwBUMHyLAFfnOGcAVD28BwBVCORLAFc82+r8VWgj8vxVB3gHAFQGqA8AVZ2P8vxX7phDAFdztAMAVMP/4vxUe+v2/Fd0nBsAV15MDwBWbPAnAFckj678V+2wMwBVXiQnAFVxfBMAVluIEwBXVIBPAFZQjEsAVAjFYwBWYLAXAFeASHMAVZSgpwBVdg/6/FXFXBcAVWA4cwBVHohDAFU14CsAVUN4IwBUbTA/AFWCgL8AVsfICwBXt/gnAFa3O+78V8AcDwBUVOfa/FdjaCsAVnpsgwBXSePO/FfKf+78VimU8wBXV8A3AFblxG8AVxTEIwBWjnyLAFRfWQ8AVBuImwBVg6R3AFfsoBMAVsicUwBXEMyTAFanKGsAVglMSwBUJWBDAFdFzDMAVIIoOwBWpjA/AFeBWEsAVgLM3wBX2fxjAFZmDAMAVUMUFwBVmlRTAFfZQH8AVEacswBVT/A7AFR2XAcAVpXYbwBWreAbAFVNk/78VXqAXwBVw5xjAFWY2FsAV7pAMwBW7EhDAFV+xKcAV2JoHwBVOc0LAFYOqL8AVecfNwBV5kzPAFWwaQ8AVCo0/wBUzQoPAFbudHcAVjMpLwBV8jc7AFaQhOsAVld8ewBVkxhPAFbMlmsAVFyytwBUGXT3AFcqjH8AVi38rwBWw80rAFUoUP8AV4GkuwBWT5BjAFfPwCcAV2/lmwBXDHRvAFWN9xMAVmkQiwBX5QTfAFcMSWsAVwY5vwBXqQhnAFVOZEcAVAZ4KwBUSYinAFQq3f8AVAKxhwBUz54rAFc3NPsAVGGAFwBXllnnAFVU1M8AV7aQQwBV+NSDAFW+EBMAVn8McwBVsa6jAFbVLOcAVGE5swBUWLknAFaX0bcAVSvScwBVx3l3AFVVn/r8V8dYKwBUlVf6/FX4WbMAVcyBRwBXiwVvAFdZOgcAVq/A1wBW460fAFX6SFcAVxukNwBU/pGLAFT5EBcEVIGRvwBWl0lvAFVXnxsAVP6wAwRUcW5PAFUV2X8AVSH0JwBVo4xbAFaN9P8AVzgsbwBWB1yvAFfk3XMAVevBBwBU1gorAFfg+Z8AV98QXwBU9w1PAFRJIFsAVs6l1wBVsTT/AFXr2HcAVrOmFwBXE5RXAFfHFGsAVQhk0wBXCJDnAFZsCU8AV+ixrwBWGlSLAFTwbJMAVc0pewBUvpBXAFeGCJMAVNrEawBWhCyDAFQUgGcAVtNtkwBVup2LAFe0YNsAVSB6WwBUJXxTAFUeaIcAVD1tHwBXYdCTAFSOmzMAV9oNVwBU3YAvAFQYkdMAV4DIJwBUp5TPAFXUkI8AVDFJ/wBWZIHPAFVKKI8AV/Z9iwBVGYHLAFexlUMAVBsabwBXs4HbAFX/WkcAVuqdJwBUeR2bAFatLSMAUGwiDARAAEI0LEIACEPXrcRDhux0Q4dBYEKa3dhCb/BUQ9K5fEIPXLRDy93IQ1ewQELufZBDHuVAQ25sJEInkbRDarQ0Q2O4/EKrEdBCYtTQQ0alaEI2LehCb0xMQkoEVEIr/WxCR+RcQrbZNELSkXhDJ9k4Qn75RELy+JhCH4hQQz5tKELe+UxCBxRsQ89E5ELOGKxDS9m4Qi7k1EM2YERCV7zQQzYFoEN2EXxCnkmQQ09hsEKnVAhCRqBYQm+tFEOOpbRDw5yoQ+/QbEOPVcRCn2gQQtYcdEI3bdBCb7x4Q5c8yEJPlZBC4qyYQ0KdDEJXVMxDQ9nMQ8q44EMP7PhCj9gwQ29EWENfb
ehDl018Q/ggQxbRyEO2aBRDZlVgQoeNaELs7EM2MAxCdyXMQk/gQEN/EERDYuxsQvrpFEPiDBxDUkAcQ1rIHEI2DbRCq8moQipMeEI3MWBCR+wcQ6KcgENDbVRC4glUQ3PtaEPq5VhCVtxcQ3dZ1EOnwBxC80WMQ8NEYEPCvCRDrsxkQgd4CENSuNRDW+xsQl6dYEIScJxCWuXcQ5Y4sEMuWChChvmcQ6o08EPKhdBCUjR8Q8fo5EIvqYhCMklAQx9AMEOjuVRDahCoQ6YQtEKCrGBDy5RwQhfIzELT3NxD/gm4Q9MA+EJStbxDH2mkQ8PJmEIG6ZRDJ/moQ0rR7HCAAMIACOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWji6YLM4s3K64IBlAGTAQoDMi4wEwiAAhW/I9q/FfxK278VEb7bvxXBk+W/FYfv4r8VkkfevxXqhd+/FQZQ6L8VhOXnvxVMeOS/FUbo5L8VRtTevxV9hN6/FRwv8r8VKhTivxU9U+u/FTJP9r8VKf3ovxXswey/FV4G578VKODsvxXrDeW/Fbcw678VDa3lvxVfN9+/FayH5b8V33z1vxXSIvu/FXUoAsAVvsb2vxVtleO/FTky8L8VoeD+vxVByxnAFTc6CMAVDigGwBXslQnAFXhd+78VHqMXwBWMrAHAFYEE678VY7f2vxVLWvK/FcXxGcAVyXrovxWFG/y/FdZ2AMAV0pnzvxVh8+2/FaDe4b8VshbivxXpjwLAFYm69L8VlJsDwBV8o/W/FXmV/r8VHEspwBUxwA/AFQ7aCMAVz84DwBXt4vy/Fc85AsAVBCvnvxXlefC/FWG18b8Vjz8CwBULkAjAFfRMNMAVQ/UewBUAlxPAFUIrGMAVrKcXwBWPrQ7AFUxhKMAVC7gXwBWunRHAFSfzAMAVN1BLwBVfOCjAFdolIsAVjVgCwBUUdve/Fbls+r8VJMwGwBXqfPq/FelCB8AVNgIGwBUtFy/AFX3DJ8AVId0ewBVv1SPAFWqPBMAV4uMFwBUWPCnAFT6vEsAVNscxwBUNlw3AFWun8L8Vy38bwBXbU+i/FeiGCcAVSEgJwBUBNvW/FZ/mDMAVWXIOwBWaQALAFUzS+b8V6RglwBU31xDAFZx3978Vh5cbwBVz+gTAFbukAsAVT74pwBXdDFfAFXIzHcAV9/APwBVfSQ7AFXWcEsAVddA3wBV2BxzAFcV0AMAVvi4XwBW0fgLAFRQvEcAV7G8IwBWguOu/FR8ECcAVEpyLwBVcdUjAFavcvsAVn/YJwBWZhibAFSWUEcAVEE0QwBW5D3/AFeBVScAVVjw3wBXYJSLAFQ2bdcAVxLUxwBVyCKvAFQyEtsAV6T1JwBVTHF3AFaK1mMAVYeAqwBXnrVrAFe/xMMAVnvEbwBWgnkXAFdofqsAVgggSwBWFFRbAFXE0YMAVzHFbwBVbKmTAFbHIKsAVBtKBwBUQaE7AFU+kfsAVjRc5wBVQESHAFWKrNMAVanotwBVJkWHAFU2sgcAVt10gwBWUZoDAFcnZ+r8VZCIrwBUkUBzAFdgAEsAVslcmwBWpY5HAFbhNY8AVcwwxwBVXFEzAFce3UMAV/S9iwBUmumTAFbHJncAVP38wwBVhvwvAFVghUMAVi7xBwBU/XQvAFSw3KsAVxFBXwBWzRxXAFRZ6E8AV/RKDwBXzfDbAFYXoqcAVDUBzwBW/lkDAFfE8PcAVbtduwBWzCTvAFRHIYcAVHEsbwBWGZUTAFd6JNsAVNjE/wBXFTXfAFUDaLMAVkiIswBU+OUfAFZyVVcAV4AcRwBXXIS7AFWN0MMAVaQYVwBXlyCDAFYWYKsAVNwMowBUFcnnAFcnFasAV8aB6wBXWoBDAFWxLIcAVhgpvwBWgaTXAFftvDcAV9SuWwBWRHw7AFV+xJMAVlsNDwBWNuGjAFTUNjcAVvKZbwBWSkIrAFURMHcAV7NsmwBWDR1nAFSnKUMAVB6xiwBUG
MB/AFY2hRMAV4MVWwBXJsY/AFWFcJMAVlpkzwBXrHznAFez4sMAVS9QrwBVrDlrAFdySIcAVcvAYwBVdTYTAFbfpcMAVtRiUwBUIxVTAFfC4hsAVuNRJwBViqDDAFBsIgwEQABCQCxCAAhDDoFIQ1PsoELWKXBChziAQ3okqEMyRBxDArXcQxs0OEOitYxCWjl4QhqxEEIWuHBDYxiwQsJAtEPXZORDMqEcQ6NdkEPWqXxCo+WgQz7RbEKutGBClyXYQ5qsnEIudIxCvARC9mRIQpOZ3EKvrVhC26S4Qh8kCELWmVxDk1XkQy4BAEPCuIRCO+10QsYcDELLOPRD412gQvo1REIbXHhCExgMQ54sBEJGoaxDewFoQy8IxEObuMxCgyUYQ2OobEN7kKxDnmWkQ17RMENGKTBC4ry8QwaJYEPiwJRDOmDIQiPUpEJjXQBCor1QQjtFjEPK5QRC6ywwQ6pdgEI7seRCzyA0QvaAnEMTkBRCdvAEQ9NsiEOy3ehCG1g8QrJFLELKfTRDc7ncQofQ3EMSaTxCWoUoQuLApEOzpWhCok0IQ8rc5ENKoSBCL2nUQicsUEJvoORDGwkAQzPMLEJ+IFxC/xzMQw4IMEJCzIxCI6z8Qx58IEIfmVBDm+BoQxdkEEIPdFRDl4TYQos5kELX6GRDlyx0QwPhlEKW2HhCumVcQ6asbEJH6LBCqjHUQi7UKEPbhBxCoo3UQ6NQkEKDIehCIpQwQ6e8vEPDNNxDLgE8Q8vwvEN3fThCSizQQ6Ik6ENudVBDygVEQork9EIjKPhCh30QQ1rVaEJ72bxCb+HscIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaOnexf6fhsLPNpQBkwEKAzIuMBMIgAIVhPPSvxV4mNO/Ff97078VrPXTvxUcZ9W/FYNQ178VN93UvxWY/9O/FVuY378Vz/vavxX5edu/FQpm278Vi8jYvxXtR+m/FaH93r8V5XDWvxXSKt6/FTP6AsAVVILrvxWOLdy/FZBM/b8ViRzfvxXMP9+/FR7P678V2Q3kvxVlN9u/Fa2y2r8VmEnwvxVTMPe/FewI6r8VejvivxV5oNq/Fc6U578V6N3svxUuQ+W/FbodKsAVyKQZwBU0iP6/FbQZ7b8VcVfivxWiyfG/FQziDMAVVfQIwBXr2eK/FXXe7L8VAqDwvxUBDea/FTPh8r8VZUH0vxWTbfS/FUnJAMAVM6jkvxWd5um/FQ913L8VvCvpvxVXnAbAFTT48r8VCuoSwBXB4/e/FZ+u9b8V9ZAGwBUzBOy/Ffou6b8VcO3bvxWtoQ/AFUt1AMAVf+YVwBU3XxrAFWMkAMAVwYlBwBVCWuq/FUwbRMAVDrSYwBUlrx/AFbbuOsAViVgDwBVIrkvAFd2H9L8VtzT9vxUM/vy/FZSODMAVKWsTwBVkzBTAFbDuIcAViOoXwBVKKCzAFc8fFsAVEC4XwBWEIRfAFT/sC8AVdmQDwBV4ewvAFbMPAcAVU0L2vxVJ7/m/FVFvAMAVEPD+vxVqCgLAFYJTGcAVW+X0vxVt6/S/FWk5HcAV7egWwBUsTv+/FTK38L8VcIP6vxUXUvW/FfM4AsAVR7/uvxV4mD/AFVhsGcAVGdgGwBXP/gzAFXWLOsAVSogqwBV5KSXAFWg9H8AVwckQwBWCuwzAFdY4C8AVX8YLwBXWPhTAFcyRE8AVxZMBwBUpqe6/FQlmEMAVUgYXwBVEL96/FVJqc8AVcpBbwBWShHXAFQUj/sAV+SwTwBVeoh3AFehWHsAVxstHwBUWsMHAFYCLT8AVk04bwBWOy1XAFYrBksAVeJgIwBXAmALAFQwnhsAVtwmywBU6nrHAFS946cAVg+EmwBUvuH7AFXKVp8AV5LxxwBXk/EzAFZ4nBsAVs5hPwBX6QJ3AFYrOF8AV3jS7wBV43SfAFbG3GMAVOeuHwBVLmyPAFYZwcMAVrMEOwBUO7VXAFbmRkcAV
lyOQwBVOXmfAFUAdK8AVnD9AwBXkw8PAFbaBhcAV8qljwBXdClPAFVAWdMAVV5kiwBXNmx7AFWqAL8AVupRdwBX6wT/AFT2Pd8AVzYU/wBWBJGTAFSIVK8AVR6BWwBVybgzAFXYvYsAVxSUGwBVHyyPAFTi9JMAVEJgOwBUMySvAFU6zJ8AVNIAQwBXsMEvAFQ+FJsAVIGVgwBWjRB3AFRa2JsAVfc8pwBUjWwvAFRCsAsAVOiUAwBX8WHTAFRSYO8AVK017wBV2fjLAFR0nI8AVBOcGwBW/RlvAFXvhVcAViVv4vxXUOUnAFUAgOsAVbyo+wBXzvljAFVNVO8AVzx4EwBXBnsPAFQq79L8VxGpLwBX2ToLAFU42iMAVh4MZwBVngqvAFcsGeMAVkmyAwBX96czAFbjKgcAVZv50wBVktyrAFYsgMMAVAZzXwBUJyXLAFdHpUMAVJ/AuwBVxczTAFWdtYsAVXCVBwBXaUyLAFWXMHcAVuWQ0wBWyZETAFcaDbsAV3VQawBXwzj3AFZlkOMAVhLCpwBXNkBzAFWohBcAVCXQAwBU0hsTAFYaNKsAVp0I5wBXq7mfAFVWeOcAVBbHhvxQbCIMBEAIQjQsQgAIQ7O0MEMy/BxDuwUQQuLVzEPbbDRCnzUMQj4BSEJ7ccBCpkB0QtuRhELTlcRD0gkgQ+IMiEJnychDquHAQmOV3EJiTLxDVjwYQnLgYEK/xTxC8ny4Q+7hGELmeMxC1+WcQmaluEKGSHhCb70sQqNxfEPnfLBD57VcQjpBQEK6OeBD/qA8Qru8FEIjcQxCY6A8Q9btEENKAeBC8o1IQkPI5EIqORRCFuwMQxsIbEM7/LRCimDUQ18UYENfqAxD/0SIQx59hEKLCRRDTnFUQsMpgEL7nQRCitjQQ4o5HEKbHNRDFnCgQzJ05ELayLxDk4mkQxPoMEJavWBC/jHEQ9Y52EO1/EJ7DNhC9/l8Q8ugvEJ7UYhCrzUoQwbIVEKLfZxC+gBAQk7VxEI7eBhCp1yAQuME0EISTZRCthCEQ4Od6EOigVBCI8U0Q6uJZEOWJFBDHymsQnOdmELOUUhDmz3kQjrgMEOKnKhD2xxMQxpQtEIvQRRDxw2AQk4M0ENLdRxDntSAQ06gJEJG4GhDoiy8Qv71DEKzZaRC/8wkQu7BSEN/zShDLj0MQ6+0xEO3RURDmsiIQvp9HEN2vMRD+oUAQnvomEKebQRDIrzMQzvMqEOrdCxCJ6j8Q8ZkoEI+BMxDJrjsQ45NoEKi3PxC9l0YQuPxPEPGoWxDY4nUQ0od7HCAAMIACOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWj5i63S1K6+/d8BlAGTAQoDMi4wEwiAAhXtt9a/FUOx2r8V9QbXvxUXv9q/FXuz3L8V3IDXvxWeDti/FR+P3r8VtM3jvxVeGd2/Fc944b8V693svxUGc9m/FcG54r8Vh8DavxUH7O6/FeyM8L8VG7bqvxUDr+m/FUY/478VdjvjvxVANe6/FWCk478VR/71vxUSa/O/FTmw4r8VOBHfvxU1QQDAFddeBsAVXbMAwBWPXvC/FRFK778VVFD9vxVeSgDAFf6a878VLen8vxWMfvG/FW6A778VaEX4vxWlxOS/FXzqA8AVaC30vxUVSee/FTBr/b8V53r+vxWD4QfAFXBe7b8VHkkZwBXZq/u/FaGw/78Vznv8vxVM7Pe/FWnAA8AVLi76vxWLOv2/FR5LIcAVmd8JwBUzdBnAFbqZE8AV3lQBwBWW6AfAFZEnBMAVUMn9vxWpUu+/FScSAsAVouoPwBVp6Q/AFRTpCMAVBVQBwBUheEPAFeBQPsAVwS0GwBV41AnAFZ31C8AV72kSwBU1BjnAFavJD8AVkk4FwBUQ4QnAFfHL778VTO0DwBXf+CXAFUZOC8AVOKIHwBXn0gTAFXEpDcAVD3QEwBX+PgLAFVGTCMAV1M0JwBX+kATAFWLmVMAVZ4sS
wBVVDjDAFYQYA8AVSsBHwBVgchrAFZQeFcAV1WcPwBXHgAPAFbg5McAVa9AHwBVavAzAFZROEsAVdhwxwBXtjgbAFVBqXcAVE+4qwBVXYR7AFYO9AcAVSdElwBUtcCTAFV5XUcAVbiwOwBUTODrAFYhoLMAVbOUrwBXSJRrAFcpPGMAVBdQdwBVf8RDAFaASCsAVE7kKwBW6LwTAFSVuEMAVODUPwBXH1A7AFY1m9r8VSgGIwBXC/qnAFeNnIsAVRgxwwBXjPJfAFUagwMAVdgA3wBUYAg7AFXQBSsAVOj5jwBWhgQ/AFVceq8AV0QeDwBUucl3AFYmMZsAVRj4+wBXIpU7AFXt3TcAVJkZcwBWQiifAFYbyKsAVSJclwBUte2jAFXqsv8AVQ/SgwBXoWS3AFXmElcAVH5Y8wBUKOynAFSBjE8AVnsmHwBXJ8wnAFShTSsAV26CUwBWmCBrAFRK9kMAVzlIwwBXlgzPAFS4egcAV6jmEwBXOZRDAFaxRQcAVgjoiwBVCZRnAFWqxbcAVkuuAwBWKtSjAFaEhdcAVZCEHwBWc1j/AFYbeGMAVcoQ7wBXluYfAFXU6JsAVI5oFwBX0W1/AFfWbdsAVH1pNwBXrni7AFSS8fsAVNssxwBU19A7AFWe0S8AV8UdPwBWTSUrAFU1jJsAV2pMnwBVu907AFVUpv8AV8jY7wBU/GhzAFcLIg8AVEoVHwBWkH0XAFbP0dsAVIQY4wBUcUCLAFdjSKMAV8voWwBVdGjDAFT8EIsAV+yiEwBXEQIvAFdi1JsAVzBsvwBUtzZ3AFWGWZMAVXMRIwBUSpo/AFbhM8MAVtKVQwBUiiRzAFR0CaMAVqOMlwBWLVXbAFWoHZ8AVjRdtwBUx4rLAFeyJYsAVHr+awBWoaEHAFXLkgsAVsvKAwBVVRHLAFZpmRMAV6952wBWYwj/AFd/MmMAVgMB6wBXy4WXAFWZ2hsAVPsMewBUftY7AFf+oE8AVnXI9wBXFkL/AFc9xeMAVHTVnwBXNnzbAFYFPDMAV/9OIwBV8kiXAFXEsFsAVaqk9wBUIC0LAFb3BncAVd/MjwBW6Mfe/FBsIgwEQARCUCxCAAhDozAIQjtNFEJHnJBDa91AQxcoTENWVNRDtNRDVtXwQxfteEKHpZBC3FBCIoicQg98dEM7KIRDOlmkQtNR7EP27KxCanyUQibBAEJGWExD7xGgQudMIEO7iTxCBzGIQ5NAPEMjsWRDjhnUQru4nEO2qSBD65zAQj91HEN2vJRDmzA4QoN1nEN74NhD04loQ5cgjEOrnchDr+HsQ45wsEKGVLxDL9BQQpJ9jEMrRGxD6z1UQotpcEPS1TRDoq24QxLNtEKrlIBDLizQQs6AfEKCMXhCv5lgQ/a4iEJDQdhCz3mMQjI9XEIDucRC6wUYQjoQ2EN6xPhDWnVkQn7l6EIfSUhDvww4Q3qx0EMGLDxCb1TQQiZw3EI7scBDSkWgQr8E0EOzuBhC8xmoQsPVkEOWaCRDA9gwQhqxnENrKARDTnWgQ0pZsEOaEFBCkjQgQ64pzELywHBCLlXkQ9KwgEPjrCBDuxxsQp8tdEJnwbRC4sCQQ7cgJELqWGhDt4wkQq+0OEKvQNhDqkVkQ3ZcKEMbRThDAxmEQy7JDEOC3BhCg3kQQ8L4hEKWYIhDM6nQQnLktELefZxDx0kwQorcXENf9CxCwphcQ1e9vEK6uQxCJzmgQqdQsENeYFxDvoBIQ0JMxENKxcBDz4zcQ7/5sEJ3BbhDz1l8Q+s1eELCOfBwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFowprvnNee8qnqAZQBkwEKAzIuMBMIgAIVrXzRvxWvs9O/FaOX0b8Vr4fZvxWvPty/FWSY0r8VfZ7TvxV28dq/FQk1378VFX/jvxVn8/C/FdaH1b8VLvPUvxW539O/Fcrx3L8VnK7bvxXLwuS/Fc3x6b8VOZ3kvxXpTf6/FWW+
5b8VKV73vxVtgfm/FXAZ5r8VJUzWvxUaReW/FQE3478VBpbkvxXgb+6/Ff4n6r8VT0njvxUZ/N2/FYI44b8VqyHlvxXL2uW/FRt/8r8VeULxvxXUu+2/Feo+5r8VScULwBWfDQjAFdek7L8Viq3vvxUnUgfAFWwsA8AVhooGwBVbYgHAFZfo6L8VIUEFwBVO4/K/FfBA578VbGUIwBXS2+m/FTSh778VxdQCwBUPBAjAFcT8+r8Vq3wKwBUz/wzAFcrL9L8V13/wvxX72ea/FX6l5r8VABHgvxVzY/u/FUWbBsAVo47hvxWmS/u/FbUJBsAV8F/9vxVa/CfAFah1BcAVYAoAwBW3YTjAFVl5GsAVvXcTwBVe0w7AFVPlSMAVQo4MwBWsZhXAFW5zHMAVCvUZwBWb6SLAFRJf8L8VAnT6vxXwDwzAFRmoNsAV4t8IwBW0ZiLAFf9vE8AVOpsGwBVTuS/AFUgxDsAV6m0FwBXuDi3AFY+U/L8VAfLpvxWaVwjAFdQ5BsAVDH4bwBWITiPAFbiW8L8VzAoiwBVKzDLAFXzfG8AVpqAJwBUJdPO/FcvZCcAVhTj8vxUhRAXAFQ67BcAVdA1lwBWxZy/AFbDZA8AVSu0HwBVRph/AFVTgEMAV9kBBwBWd8SDAFUedF8AVITwZwBU+HwLAFVnj878Vvw8EwBVW9wTAFQ47GMAVTvsUwBUUnue/FUJJEcAVAWkdwBWkDVHAFTV4PsAVT3+KwBVB5aDAFdEwS8AV9ccGwBXjJxfAFbv4HcAVeHc3wBVwJADAFScvEcAVDXVHwBUZHjDAFW+0PMAV/hA2wBVBpivAFXvDAMAVmEBLwBXwv8bAFZH9hcAV7aslwBX71mXAFZH7p8AVs6yKwBWf5IzAFQENW8AVkKqFwBU3K4jAFeFqG8AVlM80wBV8Rx/AFfQneMAVltQgwBUvIkTAFRRzcMAVFwF0wBUzPUzAFWPoS8AVEPILwBUjRQDAFUokFsAVk2kPwBVYRjTAFdzUYMAVngYRwRVxsjHAFYUjM8AVQe40wBUa1DHAFZ81Y8AVkhkawBUHpQjAFYyuZcAVWogzwBVTrzfAFX9tDsAV86M7wBVIj1rAFbRbIcAVPs5EwBWNpkbAFRk3l8AVpMWbwBWt/GjAFW4tU8AVCQ81wBWxyi/AFTRQxMAVjsQmwBVBXyzAFWvfRMAVsM5JwBXu4y/AFRolDsAV5Q9BwBVyD2/AFaCmKcAVyLekwBUtsD3AFVuCosAV0DVCwBUR2iLAFYoRH8AVP/86wBWrlxbAFVCqVMAVTdJrwBX5hnTAFWuGT8AVkQUXwBVKWhDAFSgtQcAVdvoMwBUatpjAFXw/d8AVoIjXwBXiEErAFcc/QsAVRENfwBV2MhDAFfXDLMAVFfaawBU0l7HAFZr5FcAVHX45wBUAPoXAFdLxmMAV5S5wwBVb1inAFfM4W8AV1sc7wBWbdKvAFfZWlcAVLoWDwBXqTCzAFVIOHcAVKzUqwBVH7LfAFRj2PsAVeQKKwBVo2pLAFcrIV8AVbfSWwBUin3jAFTtdVcAVcaEQwBQbCIMBEAEQlAsQgAIQs4BbEN6gDxCF/QQQ+c4BENT5YxDk8jwQwbZZEIKsEBDMkxAQs8FpEOryPRDJ6hoQuJRUEMDLPxCHkkEQyud1EPKcOBC6wBMQl6kYEKPPcRDJ5jkQ9OAHEM62LxCy8HgQgZoEEPHObhDU9XYQi69PEJyHMBDX+lQQv+FSEJq8cxCayE0Q6bFBEIStWxDbzwQQzLd6EIz4EBDMyhgQqq1WEIeZGxDf+xUQgNILELyEFxDxrF0Q+LtAEKCJRRC9n3UQ9451ENbdLBDe+ioQnp9pEKqjIhC58ysQsrtqEI2+NxCLmRIQlfkuENahGBC3/zIQqoBvEIfbQxCcgGsQjLl8ENinDRD8wg0Q3LQ6EPLaXhDL4noQy5QGEIPzDhDO9HMQgpoPEPPXHxC3uhEQqfh7EMLvPRDK
wxIQpeASEOX/PBCdvGcQoIp8EIGaYRD31QEQhdEMEMPOAxDBnD8QvYwXELqfBRDz7i8Q378YENDeMxCNpggQ5qgCEOWOexCMigQQzsQsENXJTxCrkWoQw6tlEI2gERDkgygQvKwLEI+WeRCA00gQwKEjENnXIhCIhgoQ3swjEI7xIBCJilUQr4Y+EIvMKBDu8jcQ4q0rEP26SBDDzloQlPxmEKPaOBD9tDsQ5PMzEKqsOhCCy2cQwMwQEPrpHhDFhF4Q9rADEMjrexwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFovL+8sdzghdLwAZQBkwEKAzIuMBMIgAIV28XjvxUR6+S/FaX/5L8VZ+zkvxUweOW/FQpE5r8VddTlvxW9kui/FQOp7r8VmsnlvxXRI+y/FUwg7L8VRo32vxVB9OW/FRKC5r8VYF7uvxUtJPi/Fd8D778VRzz1vxX6Cuq/FYQZ9r8Vugf5vxWZse+/FUzgBsAVW2f2vxVNPve/Fe5RAsAV0B7rvxWDYPe/FUNw7b8V78zpvxW2kfK/FZhf9b8VCIwhwBWB+gTAFZCfFMAV1CoAwBWwz/a/FYDEBMAVkZT4vxVRCO6/FQ9g9r8VZD4OwBV3VALAFbWH+b8VdHEHwBVCRQ7AFZzoCMAVJSMJwBUdxPm/FRBCBsAVg9MJwBXxNAnAFUbmCMAVgyUNwBX3Wva/FVi6C8AVloQCwBVZLBDAFQsk/78V3momwBX7Fvy/FdUAAcAVEWz5vxXLNy3AFb8qAsAVAvkEwBXGnCfAFZrpIcAVG9sZwBVonRrAFS+DJ8AVdFspwBUSYhLAFaNiE8AV6doCwBUrufy/FRefEMAVIE0LwBUuBgTAFV0eDsAV6MTwvxWOK2HAFYyZKMAVxUEAwBUG+w7AFVHSU8AVVtAJwBXwqAnAFa+YAcAVWmgUwBXr6AjAFdQqP8AVRIoQwBXIuxfAFTZAEMAVi5ELwBXdvFHAFccjDMAVbocdwBWuHBjAFTQXJcAVDGsRwBW3djnAFZEBLsAVYQlFwBUI/RnAFaqPNcAVNudNwBUrBhLAFX3VD8AV4CMWwBXREgrAFfoUM8AVrk8iwBXB3QzAFd24JcAV82ESwBX0pUTAFUnwAMAVDZEOwBVawyzAFQmTL8AVP2gJwBUiOwbAFUboIsAVyQASwBVE9wLAFRxSjcAVcS42wBWSuFHAFe8FR8AVaXQ6wBWTPxPAFeqaEsAVD74zwBW1jDbAFdtHisAVwZ1BwBWU2B/AFRFHfsAVJ+RewBUNymHAFUaqoMAVkJlbwBU5XTLAFXCQuMAVGuKEwBU5T8HAFZPGpMAVoCEYwBXa+xLAFYtUKsAVUugOwBUAmXvAFYqsasAV4ksZwBWkFCLAFR/5FsAVEshywBW7UD/AFaeJLcAVqA8nwBXYaSzAFYBPUsAVT52CwBViV4/AFVmKPcAVwgyswBU5UtTAFfjIAMAVlKMXwBWuq7DAFRHnccAVqwhxwBXwRB3AFXYRQcAVo9sSwBU7sEzAFYGMFsAVKzeOwBWv3FDAFdC7fsAVLFcRwBUQpBfAFaXCpsAVMYZbwBXg0S/AFT6Qq8AVadxqwBUQCjzAFZe6e8AVobIjwBWwJU3AFaJMGsAVjNRiwBWvh4XAFaISE8AVh5NhwBUJOjjAFUwNK8AVufawwBUAr0XAFdkOSsAVD8c+wBWh06DAFVFWEsAVcAw/wBXgYkDAFRTlssAV55SwwBUwpI3AFVeOhsAVhlhLwBViNGHAFV3aO8AVf7O1wBW+dJ7AFdQ5ccAVYluSwBXDoyzAFVbbfcAVlktNwBW8XVzAFbkTLsAVi3hJwBUvmBzAFbNgasAVuBKxwBU+SSTAFbEiOMAVQvsQwBWVPE/AFQVDYMAVqSItwBWONkfAFYK3VcAVyRV0wBWd9I3AFSU4H8AVMS0kwBUDei7AFWeAJcAVo4qHwBVGMVDAFbF+YsAVrciOwBVIZjfA
FfTfE8AVluwQwBUKZKDAFXt4VcAVuF+QwBXibizAFX0ig8AVPH8owBQbCIMBEAIQkgsQgAIQkfB6ENG4BRDs81oQuuoFEMjSBBDY7FIQm7o7EMWDaRCh9QIQo48ZEKznbhCA8zgQ6uwVEMjHLBC0yDkQjfV7ENjJYhCXgk0Q7egREOG1LhDb2FMQzPxzEP30AxCh0lIQk5MBEJHjLxDdmycQgJYoEKvAPBDw4HQQ65hVEIuYGRCtzxkQh6koEOvjDRCSj24Qo4UuEOP8ERDl2UEQsblXELWGbRCHuScQ/NsqEKOoFRDYuVIQ0+x2EJu3bxDjq0MQqcEaEJPxCRDwrHUQ/P0eEKqcdhDy6SIQ281HEKf/TRDqsy8Q3/ZAEMSHUBCL0jsQk7xREOHIFhDjhHEQtbd6ELjWeRD7ll4QkZVqENrgDhCW33EQ/d5cEKP4DxCKwUoQrbI8ELG/RRDJukwQ2rMBEIeFBxDtq3cQkZ8HEItkEJf0CBC3tgMQj8g+EKb2axCgmlAQ6dkUEKvMTRDcpkoQ8ustEPC2FhCMlDMQ36xvEMKJRRDtq2QQp+VyEICjFRC3wG8QgYkaEKXyNRDp4hAQ2uFoELrOHBC+zQQQrIBZEOGyHxDC9hkQ5v9UEPnIZxDlLxDykkYQofRlEPO9KBC1pAsQk88rELecexCEnD4QhN9nEMazRxCMtHAQyH8QnPFJEP36SBCvmFcQj8Y/ELOHQhCw/FIQw69LENbJexwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFo5rze/eLvitcWlAGTAQoDMi4wEwiAAhUew8+/FU6R0L8VH37UvxWSPNS/FRr21b8VNi7XvxWYAdW/FVx21L8VfUzjvxWMLNi/FU151r8VEVzYvxWwpde/FdJL1r8VJwTWvxUgB9+/Fe+f6L8VJ9L7vxVs5+u/FZKf+L8VNUrcvxW9re+/Fahv4b8VgibZvxUHHN2/FcBl578VbdravxWPB+a/FWeu2L8Vq6DqvxUVxdu/FSvW6b8Vqgv6vxUq6APAFTzHCMAVaWwJwBUF/w3AFcm68b8VUxbuvxVqEgHAFfwG+b8VU+rtvxVUq+G/FVk9CsAVhpb3vxW5ieK/Fb7z678VXzr+vxUNieq/Fb4jEMAVXSf2vxVrDeq/FdtsBcAVnUL7vxX0ydu/FUu/8b8VRJ70vxVmQwLAFThN378V61IKwBU3dQHAFba1E8AVQ0QDwBXcsu6/FdUELMAVS6IVwBXH2wbAFRQ/EsAVmfMewBXXiQ7AFRNsFMAVTe4KwBXMhRfAFb24GcAV3nMTwBUK5RvAFWEMLsAVjXsWwBW8RhTAFXSdDcAVPisnwBUK3wHAFW87AsAVAYr6vxXxL/C/Fb5zE8AVVLwBwBWi0S/AFX/+EMAVp5L9vxVEIfi/FQtJ778VFmYfwBVuJP2/FVNFH8AVjQMYwBV5SwjAFUTvJMAVW9oYwBXt0hvAFa6GLcAVhNYtwBU9NQLAFbh1HcAVaeP7vxVd/zvAFeD8F8AVsIghwBVBcCTAFflc4b8VpnTevxWVcPa/FbjIE8AVVLAfwBXPShbAFYzVHsAVd2EUwBU1ggTAFd60O8AVKOAkwBXMBw3AFdP9E8AVsu17wBWUIiHAFdjBFMAVVwh2wBXj1HfAFYoP8r8VWBmdwBVQplvAFeL9WcAVsw8WwBWfyTzAFX+/GMAVA4BPwBVPDFDAFay1UcAV/swCwRU3i1DAFTETY8AVTisbwBU3CGDAFfdVRcAV06+WwBVy7SDAFY6LHsAVBXgnwBVuTbfAFaWzOsAVMYqOwBXffxrAFRdk4MAVEE2AwBUfmjXAFSxygsAVn7dbwBXhR8bAFUX5S8AVaAwawBXoSH7AFao4a8AVoQxcwBXxPV/AFS5mZcAV4GJUwBWTDzXAFaRLBMAVOuJQwBVBvwrAFWvsDMAVygJ9wBUhyiTAFVrSKsAVCV8HwBXDxkbAFVzDQcAV
L9lMwBWjZjHAFV9PF8AVVrlQwBXD9P+/FWh2tcAV8sclwBW5g2nAFarIEMAVjORSwBUi6TDAFalZJMAVs4NxwBWFWznAFZygZsAVkxEpwBX/f3jAFUPnZsAVybMPwBUMxnDAFeVghsAV02olwBVMJy/AFcpJg8AV0aBKwBVVEZfAFY0Hv8AVIM6NwBXT7lPAFU34PsAVz8QYwBWkQbzAFbeFPMAV7NEQwBVvr3zAFWm5esAVw1h7wBUZWCTAFaNQH8AVbcyYwBUbHsLAFQQ+MMAVubMtwBVmH1PAFXRc478VU4gNwBVZh/m/FbZoI8AV2t1BwBX/hkXAFaQKNMAVmzlywBXfaCfAFSBfQcAVgV8zwBWPMibAFdGxRMAVlowwwBXzsGXAFWhfOcAVockNwBXT0jvAFd2mPsAVCJ+MwBVMqjDAFa1DNMAVQutawBX4QIHAFXo3psAV2S6UwBXpVYHAFeG9R8AVoJa1wBXv4FDAFWpTNcAVeynEwBXRoH3AFRKzl8AVDWJ8wBVOPwLAFBsIgwEQARCUCxCAAhDlm3UQzc42EJ3nSRCkohIQn8cyEKXPWxCStFsQjY95EJSANxC+hUUQwPJ1ENa5cRD5ryMQgJsyEJvpQRCyvHkQ6IogENO9XRCbuU4QmFsQ3Jk0EI2SHxCZjUkQp+tAEJ75XxCviGgQkuM/ENuTSRC460YQuYxHEISCSRDjunwQ5YlmEPrdDhC6ilcQu74lEMbhGhD+u3gQgKNXEJKgIRDfuAMQi/UcEIadFhDCy3cQjOA+ELqXLxCTqCEQucBNEJO9XxDIzmkQlZsoEOWMbBCO1CMQ8cUzEMCPLRC3xQsQwJVxEKDKMBDxoXQQhOdCEI+7VxD+yEgQ2OclEJuxdxDRnmAQguJREOq7ehDBhgYQy7dGEMS0ZRD+gzYQ8s9cENaxcBC+lWcQwelJEIbgXBCgvhoQ8Nd7EIb/NxDqyGUQtq8dELL5IhCaqToQ1c1vEPKiERCsmxYQ0dYWEI2qeBCZARCJ3k8QyccIEOTWeBD2mCQQ7sEaEIOJChDYqVoQrrNVENz/PBCwlBsQzpQBELDJHxC6oUcQleUhEM/tbBCk+V4QzNkJEKXTAxC9718QrZ1OEPjMJxDViykQoetDEPaXKxDmqywQpewyEOaMCRCJ5DAQsrZ0ENGHCBDEnlUQwr87EOuGexDcvEIQ3+NaEPnRSxDI5k0QzIBWEMmZfBwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFohK3e9uusp60ilAGTAQoDMi4wEwj/ARWuS+O/FSIx5b8VQS7kvxWGGea/Famz5b8V1JnkvxXuWOS/FdMn7L8V0FHovxXHju2/FeI96L8VIN3mvxVY9uW/FWRG6b8VH9PkvxWMh++/FQJo7L8VxfzrvxXnA/O/FdAXAcAVzxXvvxVra+i/FerU+b8VG0UKwBXXWvy/FbSj578Vol/wvxUP7fK/FWKm9b8VXRbsvxV6O/K/FbFcA8AVmp0DwBVswQDAFRCP9L8VT6r1vxX/4e+/Fc0o/b8VYuQJwBVEaQHAFfSBBcAVYT/wvxWTlu+/FZqF878VWBQIwBUgnQHAFe3IJsAVDAsWwBUojw/AFR4XFsAVEOYIwBUvAfm/FbNACsAV2WMOwBUNdiDAFeRcBsAV8BorwBWTWwjAFUG19r8V6hDzvxV5cvO/FT1J8r8VIUv5vxWtaBHAFTzoCMAVC4IowBU1MwfAFcVjIMAVyflDwBWVPh7AFfl+EMAVgNwHwBVqMBjAFQXo8b8VgesOwBUyMgLAFVqLLsAVqt8MwBUeWQrAFdntQ8AVhhsdwBUezCjAFacWCsAVI0pUwBVDWg7AFTcDBcAVk1YowBUDPA/AFe6IHsAVGvMUwBV69w7AFXGGAsAVb7kawBXd7zzAFZSMK8AVzzIfwBUkQBfAFeVNH8AVhl4SwBW/VB/AFTEBF8AVrXwfwBViZRDAFYY1AsAV
PLf6vxXT0QvAFe08KMAV3yQswBU06hHAFVDUeMAVXWopwBWavxHAFeQcFMAVsQstwBWlujnAFZ34FMAVCi4fwBVb0gHAFSKxAcAVIWIKwBVuWgPAFSQHHcAV3x4+wBUvwAzAFeZF+78V6ekbwBUrfA3AFdiPIcAVHMuMwBVmyQ3AFdfBIMAVBo5jwBWdoT7AFQREhMAVUJIcwBU2R5DAFbtAnsAVAWtjwBXCzknAFUqwXMAV8aFhwBVBU4DAFe8LXMAVKw4fwBUUa2DAFYwWHsAVu7stwBX6bCbAFdV+GsAVDBqbwBUYShXAFTsTBsAVKquRwBXIRjLAFQm+NcAVQct5wBUjinTAFePvG8AVotxuwBVp1pDAFVpndcAVHWp1wBWPzSDAFbFER8AV6TA+wBWw+RPAFclMHcAV0WuRwBW/EoLAFYpxFMAV0QQRwBUeZBrAFUaJTcAVbVw2wBXz5ZfAFUgXIcAVNigQwBVtZXrAFQNHhsAV9CuNwBV70ITAFXTPYcAVKB5OwBU/UBHAFeVLLMAV2hokwBVW+0nAFUJan8AVJgvhwBVOtF/AFchOYsAV47w0wBUc1InAFfxsZMAVX/9KwBV+jS/AFdI1XMAV3zAbwBUaOxnAFfsIKcAVAZEwwBWLlWbAFY+oKcAVPVl3wBWUuiDAFQ2wOcAVCz2UwBUHBJbAFaSAY8AV2MlNwBX69IjAFd58JcAVOVkuwBXEL2rAFZjnNcAV4pZwwBXQqzbAFdlbfsAVIKRDwBVtTYXAFZDKjsAVrq+kwBWCklvAFYJANsAVBGRQwBXWPTvAFSW9UcAVQx9swBWqyHjAFdordcAVRthvwBXDUk3AFf9gKMAVw6ufwBUtmSDAFY2QhcAVCgMPwBXkvAnAFXn3CsAV2UAPwBVoGE/AFZkuE8AVaGgZwBVXamnAFS4GoMAVi0h5wBU7C4vAFTrGQ8AVU7SZwBWb2AjAFUvNE8AV55eSwBUSM4LAFQ30ZMAVYB8uwBQbCIMBEAkQkQsQ/wEQhYMNEJjROhDyph4QxO5BEKCoWBDz5R0Ql/srEJ3BWhC88SQQybZwEL/QFxCUskQQzaZiEIL3QRCFwygQgaA5ENG9MBDbyysQheUfEMrNBhDS1jAQ9fZmEJXnCBCQ+G8QtfduEN/8IBCdrmIQ0csqEMqgZhClszUQ9tVMEMHzURCckwsQ4at5EIGpMhC27SYQmYIHEOH5HBDBMhC0uxMQ8egTEILASRCh3RcQ3dZFEJb/cRC59ggQoM9DEKDPORDdrCsQ1pt5EIzKIBCq/lcQnsYLEOK2eBDyiCgQnr4uEND3WBChnUcQgJBHEL/5MxCGokYQ7/o/EL/XUxCH9j0Q39dWEPD/dBCfuwEQhM4/EIjVbxDzoTUQqIEZEOvABRDctAIQw88lEKzDSRC9mCUQzIhGEOMBEP/0EhD5tBMQmskTEKveZxD/2QYQ8skUEJ+FFRD70VQQguceEMTuVxCb6kgQ+pRjEPLyCBDfsWYQjbwqENvwDxCw6GAQ16xLEMa7CBDKqBEQ6K8zEN7VeBCckmwQhOtuELq2IRDEqDAQrLRqEJTmAxDEyQkQwc8oEKLfJxChhFAQ4eEoENDVKhDkzy8QsKBrEKCmLRCxn0oQqoZtEMmIDBC22jMQ/IhVEIuMNhDFkFUQ0vtWEM2ySRC7jHUQ195QEIW7YRCICxwgADD/ATiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFomqPk3YW889tSlAGTAQoDMi4wEwj/ARWGeN6/FUWI4b8VjlLfvxVr6+G/FSmQ4r8Vm4zjvxXnUOG/FfTY5b8V+3PmvxXz0ei/FYPf5r8Vk//svxWWg+W/FdW04b8VFzDnvxVgnum/FYf+7b8VwcgDwBVMzOi/Fb8b6b8VZI/pvxU7f+6/FZgM6r8VTf/yvxXgr+6/FfqQ5r8Vi+8HwBWEK/e/FeeB4r8VjxjwvxXy+Pi/FYRj+L8VZc7rvxXy
afy/FQiLBMAV8x0TwBWA3AnAFW289L8VUSz2vxUlhvK/FV0qCMAVyyTzvxWdy/m/FYlnCsAVfoz4vxU1CAXAFf5W8r8VROn/vxVCkv2/FZSE/b8VODv8vxW7U/i/FYBc7b8VPAIjwBVpBhLAFURB+L8V/fL6vxWg9ey/FWQ05L8V7moDwBU3/w3AFSV0AsAVN4X7vxUCkAPAFXqvBsAV+tUvwBWvpwHAFa8rDMAVs+gSwBXmwgnAFQW1HcAVu7c6wBV3cBzAFfFjIcAVWNwLwBUNmxTAFVt//r8VY6YWwBUO8hnAFV5oEcAVC+T9vxWI3SXAFTRZDsAViOgUwBU5xB7AFUmrA8AVrMANwBVU1h7AFTv9UsAV2i4awBU0QhHAFdsXOMAVajJNwBXJCve/FWU7DMAVx6VGwBXhkhTAFXMwCMAV7ksbwBW1DQLAFeSMA8AVDPkPwBU7CQvAFXxRCMAVnCkEwBVYvgHAFQyp778VcIszwBXjND3AFTxpHMAVTZ4YwBXwE0DAFdhp+r8Viz0BwBXChyrAFWu5M8AVHrj4vxXdveq/FW2R7r8VPwgIwBWPaQjAFcPlEsAVYnIowBWGmwTAFbsvYMAVinoLwBVsogLAFbI1D8AVRJSYwBUpnwrAFd/rYMAVvVHNwBXD0aTAFQu+LsAVztgBwBXTFhrAFUeJb8AVl0gewBWbsmLAFUJmEsAV3JwLwBWJCFjAFZHIZMAVb4tzwBUYUYPAFTcsmMAVzcecwBVEn4/AFax8Y8AVOH4ewBXvHinAFae+eMAVEVGGwBV9jBLAFS+JEMAViZAdwBWB8jbAFTggP8AVCgmtwBXNMk7AFfbTc8AVZ18JwRXpjQbAFWc+Q8AVJrZpwBV9lCnAFYlnGcAVMc0bwBXDFmzAFalDwsAVSjBLwBWjJhrAFR+IEcAV3wImwBXUShbAFaFGccAV58Q1wBVJo1/AFdbXccAV08xHwBXU0bDAFcP8KcAV/T8swBX8xZbAFThIVMAVjtRrwBW6QX7AFWkBJ8AVycd9wBULP4vAFRIlRMAVEfpMwBW+OljAFVdOQsAVqi1iwBV09z/AFTWpRsAVFvg8wBUD4XnAFed/DsAVEScRwBVQtxzAFVbOKMAVcN1YwBXfFCjAFYhYJMAVAmYiwBXN5CXAFflVocAVf0AHwBWoGRnAFZQ6+sAVXBAcwBVIUFHAFWlncMAVnsg4wBVZHzTAFWZkQ8AVwi5nwBVMnZ3AFReOY8AVoW0/wBXbt0XAFTjOCsEVLOhhwBXqTZnAFersQMAVoNwwwBVchyDAFarlQcAV9Os/wBW8zoTAFYAGjMAVK9NjwBXVR/2/FemW9L8Vm+crwBWdmw3AFeTMhsAVsXAawBUWbhLAFRN8H8AVAJwIwBX1ZRTAFY32F8AVRAenwBUCSkPAFZStCcAVe1eGwBViRW3AFUntaMAVQtdMwBVDEB/AFbUGhMAV7jMGwBQbCIMBEAsQlAsQ/wEQ4KgFEJAFEPeYCBChtgkQxdUGEICeJBD1zm8Q/tBfEO/PIRCHuGcQzd8wEJfvWhCqrVYQ6O5IELztUhCOzwwQgLAFEPvXahCT2RAQlP0+EMfubhDK9EsQwYdWEI2RAxCoqEIQ9/shELWURRDpslIQtptkEIfwTRCg6UUQkJ0+ELfqdhCHoQ0Q6rd5EKXccRDB2UsQ/8cKENC1XRCV4TUQ1c02EPnTFBDXmAYQqa9JEPCAFxCK/DUQxqNwEKHJQRDZizgQz+sbEJzFXBDY5lwQ8s8jEKaVbRDs0HAQ8aspEJWeShDV5lkQs/xoELGKNxCJlV0QnMpGENbtdRC4jDsQqqhPENeGTBCM/zEQgZhNEKKadRD/3mYQ944REI/pTBD6zlwQv98dELvsbxDdwDQQvqNoEK2HJhCKozsQ/dUhELzRBhDXgmYQ5cpnEI6LHhDF4CsQgZxHEI6QWRD67XAQhZNKEMaCXRDYnSUQiIQrENuZVxCQ8XgQl9VI
EKKAYhDYqi4QxNoIENTlDRCk0V0Q6KgzEKnaaxC25B8QuPNfELu2WxDr2kQQl6IpEJvhbxDx9GoQorJPEIWFZxDXmF4Q55YqEMqiWBC04ywQztlRENbVPRCUmDIQxs5iEKrkNhDPq10QxLA8ENPYcxDkgkAQ86JNEMTBWBDhuHUQiQscIAAw/wE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaPv1g+m+78fFxwGUAZMBCgMyLjATCP8BFfIm278VKX3bvxXfZt6/FWky3L8VGz/dvxXKceC/FbXn3r8VF7vgvxWck9y/FQkV3r8VDKrivxX1B+W/FQGo4r8VmULhvxURKd+/Fbtf6L8VbKbjvxUPSeC/FdX7378Vq2T1vxUDut6/FYLo678Vv8LivxXh4ey/FZO+7b8VTS3xvxWUV+W/FTeA4b8V/zrqvxX1B+S/FTDm4b8VHHcrwBWNGOq/FVSA8r8VaNHjvxW/Afa/FQ4sCcAV4YsVwBUdrei/FYN39r8VaKP1vxV3HuK/FfOkC8AV4YD4vxXPHQvAFbzlGcAVMBnvvxVQBe6/FcsOA8AVxz7vvxWT2fC/Fc0VAMAVUwryvxUZQADAFaBv7L8VoxbkvxW52hzAFdOfJsAVDwjyvxWmxea/FcR4DMAV33T1vxWfHOm/FQE5McAVy1k9wBUouAPAFZB9/L8VLV9dwBUTiw3AFV3z6b8V7NUJwBVSFTrAFQjVFcAVB0krwBWLBBvAFcAdLMAVfFR2wBVeIwLAFfgaDMAVC18JwBVqdQbAFW74AsAVVj7/vxVDMgDAFVNUBMAV66EdwBV12RLAFceaA8AVqIwQwBUcIC7AFcjuEsAVwQowwBUA2jLAFUka778V+GQUwBVq6BLAFVPQDMAVsnIHwBUB/yTAFWVs8L8VadD6vxU4iQrAFbNrGMAV9Q4awBWO8BbAFXcWHMAVaHBvwBVQqwnAFeiDAcAVLfNCwBV9T/G/Faw6+L8V7LgMwBUjyx3AFZCEK8AVXi8vwBXb9DnAFb4BA8AVMN0AwBXBZB7AFVx6BsAVC7knwBVpKR7AFQzMFsAVkPkfwBWfACbAFbsj7r8VvSY4wBWT9WvAFRwAUcAV7j14wBWtT6rAFRVnCsAViYgjwBWs/H7AFVNclcAVSdFpwBV9YkfAFd6nIcAVxkHvvxUamgXAFfegWcAVl8EjwBW3q2TAFeC8VMAVB9MwwBVKOTHAFfDzgcAVraI6wBXW3SDAFeEJPsAVhXo5wBXpn3jAFQAsycAVYJCPwBWK2zjAFSdyHcAVZTSAwBWmNBXAFXWYFsAVGFV8wBVgcy3AFd9VdcAVdwMtwBWwbgXAFTHBAMAVImU8wBVfdCjAFaHsCMAVzLuGwBVVV7bAFfIGT8AVO9BRwBXF6wzBFceaaMAVPgAfwBUT0T/AFQ0BTcAVQjU0wBXQ1QHBFYWdg8AV1p8YwBXbUi/AFe/NNsAVmJjgwBXZcz3AFYR4i8AVB8g+wBVmTxLAFdSkLMAV1o67wBUYzBjAFQN2IsAVWjJ1wBWIB0fAFZeLB8AV41ALwBWRUlfAFbECQ8AV2pwJwBWmZQnAFUEr/b8Vp7GWwBWCMCfAFRfvgMAVhAa7wBXBTBrAFRepG8AV3SttwBUn5DvAFVg1N8AVZqPKwBWYqIfAFTnvs8AVdlSZwBXrqhDAFbWiDcAVMuYMwBW0QDnAFTtnvMAVuu9EwBV4AfS/FUp2K8AVd1Q5wBVV6C3AFZGNXsAVKycgwBWAozTAFRP4VMAVeqorwBXv7zLAFQ75McAVkzuEwBUPvnfAFa2dUsAVXwItwBWNhwjAFQdDq8AViJQfwBXrZcnAFQOdacAVW1UYwBUjyjfAFdx1fMAVZ9krwBWaFyXAFZctMsAVhVoawBVR/mvAFdoilsAV4m9HwBWaH2DAFU93RcAVCnVHwBUD5vS/FBsIgwEQEBCOCxD/ARCpjmcQtLwJEIfbXRD3/RUQzqlREMqw
IBD7wycQ7/1UEJunDxDYzk8Q38cWEPDQZRCg9wgQ2Kg/EKrxPRC700sQ2OcEENmNcBDwtVUQnZEREIzpMRCVlFoQ+eNCEMyfUxDym2EQpb43EK/XahDVn1sQoNwREKnQIxC6x1cQj+1MENfEJRCyvD8QgulKENuNVRDliBoQ1aRAENfsYxCBjgYQtYQ4EMqqHhDlgwIQ/9VNEPHmKRDG4QMQ8f1IEO66TRDf1XUQr6IdEJr9dhCY4W8Q1tcvEMn0chDPlW8Qxf9zENiWKxDY2DYQi65AEM/+OBCXj2oQ8J1PEO6/YBCZjDMQ0LgEELPHBRCfBhC6qhYQsMwxEMvSQxD6URDqgQ4Qw6UWEMehdhD/rAoQmbgFENebbhD4xwgQ+P0QEKSEcxD8yzIQjLNYEIivBhCnyTYQ99V2EN7NShCT33AQ/vFGEIW3FBCQiE4QrbUZEOF0EKORPBDVsyQQx8kHEOrQUxDvhwgQ45RxENHnOBC5rkoQ+NkdEJzkcxCFggEQgJttEKezKBD91yEQvNQzEKbVIxDA4T4QhqB1ELX4DRC+vmcQ8IkoEI6BQBD/+TMQ541fELWwMhDMpjQQrb5jENT7cBC0tT8Q78ZEEPb5ShDlxU8Qh75REIbrCxD+om8Q/gocIAAw/wE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaOSk+/L3kt3eNJQBkwEKAzIuMBMIgAIVRt3dvxVeLt6/FcUN378VcVzgvxWuft+/FVgX678V7VffvxUcDOG/FZNe4b8VifjgvxWK9OS/FTHk7L8Vos/rvxVQkt+/FUZT4L8VivvnvxUHNPq/FaLh778V2jfivxVKKfS/FS/j778VzCznvxV3dua/FRpM9r8VnbkIwBVENP6/FUApBcAVRTfwvxX22ATAFa4z578VltblvxX0GP2/FSFHFsAVCd09wBWlegTAFaGZ9L8VYmXxvxUvnfK/Fbnj5r8V0fYHwBUjTPm/Fbda9L8VqmP8vxW/a/a/FRuf7r8V0Qv4vxU85gDAFQA1AcAVYV8JwBWJPg3AFZawFsAVnMYCwBXW4gfAFZ+vBsAV5IIKwBXTzgTAFYAG/78VVt8KwBVj6wfAFX7VA8AV/n/ovxUVAw/AFeLR+r8VVWIAwBXbZwnAFdPhGMAV9l8cwBX71oTAFeZzVcAVm40kwBXO5hvAFf0laMAVjgL6vxWLERPAFZ3ZCsAVq4YSwBUIVADAFeQlFMAVGgs/wBUMZInAFVS/EcAVf8ILwBWekgrAFWACAMAVZVz+vxUYjQjAFTHiDcAV2HgJwBX8FSvAFWPZHsAVPsT8vxWYcB3AFfnUEsAVztUewBXBLw3AFQ1FJcAVfH0XwBVwBBHAFYNuDMAV3l8YwBWR7g3AFR2UN8AVe8EhwBXZFyvAFYNrFMAVcUkRwBXatBjAFd6eEMAVi/IdwBU2ZRPAFf07EMAVfq0VwBXhmWDAFTZ1EsAVC0EVwBVcKRbAFSX5DcAV5HYUwBX4zg7AFd8oEcAVt4KFwBUMVvi/Fe1i7L8VXdwPwBXTIRvAFfUcDcAV1V8jwBUHVAfAFf3nJMAVcPYVwBWhg7zAFQmmDMEV6scowBXaz0bAFfogNsAVWh6bwBWBhLHAFWormsAV6f5ewBVjDGnAFZ+IcsAV0IU9wBVJHmPAFYuk0sAV8wiIwBVtJQjAFQp8/r8VkMI1wBX9UsTAFcK3PsAVviiAwBU+oTvAFbfOVcAVapNCwBXO7LXAFSdWJcAVZIqlwBUFvE/AFb+EcsAVid+iwBWiCJXAFTLpgMAVhoM0wBWfJQ/AFYISFsAVglAUwBXXxUXAFSZFF8AVPl8WwBWlZSPAFRiJgMAVmb2BwBVkcELAFetuaMAVFrpRwBWMMxXAFWX7NsAVfUunwBV5cCzAFVYXUMAVUe9XwBWoSUzAFXDpOcAV3sCSwBXWpnjAFV6RHsAVDNUowBU6G8LAFQcGc8AVuzKCwBXKgVnAFY6T
J8AVRxFRwBUp8C7AFStVpcAVSeiUwBVYWEfAFQRcS8AVJJCawBXhOEHAFZv6AcEVf/gjwBVu1hvAFXYbdsAVTwNbwBWDnrjAFQIRRsAVHKQ4wBXjbX7AFZ6pFMAVpf4awBVXXk/AFRCvc8AV/6IgwBUTWy/AFa0gHMAVa0KXwBXxo6jAFRDUiMAV7eBVwBUxYCDAFcOzFsAVG3F4wBV3pVHAFaDBJ8AVDN2JwBUk8qPAFRW1ScAVkBa5wBWnpFHAFU+KKsAV36ggwBWSsDPAFfWuY8AVylUSwBXIChXAFQx/mMAVEKGhwBXvMKTAFYxPUsAVZjB2wBUQd7vAFa80s8AVnVUqwBV1s0jAFY99JsAVdZQGwBVb40jAFY6ILMAVTXc/wBUX1eHAFbF9IMAVBYJFwBUE0TfAFRwgdcAVJLMnwBQbCIMBEAsQkQsQgAIQgrNlEMD7dRDD2WMQjb13EPP3XhDG1DgQ2qtPEKrSeBD5pDcQlIAIEKevbBDSg1sQufgyEP/yMRD97DoQ9rV1EMOUPBDK1EUQv9ofEMvKIhDy1hIQj7VgENq2FhDW3wMQ5vAIEI2mHxClsiMQx/wzEOXxWRDg7jQQmrRHELKvcxDz/hwQspQfEJrnbhDsjlYQqcAPEJqdEBD4n1AQrNpzELjwFhDq9lgQgcNxEP73UBCPtD8QmPpnEJPuaRD4xUIQyZ0bEK2yWxDw83IQxokJEMPKIBDW4igQ7ZMnEMSYUhDx6DIQ9opfENKcQxDwgjUQrqJqEM6jdhCS30wQpLt4EKfWHxDmw20Qj7g6EO6QDRDaowsQwP8BENKgXRDf9k8QjIxqELyoBxCdzg0QjuNsEJyMXRCs6xAQ0NkhEJ62OBDCnQYQw5kcENPWKhDp7iEQzcwSENnCHxCpglwQhqMlEIfbFRCS40IQlOsGEJP2WxDfmzYQ+JcBEPaqWRCRujAQjYBCEOTICBCytHMQkutrEJ+yBRClinEQ4fMdEMb8TBD84B4Q36cOEI+lcxDkyVkQ7tEBEN3TTxCFwmMQoZ1bEOOjCBDXrUkQ+eEpEIuWRBC/4TIQ8LJAEI3CPhChykkQ7cVDEKDeORCmtlgQ5IlBEPWBcRDHh0sQ07FhEMuZehwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFov8OZw/Cx+f9LlAGTAQoDMi4wEwiAAhXEot2/Fdek3b8VvODdvxWT/d2/FT6i378VDPLfvxXzqt6/FXVl4b8VTjvhvxVTbOG/FfcF4b8VRUzhvxVxQeW/FVsk4b8Vkq3kvxUovuG/FaIS6L8V13HrvxURRfm/FUaS4b8VCmnkvxU8L++/Feor5b8VwR8FwBUEuuK/FfgN6L8V3v/pvxUD0uu/FWk36L8VTbbsvxVsRui/Fc9P5L8ViBL1vxX1W+i/FQLnEMAVubYJwBWPCAXAFbk0/b8VU+sCwBXSG/C/Fb4n7b8VnAkGwBUiUf+/FWHsBcAVpsLwvxWEFvS/FdRk5b8V2hwJwBVZ1CvAFTll7b8VCyHovxWArATAFbYJAMAVaowEwBUxkgHAFePtHcAVcCbyvxWb4vW/Fagv978VpkwFwBXclvm/FecC8L8VClbwvxUEceW/FUT4FsAVHb4xwBUbQwnAFa4+F8AVY8QCwBVNOCzAFaJJGMAVU4gLwBVBfQzAFeyNJsAVy5AQwBU2fwjAFbVGHsAVTnoVwBWubR7AFWks/78Vb7/1vxV1OhHAFWZsBsAV+f4HwBXVzBTAFTMrRMAV43QOwBUTsBXAFeltEsAV8Pn9vxUHPAPAFZ+WOMAVO4/+vxUXd/W/FYAn878VhpIQwBVGEhLAFYiGSsAVZwAwwBXccvm/FREaIsAVrYH7vxUKvw/AFfLWOsAVCFcUwBUz3AbAFX3HP8AV47QgwBWMfwjAFQXuDcAVjNkBwBVGg0TAFZALN8AV7ykgwBWPCAPAFWmtBMAVclQ0wBXKrDrAFe+fAMAV
ra8YwBWyhxTAFSodAsAVGk77vxU4HATAFfEBNMAVFYsVwBWz8gjAFRKb8L8VexkywBUuinXAFTMjRcAVxxlJwBWI8DTAFW5XNsAVxXdCwBU64mHAFXaML8AVTic9wBV26irAFXBVaMAVSdKGwBUI4xzAFT4dwMAV/IVMwBU9jI3AFWO4WMAVEVonwBW6Jj7AFfRVPcAVfTGgwBVzgxXAFapDHsAVl/zdwBUJu0TAFb8QLcAVfIU6wBWtSh/AFbXVasAVP9+CwBVafCTAFTN7UMAVLz79vxVL4RPAFewMJcAVsZh4wBUkcmHAFSQkPMAVLowWwBU+/kXAFVCkJsAVumMswBVztHXAFQcgasAVZzEawBVtJ4vAFSfaKMAVPBlTwBUmthfAFfsKNcAVduWMwBV7OhLAFbjMEsAVHJSQwBValErAFeemd8AV5zoKwBVUryjAFVmecMAVK6c/wBVmgXrAFcALcMAVZ/s1wBVQ3hLAFV+LS8AV1gC2wBWmxGvAFVlWcsAVoIZNwBUK4KfAFf0NNsAVVphtwBXVEj3AFU7sNsAVu/8QwBUGiwDAFeNkHMAVZwsdwBV+G4HAFVXPiMAVa3IbwBWbL6fAFR8xbMAV9XZjwBWbDETAFa5th8AVJxBrwBUa6UzAFS7bb8AVJAqFwBW/HH3AFSHvY8AVfaQEwBVBZjbAFeK2UMAV4KiKwBUL1nrAFVIOjsAVP4tgwBWEMqDAFa+CN8AVnP8LwBXFiEHAFZ5SHMAVqSV8wBX76jbAFd/wlsAVM/1NwBXDEiTAFXRPKcAVSiVwwBU3mj3AFc4FL8AVOzXHwBUpgyfAFa+fBsAVE/M6wBW+m9TAFSTRHsAVBUacwBUDOV/AFa8xe8AVu+SKwBUdrtXAFVp6msAVJFUJwBWBOhjAFBsIgwEQCxCQCxCAAhCpoAQQ0/4WENq1JhDl0TAQrfYuEKjGQhCayTEQ08J4EIjwKBCDjRQQhPUCEN3YHxCZy1MQ5fNMEK7/VRCv7nIQ8IoIEPG6aBDm6RIQpJIUEMS5YhCUwGkQu9EeEJ6oHRCPnyEQpM93EOrhMBDTvwsQ35c8ELXFOBDP8E4Qp9J4EIDHXhCn31EQjbAwEJ6PcxCIlzoQiZl5EPrmGRDusT8QsrFiEKeYMxDcsDoQu8QXEIDWPRCkvEAQvsJqEJuhKRDV9x0Q1uY8ELztWxC9q2AQoMc2EI+QXhC5mWoQqbM2EPuRXBDL9S0Qoo80EKiudhDA3ToQ3OlQENf3WhC92UkQ8MljEPmfBRDmnCMQwJdiENPLBRCz3TUQ8cQPENepUBDW+hAQx7gGEMuOKBC4sG0Qwu8EEIifExCR7jwQ9+RLEJTFBxDspwcQmt8zEMjnFBDw6S8Q2uAkEL+UFhDKh0kQ84FyEIiHPxDYsAgQkJJuEJX/GhCnrx0Q0NkbEJ6eSBDh5RwQpTQQy44OELCcBhDm7XEQwvo8EJfTIhDd8QcQ1pVKEKiZVxD0vV8QqIA3EOm1OBCRs0IQvqJBEN8bEMmPYRD0mSsQ5rZwEMi5ARC7w3YQo/xSELb9ORDQ0QEQipsMEOXcOxDkz0gQ6qhVEMPXVRDktFUQ0fRqEIn8eRwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFowbmQgbT1hJsllAGTAQoDMi4wEwiAAhX0Kt2/FSGn3r8V+xvfvxW5IOC/FRvX4L8VvmPfvxWLteO/FeJX4b8VjcHmvxXwC+W/FaVJ5L8VezrkvxXDSOW/FceT5b8Vnt35vxVAYeS/FWsE778VjpXwvxWxROi/Fdlb6b8VnIn3vxUsOOW/FYBc/L8V4FrtvxU+JPC/FYZM7r8VYi/uvxU0hu+/FRJK878VLp0IwBUJ5QjAFRyO5L8ViGTrvxXfzgLAFYso+78VAc3+vxVp5CHAFSDg+b8VOOz7vxXMzeq/FdDz7b8VEFv9vxVPEAXAFddh778VSvbwvxX6swbAFbdLDcAV
8HDwvxXA2/W/FaqD9r8VfGzxvxWs5/e/FWqgAcAVxlX8vxUcX/i/FQ6YCMAV3/0NwBUbrvq/FUe//b8VSVAVwBX2ABrAFXNoC8AVKssVwBUpUea/FU5vAsAVCF3vvxVebgrAFWlbIMAVuXQUwBVVtwbAFRRVC8AVw0kRwBXC3AvAFeRlOsAVaH8swBUV3hDAFfwSBMAVLw1qwBVVpxjAFR2yA8AVAGEZwBUJ+gfAFRXUD8AVkUIIwBVVav6/Fd93I8AVIeQJwBVtvxvAFZHGA8AVPA1XwBVuNEDAFeQdCsAVq8wjwBVprBfAFQLtGsAVpzYGwBXMPSPAFZDPHcAVpVMAwBXTjwHAFY0AKsAVJen0vxV/3ynAFSsEB8AVU5IbwBUFZQrAFYwyVMAVSbckwBVTYybAFWnYFMAVGd8OwBXXHRrAFaPLXsAV3X4qwBUZ+xHAFQY+dsAVBbsDwBXRnDfAFVUSDcAVYk0kwBWiLiLAFca8eMAVttc6wBXtExvAFWAYEsAVf7MYwBXH3BXAFWxIFsAV4a5QwBXb2CfAFR8QBsAVEnbyvxX3yWzAFcYuzMAV+Cy5wBWvX2jAFVwFNMAV9VAXwBUPEjnAFQ8kUMAVMCkVwBXlJILAFTzNpcAVWymDwBU2cUPAFXhTEcAV6rhGwBXmKkLAFYD6h8AVnRU2wBVOVzvAFT/QfcAV6cPRwBV1bhDAFTLxGsAVjy6SwBW9D4nAFXzlVMAVmGUuwBVYNwXAFWZiBMAVTAscwBVNShvAFZQJk8AVCmxawBUdWUzAFSHZLMAVigYewBV54DrAFYtWMMAV/rIlwBUqSZjAFQFGU8AV+RqJwBUp3B7AFWRWYMAVMXkkwBVLoSrAFdrdOMAVZ0VXwBUCF5HAFbQ6ScAVNceQwBV1QZTAFfGQXcAVc7xRwBUfNX3AFWOPgMAVn4gzwBXa+hzAFUOcV8AVLxhVwBVb9KXAFeZuScAVnzItwBXskATBFRO7YcAVtAoNwBXjfyTAFek4hcAVq1cPwBUso23AFY0MNsAVW49JwBVY1oTAFdUCoMAVIV1awBWt5EHAFTCSJcAVOIM8wBUmcWPAFejXNsAVGv2ywBVv5GfAFeJ0lsAV36gywBWkYzXAFcVaS8AVhqg9wBVL/inAFVpPdMAVRsA0wBWwiUfAFcKxMcAVy8A9wBWBHK/AFZGsqsAVR9VGwBXj/DTAFa1KJMAVQ6RQwBV/RoLAFeEXC8EVCuoWwBWhDyzAFZgZYMAVG8xxwBUrQTjAFZb+E8AVVdMqwBUIXk/AFURRPMAV5t8lwBWmTMHAFc0uy8AV5DfGwBVYOmLAFf38b8AVXSBlwBXFX1LAFSlUQ8AV5/hvwBV1RlLAFQehmcAVdAskwBVMuSPAFBsIgwEQABCTCxCAAhCLsTcQtqMzEPHIMhCow3kQ2rxWEKLEDhD18i8Q5rxzEPOeGBCV9EoQ94okEObrJRCuqkUQjvJpEPHCTxDZy3gQqPpiENPcKRDl6BIQ4ZljEPGeehD4hxIQhJ0+ELSwZBCr3TMQgoFhEN2iXxDii3cQ5NdwELveOxDY83cQs/lyEOTIWxC+kDYQhd5TEIaDAhC69l8Q6eJTEKe5MBCd5FsQ5JlaEJSWahDO5BwQ5cd6ELK9cBCE90QQlcEvEITDGxDkolEQ0swdEJPsIRCM0iwQyqkuEOTnJBC7tF8QjpNZEN3dKBCDpi0Q8L4zEPP0ORDN+TsQ94ZRELq6YhDponwQ+t1mENubDhDRp14QwcUXEMOTDxCBviAQx8EnEPTNJhDXuXQQkvVeEM/gBhDynXcQwPALEJCDQxCUnF0QxoZ3EN7mMBCG+SQQ37I0EKiYTBDU1wcQ5cw3EMHFJRCO+lkQwukCEOqIIxC/2DQQrfJWELvhCBCPuggQn+FnEKn6RxC1oDoQ6IocEPCFThDRhTkQrO10EIapORDfymcQ8dQfEKfwARCRhlcQ/tp4EMeFKhD6/TQQr58kEOiP
PRDzrnsQsqkMEIT7XxC1iDAQqotbENzlLxCo6W8Qyp41EJaCUxCb0DoQxbp3EOKbVxCjlUYQtshIELiXZRCBi2UQpLl8HCAAMIACOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWiGoLrZkeegsU6UAZMBCgMyLjATCIACFT+N2b8V7sLZvxU4Utq/Ff1y278V9E/avxXQfty/FR9a2r8Vl5HbvxWUvOe/FaRN3L8VE67cvxURr+e/FfFK5L8Vok/bvxW4+du/FaL7478V+JP2vxVUt/G/FcS+578VdR3jvxURZ+W/FQLq3r8V/afuvxXs9fm/FVeo6b8VhkrpvxWWj+2/FS/53L8V1FDovxXJI/a/FVqz3L8VtQfpvxU+G/S/FQ4RBsAVwFv5vxXxf/m/FeVS978Vu/PuvxU2PATAFeUqBMAVMnnmvxVsKwDAFYCX9b8VyIn9vxU41O6/FUY+/r8VJIf6vxXXJ/u/FcAC/r8V0dXpvxWkjxXAFREl6r8VNRYDwBXdQAnAFckp9L8VKQT8vxWfowvAFbfL/r8V/eYFwBXEdfu/FdJY978VkY3evxUntPG/FdoA6r8VV635vxWkUgPAFQLFAcAVK8ExwBV3HDrAFbBIAMAV2DMEwBW39CXAFVZGF8AVJfM0wBVHWADAFWRVDcAVM14ywBUffgbAFTLdCsAVnNVUwBXhCRjAFdY6BMAVSMTovxUkQBzAFZG1CsAVbbsXwBVykP+/FaECAMAVAtcdwBWsfva/FdGu878VeSspwBUDmjDAFXMXAsAVzucMwBXNUg/AFRslEcAVMZYOwBXKwQvAFbAxLMAV60sAwBWfIxbAFUNIH8AVdacUwBVOZgbAFUUxJcAVqNcEwBUw2ynAFWZkJMAV83EGwBUYVhvAFfp9CsAVpu8XwBVcehrAFeTiD8AVXTcCwBVncgLAFfyZFcAVv6YvwBXFBTzAFYLaM8AVSMsJwBU66v6/Fcs3678VfKIiwBUNZgnAFVd7AMAVQ43rvxWqIJfAFcOKLMAVpOZcwBVf7DPAFZr7EMAVd7ATwBVEzwPAFdYSmsAV7sRcwBVDF8HAFVlSccAVJetBwBWQWWzAFU/bgsAVVQVrwBVHAJDAFX6nwMAV7mBEwBXAI1jAFUJwQMAVKVA5wBXOygLAFaVgOsAVTjowwBUqOlHAFcm4lcAVcVxpwBXjsVXAFX01QcAVjRBLwBWi6rjAFRWBZMAVj1KqwBWHKS/AFXVXLMAVYAoowBUTIbPAFUXABsAVTloawBX2837AFT5nZMAVDsY7wBWF/TvAFetULMAVGcFNwBUi3z/AFTMeIMAVhhoawBVACwbAFfawNsAVjE48wBWPVxLAFf2HA8AVXVAVwBWfmUTAFSEBUsAVIY9FwBWtf37AFVN1dsAVPGyDwBXZsjvAFXxzGcAV/s9MwBW23z/AFdwTl8AV4KuIwBX3jiPAFZb7M8AVaicewBUZrzHAFamcGcAV3u6WwBW3d0DAFfYiZsAVm44TwBXQxYfAFbZ5NMAVyMRMwBW7xm3AFT8li8AV/5FRwBV9+SHAFeZiKMAVH/51wBVvJU/AFRDSJ8AVX0uDwBXnVDHAFewzOcAVZa45wBW0UYrAFfdfCcAVIDhYwBUAyV7AFcLQNcAV5woMwBWYPELAFWlBXMAVRT9NwBU5VU3AFe4vOcAVhnKZwBXZyj/AFdrrEcAV3ShowBVwEBjAFay0BcAVf3pHwBUqozTAFa6ag8AVVa1QwBUIHVfAFSwngcAVP4w+wBWn63nAFZqiHcAViiEfwBXLywXAFTzeFsAVF+NmwBV4SV3AFfy7KMAVzuWLwBWyKxzAFcsomMAVKZ9qwBW4KxPAFcPmLMAUGwiDARABEIoLEIACEP6FDBC1yDsQ8bpFEPm7OhDD1T8Q6qthEMznPhCU2HUQ2N8HEO78FRDMgFcQrbwcELfgZhDaljMQpZE/EJiQbRDTllMQkcwjENWoNRCdw0YQ
o6osELaZZRDX9FYQwLNMEOSjJRCp9iMQ//80EMeTCxCTiVUQvqVwEIXsTxDnwz0QmNMsEK/MCBDh3QYQ1eYhEBAQvPxnEN/PehD1rjQQi8VUEPbmeBDHjjIQ+akZENHkOxD3qCEQuoYaEPLOTRDC2jAQ8J9dEOewIhCQjlMQq4cwEJWuOhCuq2YQluxLEO+kLxDU+lAQr6kFEMrtAxDbmFgQ7YVgEO+EWRC++XcQlooyEMLyBRDhuFAQhb01EKXGehCinjgQ5sYGEI+fDxDckGcQ/4IQEIqeXhDBn3QQ28dQEPqLBxCYnwMQyvI0EJv8ZBCatQcQ46g8EMOMQRC29xQQnLlqEMSgYhCewUwQoNNQEO7kURCKgRkQ1+IDEOTVGRDKkCgQlDoQibJHEPrzeRCv/hwQzv54EOmtBBCtiR4Qn4QlELbUZRCW9EoQ5q0xEMSmNBDN4x4Q49tvEJDbDBD/4goQzqRmELOGKxDY7CoQx/4tEL2OFBDK5jMQ4+E2EPjSbhCDmlUQ9r0CEM/KBxDHpUMQkq1PEMKVSBCYtU8Q1tZVEKqRaRCS3HocIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaLqbjNyvhqeBDJQBkwEKAzIuMBMIgAIVeTzmvxWn8+a/FQ3f5r8VK17ovxUBP+i/FTPn5r8VmqPnvxWHceq/Fb7N878Vni3rvxX6IOy/FXSd6b8Vof30vxW/He2/FeTa6L8V2lPrvxUxfeu/FZVS9b8V8cj0vxW/euu/FcU/8b8VY3XyvxV/0+2/FV2S7L8VYMLpvxULTxHAFdx3A8AVwfzyvxWghfG/FWaI678VtmbsvxVcfO2/FRkmEMAV16HvvxVJ0P6/FTRBA8AVsOD4vxWwjQXAFf5f/L8V8UXzvxWVWvi/Fat5/78VfqkNwBWpNgvAFam4DcAV4Vn2vxWb8u2/FejX/r8Vg5wAwBUY1f+/FZA7878VCvIiwBVOBhXAFXgBBMAVzckHwBVppPu/FQlR/78VkU8RwBXoHg7AFVz+AMAViQf8vxUHqfm/FVRyCMAVhF3vvxUd4/q/FQydFcAVwzQvwBX2tPy/FRzlF8AVuNEKwBWvKhPAFYioA8AVk7sGwBUGt/q/FQdPWsAV644PwBVYIiPAFVW7FcAVETgSwBW6PAXAFbomDsAVu/0ZwBXgxg/AFV9BA8AV9mwPwBWqhDbAFXY1HsAVJAINwBV7x0zAFdqaD8AVRxMuwBV/6hXAFWelB8AV9fcGwBVCA0jAFenqS8AVfBIBwBV7lwzAFTAMGsAV3hQFwBWH5RjAFa43YMAVNso7wBUhNyXAFUUBMsAVvVExwBW4TFLAFTKdCsAVsgcdwBVG3wvAFWqwK8AVFKUBwBW24iHAFXTlAcAVNe5ZwBXyMhzAFXGIF8AV2h4PwBWyl1LAFaz8CcAVCVViwBV0KwDAFcVNDMAVTzoEwBVrTjXAFYAdPMAVRWJGwBUGcPm/FbghJMAVXtAXwBXEyznAFUNkH8AVpRhBwBXLO6zAFcCthcAVgvYlwBWIQxPAFQGUJ8AVuekjwBUedSrAFfzVEsAVCHkgwBX8RSrAFciyDsAVdp0ZwBW64j7AFbmkRMAV0CkKwBVnYCvAFfhglMAVNyZfwBW5bYXAFbjtK8AVwxhEwBWdwmbAFdUaGsAVni4ewBUBpqPAFdmhTsAVwxJLwBVPgBDAFVFBEsAVSDJRwBU4/RzAFcmaZ8AVmEErwBXwXx/AFRWWDsAVzX5bwBVwV3XAFW6rFsAVfDY9wBUQeqjAFaqtr8AVdglkwBVqkhHAFd5tKMAVgMbBwBXb8mrAFd8+WcAV+k5bwBVeZMfAFdfbTcAVcpBHwBXj8jTAFTa7F8AVMO60wBVMTFrAFcPzE8AVrGJkwBVZaKrAFay4nsAVjk9RwBXgn0nAFdO5DMAVVlRlwBVlftHAFXAjSMAV6688wBV+uCbAFfwUG8AVbEJ5wBUr6STA
FZJcfcAV3iGDwBUcT1zAFUQsU8AVJYRtwBX9VjzAFSh5NMAVzpg6wBVsGTzAFQeyOcAVuE5mwBVemVLAFVtTS8AVvYIawBX0gm3AFRXRJMAVt5iBwBU8qBzAFdoTWcAVWw9DwBW1WhbAFaNtH8AVfk01wBWjVGLAFVhHcsAVu8oHwBXj6HLAFfIhZsAVyolAwBWHrljAFWvBgsAVYM00wBUnVVXAFaOekcAVosJVwBUJbdvAFfU7QcAVzAoawBU5xaDAFUzSa8AVfzZZwBVHAyPAFWB3HsAVobbQwBWDbIHAFR+xRcAV41ZywBWmI0TAFRkWRMAVdBuCwBXbzk/AFdX7VsAVQXz/vxQbCIMBEAQQlAsQgAIQya0ZENjsExDV7VMQwb91EJnLaBDx418QgIopELSVFBDhwA4Q2/1EEPeABRCwlCMQk7dXEP6pcRDm9jQQ9ZlvEKrNORCvmWMQwMhSENbaAhD1dhCX+AcQ1NUXENykIxCBhy0QqqhmEN2iBBDvmWIQjqkrEJjrXBCA0U8Qt/14ELi/OhCFtUEQxoFeEL2WHxCc+hYQx99tEIPnUhDDiC8QrMFuEOaJShDZgVwQ/uRyEP26FhDquF8QvvNrEMGxORCEpxsQjrRbEKvPRhCCkiIQuvMJEKKUYRDnjicQ7P9KEI7dChDx8jMQyJYyEMDsLhDu/G0QqetCENOsWhDFxXoQpscFEL+ZXhCA3AUQz7QjEJzSchCXzwIQ7Ms0EJSOYBDclwYQ+Jk5ENP0DhDY+xYQyslqEL/PBhDg5gIQub9REJGkXxCg7CEQ87ESEPj4EhCWp20Q08xBEIuHDBCEvRUQz+5JEOS/HhD33hYQj40/ENWwAxCB8lYQ1JZ0EPFXEK6mYBD8gWQQo6ZZEPHjARD8wGYQzJFwEMTUWBDn/yIQ3OIJELCCPRDrty0Qm+k1EPS8JBDflmUQtfBwEPPjQxCS+hMQvOdQEOq9MRDnjT4QzZtfEK3zBBC56mUQjs4uEKuoGhCX+CIQ+pU4ENfGPRCV5WwQrvhJEMmnaxDP93scIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaJjz38641/mx8wGUAZMBCgMyLjATCIACFZ6T3r8VlDvjvxVlO+C/FY0k5L8Vd6zmvxWvdOC/FZaw4r8VsH7kvxUW++W/FTNd578V3p3xvxWkbeW/FYp34L8VccfivxV3eua/FSOt5r8VcA7svxVE8fO/FYSh8L8V7ajuvxUcpum/FQbD8b8VRULzvxXMW/2/FVHT8r8VzuL3vxUhVuW/Fbwd778VO8/5vxUmzfW/FZbG5r8V2P7qvxUGcAHAFauo778V5DL3vxX/ogbAFSdL/r8VXcz2vxUyjQHAFZ1T+b8VXuT0vxW8Pg7AFTXLBcAVEC7yvxXr9Pm/FZJ7AcAVaZj5vxXzCRHAFa6SBcAVJlgNwBXQsAHAFVIh+L8VM5kSwBXakOW/FVAE+r8VWQ/8vxVKxvK/FcAL/r8VvcD/vxX9g/+/FfLxC8AVwigWwBX1F/i/FUIq678VHrwHwBVihwXAFbJbVcAVFScdwBWOwBzAFfHwAcAV9MoIwBU6ghHAFc7zK8AVc6ABwBVdoRrAFVbE+r8V6yYAwBWG9SPAFR+9DcAVlNkJwBUggRbAFTsJE8AVIicRwBWKIifAFVsmHcAV1u8GwBUtQBnAFXD2JcAVMHsPwBXTiQPAFfgJIMAVjMwQwBUr+gzAFX3ODsAVxc36vxVV1xXAFe/yLsAVlSQJwBUd4BPAFcc0FcAVWO0gwBV1VRXAFdFVF8AVeus3wBVIcQfAFSA5LcAV3F8TwBUvtAvAFdVrDMAVuzUywBUDfCPAFdGrC8AVarocwBUp/wbAFeJoAcAVOmIEwBXjxxTAFRfLBMAVvOkMwBUI4QDAFfjVBMAVtXUvwBWplQ7AFTAcGsAVpMIiwBV6qATAFapOF8AVm9juvxUIMAbAFRRAiMAVdBUe
wBVt1CHAFS00g8AV+BlXwBUnN1nAFSDKPMAVvZImwBVPiovAFZtCIMAVU6kbwBXLDQ3AFYARDsAVdvAOwBXqhx3AFWvxg8AVBQh8wBU5xS/AFTvBCMAVzzNGwBWH4DXAFSzgHcAVQww4wBUwDgrAFXfNI8AVbJFQwBX9OizAFWaqSMAVXoomwBXOTU3AFQ1WZcAV55EpwBXD4UzAFXilHsAVkEiQwBXhGprAFRaiY8AV4x09wBVSeUTAFe2AosAVVI9mwBXgelPAFaNvHsAVisEcwBWDLh7AFajVqcAVTs3PwBWacJbAFfziRcAVu0BdwBWe0nLAFdKpNsAVNqA6wBUHLSTAFZamUsAVQKGnwBX8/0zAFT8ipMAVgsU4wBWoNs7AFcJI/r8VL6kHwBUeWK/AFcdFI8AVlA46wBVw21TAFYHIL8AVMOEXwBVMChrAFeX5pcAVcNdEwBVtoEXAFW49iMAV2hIpwBUAZG3AFWY1gcAVHgU4wBUK72LAFU3YPsAVzwtfwBXSIUvAFT+ZJcAVBZhnwBWtYF3AFZX1IsAVdS4fwBVEOoDAFfYSZsAVuhtgwBVQPHjAFVZme8AVIn6IwBWrRDvAFS20McAVPtdEwBWmLxjAFSk5IcAV3/yWwBU7mDTAFdkwK8AV2NFEwBUyvyPAFYvij8AVRQc9wBWTKyPAFbJnUMAVopoxwBWEi0PAFb15W8AVSrA4wBWg7EPAFQNtLcAVw6V8wBVL/VHAFUJ9MsAVkR1twBX2sEbAFQxr0MAVyD1CwBX9DVHAFcjlN8AVpJMtwBWP9y7AFbSLHcAVropwwBW6BCvAFbBlGcAUGwiDARAEEJELEIACEIHkWBCJ+UAQ9NUBEK6GZxCwkRQQydEbEKqCAhCd8gUQk7prENWPCBC03hoQn6Y7EP3VMhDKtA4QjIFGENmeexCpvi8QjcELELTlERCmgB8QhNdXEJcNEIPYHhCRhGUQpvU3EK26BBCFuCEQ5t1JELOqdBC2pWAQw69TEIi1eRClt3kQqs9hEILDSRC0iQkQyMBHELa9LRCvijsQ8dMSEMnychCClk0QxepEEPG5WhCDtRYQ84RaEIyjGRDMURDvxmEQrIV2EMCaHBDuzzEQw9dzEJ2+bBCh+2UQ5qZ5ELXNMxDBuEgQqtBWELmLUhCrxUoQwM9pELfhcxDjunoQsoJxEILbWRDGy00QhoIPEJXzdxCZtzQQuPIPENCfLhDFshAQ/JIZEOmjMhDq0BoQ5PkBEP6HBBCemAcQ//NIEMftFhCwh04Qy8AgEO7tWhCtui4Quo9EEImVKBDcSBDKpHQQqOE4EMe2KhCp610Q6JRKEODucxCElhoQiKNrEKHeYhDMtQkQvqlyEI2bShCl9BcQ3tZhEJPFcxCg+1UQxcUXEMGBIRDh/kIQ/6wKEOX6IRCNp0QQp/YEEI2zDRC2zyYQz84oEPeyNBCc6zcQjoZUEK7YYxCq8VEQn68MEJOYPBDLnVQQzcdCEKbQThC7sVUQsfZgEML/chCHtXscIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaJCZtaW4qJXU0gGUAZMBCgMyLjATCIACFaMl4b8Vit/ivxWrjeG/Fbvy478V4lTovxUdI+K/FU6j5L8VZk/ovxUWVfO/FYit6r8Vh8/svxUp/+q/FVIN678VNerpvxWMDuy/Fa1g678VZrz0vxXso/q/FbnS878VZQLtvxVJjeu/FZQ99r8VOabyvxVo5O+/FYf0678VmVHvvxULXu+/FW5s6r8VBqL8vxWmKve/FeNlAMAVaGjzvxW3Zfe/FUSx9r8VrI8DwBX7aQHAFUDABMAVJGgGwBWnNPW/Fa0I8L8V0qoAwBV/whDAFWnxB8AVSVX6vxVQ2xPAFS8g878VI/b9vxXe6vu/FeS9DMAVU00BwBW6t/y/FTr7IsAV7BwKwBU6jQjAFR0o8L8VcWPuvxVWs+q/FVTSCcAV3Iv+
vxUJ8gXAFehhBsAVFcQYwBUizAjAFR7d978VkgmQwBUEKjbAFbJAJcAV3Zv8vxULBvy/FZhYDcAVhYoIwBUpGxnAFaasIsAVF/wHwBUGoyjAFZiFF8AVoRMxwBUcr/a/FSnSCMAVATn7vxWUbPW/Fe5KBcAVik4EwBXJIUzAFVPNGcAV79JlwBUdvCrAFSY1BcAVktgDwBWQgVnAFVwHFMAV2r3/vxUNYiTAFfuDDsAVgmdIwBWpPAfAFVUtG8AV0wkVwBV/nhnAFVkNRcAVz4UHwBXLkSDAFW6yAcAVFWgtwBXSfIzAFRxsMsAVSagcwBX8ASDAFXl/E8AV47cWwBVlIx3AFYgkAcAVGGITwBW81oHAFWzQD8AVV/APwBUgMAzAFXjXAcAVz7MOwBXp0UDAFa3gIcAV4XoXwBU7xSfAFZtPMMAVIoE0wBWjUiXAFbiOFMAVi3v5vxVN9F/AFYHdn8AVFS62wBWfYHHAFShIZMAVP2ZPwBUsUCjAFa6uWsAV9D2YwBW8mzjAFY6TB8AVpdw3wBXmCpjAFfp9X8AVQhImwBVkviLAFSPtIcAVOE4nwBUx/2zAFccZGcAV+optwBXB00vAFQacW8AV7P+ZwBUmBknAFTZ3l8AVjYWHwBX9CDjAFTTFJMAVpKNZwBVGBBTAFSuRWMAVZcFMwBUwP0DAFU6DDcAVfDM0wBXKX3PAFROTDcAVnjGkwBWmfHDAFSJBnsAVWdIOwRWzpXrAFS4SlsAVVCqkwBW+QU/AFfZXoMAVZ0+UwBU4iZXAFcchHcAVBguEwBWQqV3AFVGJs8AVFRBNwBWPbRnAFfmti8AV3cYewBWr1U/AFRnOD8EVobUcwBWSJoHAFWk1rMAVmhCkwBWI9R/AFQOVFcAVcNk+wBXm5mLAFaLkesAVH0N8wBWD7onAFQtXV8AVyb+DwBWd8WLAFdBHHMAVclRwwBUU8UDAFc1ThcAVYuEtwBWckU3AFTOgTMAVmYg6wBUpBJ3AFU2JjMAVQA1IwBXaA1vAFWrHOcAVcfEmwBVFXIXAFSHEO8AViHo7wBUOTk7AFarqJMAVpgUzwBWyB5TAFfwMLMAVSEQawBVxIDjAFWSdX8AVVYowwBWWfYzAFY1wj8AVOOwxwBVs/lTAFduKVsAVNJUkwBUizh/AFZ0WlsAVVwZ5wBUFlETAFbZ3HMAVYSNowBUSHEXAFcmeVcAVU3y5wBV9hm3AFdIMQ8AVFFdHwBXaaDvAFTrtXcAVn6xNwBVN+TDAFRUOdsAVuIy+wBUjbCXAFR5BOsAVCWyAwBU4KGrAFRcJ/L8UGwiDARACEI4LEIACEO/nNBCs/3YQy71eEK/BWBDyoh4Q9JQEELShWRC87loQ7rRNEKitLhCr8UIQqtQqEN+gUhCV2FcQ3c1yEJbPeBCG6SsQnOULEMKjXhCru2gQotohELmIDhDlhmoQiLIbEOvxUxCh3VkQw8MhEN2YNRDexDAQucA2EKfbYRC9m3sQ/qp0EOHgIxDm3C0QlIgcENz1EBCuuBEQ06ZAENDLVRDg3E0QptlgELumCxCHywMQ9+MXEIilXRC8kCMQ3qJiELCPHBD4q10Q07smEKe/NBCXzAQQ55N1EIygWBCtrXIQ17pEEPTjLhCp2zkQ8JszEOL8bBD120wQkpVrELWNdBDhrVkQv88CEPSmChCR7Q4QhfRXEOi8HxCCpAMQq0gQ/P4IEJWFJRD5+2sQhJx3EM3JBxCBu1QQ/I4IEJn2ChDa624QiYtZEMj1eBD7x0kQt8lbEKq2YRDxmScQioYXEOjBLBC21hcQn7xTEIHUbhDU5QgQ98cvELCDWhDznjAQq7EREJzmIhCQjCIQ96wKEKT0CRCZ7yoQuc8fEMvuZBCYxT4QvIcmEJGlZBDG0lUQmdxuEJ/1JRDBkWMQoZ5KEO7YPRD4kC0Qve5sEKW3TxCN4GgQ2IY8ENyqcxCliFwQurksEKiAQRCO3EEQ/sZFEJ7DZRCM
uVgQ3JJTEJa/dxwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFokbPeptWQiaGJAZQBkwEKAzIuMBMIgAIVqbzdvxWvuN6/FRJk3r8V9mHfvxXoCOi/Fed2378VkwDhvxXXNeC/FZLC4L8Vh73ovxWkou+/FTI85L8V2O/hvxXAiem/Fbb74b8Vd9bgvxWtEfi/FcrF678VWsjhvxV13ey/FTqL6b8VdGT1vxVAUPO/Fb9H6r8V7ajyvxW22Oi/FX1Q5b8VE2frvxWg4/y/FaUO5b8VQrnovxWy1+W/FXs/A8AVbl37vxW1XAfAFbMxB8AVJSn5vxUXLvO/FcYK7L8VCYYFwBU9iv2/FeFo778VchQPwBUoQgfAFQHqIMAVKSICwBVRofm/FWXW6r8V3ZoTwBXgjwnAFWR6B8AVkooEwBUJ4Pq/FTXXKMAVVfjwvxVImQHAFWwi9b8VU88EwBVMQzvAFSPE6r8VM9HqvxUzq+u/FQLP7r8V8jnuvxWa0yLAFVpVCsAVek0cwBUv4ATAFfvvDMAV+o8HwBWc8x/AFV2QCcAVMGALwBWye/q/FZeA/r8V/vr8vxVN5CTAFSzk9r8VumQYwBWEMhXAFdKxOcAVDW8hwBUJpVbAFau+778VwmUQwBWNzhvAFVXrG8AVJScJwBU2+FzAFaxvL8AV4uElwBUSxArAFSE3CMAVkzEswBU2bx/AFe2R8L8VrkkDwBWzJCDAFchYLcAVaSkMwBXZng7AFWfqccAVpikrwBVF/yfAFa+jIMAV9ig3wBWShAzAFfDtKcAVA1UtwBVk4xTAFUU6D8AVQApIwBUe1RHAFWqnAsAVZO4CwBUXdQ3AFT0wD8AVwkM7wBVIOUXAFWor7r8VteAWwBVcCBPAFY23FsAVQMV4wBXYdwXAFWFQBMAVMcH+vxWrTBLAFYkpl8AV8GpPwBXOmErAFXfNkMAVUv8vwBXi1GfAFWTNi8AV8ecNwBUTeSfAFWccEcAVDmmMwBUnjxXAFSYHfMAVn6CCwBXV24XAFff+KsAV3eszwBXlsj7AFdX3ksAVtp1swBU8fhHAFW6pU8AVgStVwBXzFTrAFSsvc8AVKg4vwBW+50/AFRW1V8AVpBMswBUWkqrAFb5iPsAVQuxFwBUb0mvAFetZhcAVk2GIwBVYwHfAFRr0NsAVJltywBVOMLPAFWpG+L8V2I8HwBULjSrAFV+WMMAVm05EwBWBEVPAFcyBLMAViDRDwBUoQXjAFcuaP8AVNsFzwBWE0V3AFa8TU8AVsV1mwBW3ImXAFRyqLsAV8aaWwBWRFBLAFabeiMAVsgMWwBXbnEHAFezfTcAVviuTwBXM8TTAFbUpCsAV8Yw1wBXcgUbAFVZDNMAVsmIlwBXQQUTAFR6qU8AVJce7wBXTIEPAFdL3FMAV5NZxwBVGqoDAFRqrfsAVbyt6wBUrEmrAFZ/bPsAVm8aEwBUvMlDAFeEGXsAV0LkhwBUr/znAFU1oX8AV+oIlwBUwHBHAFbzmjcAVeLGFwBU/zqbAFeEJacAVGps1wBUPjmbAFTUuZMAV8iY4wBXJmIfAFW5+jcAVrIzjwBWdcT7AFaZzC8AVKVVNwBWvXAbAFS/9ncAVat2JwBUfP2fAFbQKecAVA/0swBWrLIjAFV1hqsAVX/tlwBUjLXPAFVHrP8AVMYAfwBXgsknAFY0sOcAV/mEuwBXtfRTAFTLKPMAVimhCwBWajK/AFTRUecAVnV8OwBXG9zbAFX4up8AVMj0bwBWmFT7AFZFLOcAVadAWwBQbCIMBEAoQjQsQgAIQ1rREEOL0dxCd8VsQuJ5YEOL5ShDC2i4Q1PJXEPfGcxDViwkQ0KA3EKOWIhDUw1EQ2tdZEMKvIxCXhjwQ7JsEEN+ODxDn9QoQsu8GEOd0EN6qThCEglsQ8YwIEJHrLxCz30sQtL0gEIbGJBDK+SoQ0qAqEJiWWRDpoFcQqdd5EPSGMBCOhTUQmMJy
EOm4HRCb1D0QrLMCEJ/ZIhCcwGUQ9uIGELCOcBCI4RQQh/ACEP2xTRCgzTEQ0McZEN7ecRCelRsQ5pNIEK2zShCRpRcQ7tRCENzeZRCY3k0QlrcKEPn9KBC22wwQ8d9FEKCoWhCl/jQQgo1VEPmpDRD4wHkQ36NPEKKJZRCcow4QwPpyEOSybBDM+GgQ+LIoEJT+WRD8uVEQgZ9fEIaLTBCxx0AQkIVoEJCrKxCKiRIQj7pgEISVWxCC6xIQ2LxaENCQShCFnhQQv71YEM7gPhCVvF8QxMRIEIz5AhC4jhcQ4sREENnfHBDfmAgQw6gpEOWFGhD0qRoQlfQaEO6ybhDzxz4Q9tx3ELijHBCs5zQQkq1nEMWIAxDouWgQ9tdcEMb3LxCJi0sQvf8fENj5VxDwigQQ7NxuEL7tIhDPjjAQyvskEJHjJRCA/2QQz81VEJ6TOxDUoDkQ7LwmEOugaBDcmEMQ6KNMEK3MYxCKwW8Q4P94HCAAMIACOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWjyrpzCuvb5vOoBlAGTAQoDMi4wEwiAAhXUo9u/FZmj3b8VNZbdvxXxD96/FWJ63r8VHLbdvxXi7OC/FeeI378VUujjvxXpEOS/FVaC4L8V7YrevxUeSeK/FYYy4r8ViTPlvxUpXuK/FfVe5L8VmtgBwBVh8+O/FQN56b8VqV7ovxUFceO/FUUv4b8VMo7zvxX5fvO/Fequ5L8VYQTrvxVnYgDAFfmq478VRCH1vxVOouq/Fdwc5L8VTwv1vxWHvum/FckD8r8VbH8SwBUqtAPAFbTIBcAVhk7mvxUlQfm/FXDcA8AVEHTsvxUWUeq/FRsD6b8V4gXxvxUZEvC/FdO07b8Vm6YBwBX5zg3AFWOl/L8Vs7H3vxW7Y/e/FSp+CsAVUO73vxW6fvi/Fa75AcAVeHAVwBWl4OS/FRHx7r8VT2oEwBWzTP+/FaPSBcAV6f8AwBVvgvC/Fd7KAsAVfdhPwBWXNQLAFblIBMAVOuXqvxXaJhXAFfvSAMAVuEsYwBXT+x3AFe5bFcAV9W8PwBV5eBTAFaATHMAVPGQGwBVLx+m/FSC7EcAVfBMUwBU0bhfAFfGLQMAVUAAFwBXmzvO/FU60CcAVEKkrwBWzKu6/FbuS678V1X0XwBXyevG/FQgX/b8Vq30awBWRZPG/FeWvMMAV2I4PwBWg5AXAFUVeNsAVJ4QTwBVq/RvAFfKsDcAVNykNwBWbzAHAFVfQIMAVrEQHwBWxUA7AFSQGD8AVsaEBwBXc8gDAFSfPHMAVSK0dwBXYpA7AFY/TCcAVrsglwBUlrh3AFezZD8AVm23xvxU9+fO/FZ7a/r8VMcgTwBXNKQ7AFYFVAMAV1o8SwBVXGxLAFa8oCsAVESpUwBXopAXAFRkx8b8VX/M7wBU4H0bAFbG1BMAVehZUwBVJnYnAFc+bKMAVGRIcwBVnsQ7AFdqMsMAVIV1NwBXaQe6/FadiZ8AVEPbzwBVFXkLAFZwilsAVQQpGwBXMMzjAFVHdKMAV36E/wBWbQxjAFYx4KMAVx5RSwBWgqmjAFR9UZMAVMu6hwBWGkrvAFc7hcsAVBCMpwBVdRLDAFVxfAsAVJbP0vxVhnCnAFcS/psAV4lyNwBW7llfAFV8fcMAV85s4wBV4MWTAFS31g8AVH6gHwBUMiAvAFUX2gcAVx8mOwBW9pSHAFdehN8AVPbPZwBWWeG3AFV06NsAVPZgTwBUVIDbAFb7C/r8VnYEgwBWkbTnAFd+gFsAVCFwNwBWCRkHAFb21K8AVfpBZwBUSiDDAFWenCsAVXNEYwBW/mpDAFShHZsAVXI+WwBVrtmbAFQ3VL8AVZeAnwBXrUFHAFSgIQ8AVu3NgwBVOwIHAFR3oisAVGIeEwBWRhx3AFRQcM8AVePVKwBX4g2vAFcZ+O8AV1jwiwBWl7mDAFeebjcAVcSMzwBX1o8fAFVmjEsAV0zxHwBWAHxbA
FdkCQsAVubSywBWWbBDAFcbbFcAV15wtwBXyTrTAFYCMJcAV1yk2wBVwE4PAFdZ2H8AVIc1MwBV0IR/AFRZ/UMAVkfNKwBWJdS7AFUgIKsAV1RSDwBUhQxnAFWp+KMAVemU2wBW2FmDAFYk9McAV+goQwBUGLBLAFUBQD8AV6ZYfwBWd7TnAFaMKT8AVGFYfwBWEVx7AFSnEGcAVHm01wBWkLCnAFUwdN8AV3ESnwBUJehvAFdh5F8AVWQlVwBUUaLfAFbGihMAVC0tiwBWZ9/2/FBsIgwEQBRCUCxCAAhC8kEoQqd97ELmbGxC90RMQhfUyEIiOFxDS7E4QoNsuEIXLEhCQj3IQ5ckIEOWIGxCghUQQ+cdiEOmkThDo1QwQi/VzEM/RLxC1zU0QgrksEK65QhCopiEQp8tYEPXlCBDM1UUQv9k5EILJLxDPkF4Qs6ksEMW4NBCF/DwQ3HkQsJwtEJvUaRCS2TIQi4JJEPqVRhCi8kcQgPgQEOScdxCQxj0QkdQsENjcPhC/tgUQlqlHEJa6URDR/GUQsJNbENC/GhCkvhsQq6YJEPTDTBDC6zEQvYQgEOjZUhCyuCMQz6AnEP/BKRDJ9xMQpM0xEIneOBDDungQs99dENTseRCLhHQQhZoyEN6QDRDZ81oQ2qdgEKSLbBCy6kcQnpIVEKeJYxDv+VYQzLkJEPfwbxD24hAQ3f9oEPyGZxCeiHIQ7Yp3EIe+ThDa6W8QvpUxEOXCBxCqwxQQh/cUEMXqahDvp28QvG0QlrYfEKXiFhCOqnAQ+NhVEOGxJxC5+mIQrexpEO+yJhCG0AQQhd0BEM66GxCCqm0Q58MkENbgCRCqpTgQ1NAeEOH0GhCO8AEQx8YsELC1VBCA00MQtNtlEMfUJRCn2iYQvvdqENb0RBDWizgQsJl4ELu6ahD9nzQQ55VTEO7zOBCu/mYQgaxdEL7+VxDQnGcQn5d0EOfKeBwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFo4qvbk6eF55DWAZQBkwEKAzIuMBMIgAIVrfvUvxX1jdW/FbWt1r8VSJXavxULOde/Fdif178Vf5XYvxWSe9y/FRjF3L8VIUTavxUOi92/FbtZ278VUPXkvxWcHtq/Fd4d2r8VBVPivxXy4eK/Ffgq3b8VebzevxUbIOi/Fbk1978Vk1nlvxVTreu/FQ2t3r8VkE/cvxUwueu/FUgt878V2KrdvxVkJN+/Fb0b6r8VcpjivxV16+i/FcAs578Vn+XpvxUpaPe/Fd4g4L8Vnsf9vxX9POi/FbcU7r8VcBLsvxXF/wLAFYpLAMAV30X+vxVk0Pq/FWuF5b8VffvxvxUkPg/AFZH1/b8VOQ/tvxVLuuC/FbXl6r8VtJQlwBWhtPO/FR7wAsAVzoT7vxWMaQLAFdnO7L8VHYzivxXiTvW/FcleCcAVyGIGwBVcXQjAFQszAsAVczPtvxXzVvW/Fd6dA8AVl8bovxUxWeq/FUEDEMAVMd8XwBVRsQbAFeF7/L8V+AEGwBU+81PAFScd/78V9gsMwBXWWgbAFRp19b8VjPoBwBXToPa/FbAKIsAVptkewBXeEwfAFbiyFsAVaVtNwBVAcwHAFZsxCcAVzWr7vxXR8jPAFa49K8AVxaPwvxXZdgXAFdZqBcAVAFQ1wBUSZg/AFe3zGsAVDGcGwBXF+Pi/Fa5k7b8VlVIiwBXJeAPAFZr1AMAV0JIdwBWw41PAFV7tRsAVitNEwBW3nhXAFQGgH8AVNbgIwBWFuB7AFRzuCcAV+EMNwBXbcgLAFbJM/78Vj1H2vxUn9w/AFQB7/78Voa0JwBWu4fy/FQ3VC8AV7nYNwBU5KhrAFY1OKcAVsJAawBWa1RzAFaATLcAVb8w/wBXj1yLAFRA4778VNKkJwBV1AiLAFeVqK8AV/GMNwBXl8RPAFWc1IcAVxNoAwBUNzGXAFVTeEcAVMAMiwBXcpCbAFU6UXsAVuJo6
wBXK/AvAFYGvKcAVQQ8YwBUsVFTAFZPfCcAVqQxswBWJbYbAFTSxScAVSFJFwBVKyi3AFUrXQMAVx1YQwBXOuDnAFYJ+R8AVH//6vxXCtTfAFWROecAV+NK/wBXHfb3AFYkOd8AV2bsmwBXuvlLAFU5gjsAV5JZEwBVtGAzAFSnTd8AVByRwwBULi3zAFa3lV8AVmE0TwBXtihnAFX1cDMAV0D0qwBU4/R3AFVElBsAVeb2DwBVNdYrAFfQMgMAVsJWLwBXvvivAFceqDMAVjc6CwBW72DfAFVFkFcAVap+WwBVyLk7AFbA2fsAVvqd6wBUav2DAFY6rY8AV23omwBWelzLAFbujdcAVzykywBXy0x7AFZgZX8AVkb2rwBUxpHzAFe8MW8AVxJghwBWoClzAFeIWdMAVrX8BwBUjVJzAFbhoO8AVJuZ1wBWI78HAFbkqhMAVG9GgwBUwYXLAFSZkUcAVtzAqwBX+xUjAFRn/RsAVue4/wBVCbzbAFfsg6MAVjy0+wBXv6HnAFXx/YcAV3RAKwBVMXGTAFSIdecAVjrkFwBXKl1jAFYgcdcAVUDMMwBVy0YTAFZfyTsAVlvMUwBVzSxrAFYvZFsAVVr2VwBW7K33AFd/xDMAVRnTJwBV7twjAFRF3McAVEwwTwBVW/SLAFV3SEcAVmq0cwBWWnibAFXLZrMAVdcO4wBU6a57AFaN9a8AVO2YrwBXyyk/AFXWfX8AVfvJhwBXO5JTAFTdfoMAVVcFnwBQbCIMBEAsQkQsQgAIQ7oRIEMv4URCwoEoQpYh6ELS+JhCdoiEQt8suEK+gAxCwz2AQopMZENDNMBDUsmkQnp0eEJ37EhCioXMQsLouENSeDhCvs1YQx6cUEJmMChDnoG0Q7+4mEIKJchCjw3cQqK4MEOuDZRCeoSMQ4c0zEKHaJBD5lEEQ8KhKELKbdhCnlW4QxcZUENb1CxCnpBkQo5FgEKLsEBDpt1oQqLsCEOmoJxDBpjoQppgbEJCkGxDIkDAQx8cXELW6MBCR6mIQgO4kELz2LhDBtwkQwcpyEKixChDa6kgQjc4lEMWqSBCZ/UIQv6JJEPrjMRCWtjoQo65jENavVxDp5HMQ7bV5EPCxWhCrqXcQ1IE8EM09ENvmJhDBj3UQ5LkFEPVGEOiWSRCigCoQptkQEKvJMRC041MQy75zELLhLRD13HgQx+4oEMazVBDM/2cQ/+ZyEOjYFBDVnAEQv91TEITADxCJmRUQ+to7ELyOdxDMxlIQnsELEI3udxC+yQoQsaYDENaPEhDQhQkQ5+5QEM6tVhDQ0HgQ0qdnEI7eYhC3mR8QxtQEEI26aRCzzEgQ/INTEJL6IxCq+nYQj+YsENLXQRDLkGoQxvxaEOuvLRCA0z8QhI8VEOGXYRD5u0wQqSYQnJFHEN/mQhD3skUQl85GENuJZhDenloQlNBsEMaZehwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFopojmqsLlm7Z/lAGTAQoDMi4wEwj/ARXOvu+/FeDO778VBvDwvxWWrvG/FXDa778VhfbwvxUuIPG/FagR+b8Vu2X4vxXHHvK/Ffz4778Va6LzvxUADvS/Fcgu978V95nyvxUGGPy/FfZL/b8V2IP4vxWGOgHAFaph/r8Vr2TzvxUSXvi/FQXO/L8V9qn0vxWICvu/FXMJ/r8VOpj3vxX07gDAFeY/+L8V/qH5vxXKFvS/FTe2AMAVYOMGwBV+oADAFc7wCMAV79wXwBVlZ/+/FZ2uBMAVug4JwBXQqgTAFZt1AsAVsn8AwBWGzP6/Fc69/78VaZoEwBWsvQDAFZxM/r8VincBwBWXgPi/Fc6NCsAVK2YLwBUzwQPAFaSHC8AV8LkKwBUEnhbAFYR0EMAVIK0LwBV3MAvAFXX9E8AVskoHwBXmpgLAFQ08+L8V1Y32vxUnrQ3AFSA/G8AV83IMwBUBmxXAFf3oA8AVkywKwBXJkyDA
FbBxC8AVc6sowBWIDI7AFR9dMsAVjtgUwBWFkxHAFZEKIMAVzyw7wBWGQyTAFaLiCsAVxe4FwBWr4wLAFYqlDMAVi5oCwBXW/BHAFaE7C8AVKCIjwBWyYijAFXuAKsAV4DINwBVt6wbAFYKzCsAV5LI3wBW71QLAFdrRA8AVpIYBwBWcNgnAFQxSAMAVIAj5vxWD6RzAFaRwGMAVqMUXwBW9/RjAFb+hCcAV6IcMwBVu/SfAFWAFFsAVcbEowBV/+BbAFY4FNcAVfs0uwBXRNhLAFWTpPcAV/jI5wBUo2R3AFd11QMAVmRIuwBXSYBjAFenFHcAV5d4bwBUjsQjAFaaBD8AV+swDwBX3WwjAFQIcCsAVzhIcwBU3Kfe/FZrzHcAV1eMhwBXao4jAFWOGOMAVqoNKwBWwgVjAFVo6acAVatxLwBUR+5vAFdzICcAVJ0YuwBWb8R3AFQLCjsAVsYJDwBUYSErAFZVlVMAV0d05wBXCdYPAFZF+k8AVk8aRwBVyZDLAFWYaQsAV3M8awBWqOCzAFYx0JsAV8vdBwBUP3JPAFRb9OcAVOy6bwBXeiHjAFY3nX8AVKT1dwBX/CiDAFfSWDcAVoqpawBU6izDAFb3WOMAVq6cHwBWv6CPAFeL8D8AVP+wewBWDqyXAFbwSH8AVoaAVwBWw2D7AFdpNH8AV5/AuwBWHxCnAFaSBKsAVEvAqwBVhSTLAFX1zWcAVS2QQwBVgRXXAFWHLWMAVld8MwBVCd4fAFVOzJsAVfwuFwBUv1HjAFfOFNcAVYII5wBVZj5rAFfQYHsAVFZVXwBVhiBDAFUW2GcAVfmcQwBWAkUrAFdBcDMAVjsMAwBV9niXAFYSHs8AV/T5AwBWbSk7AFbZYUMAVnUkwwBWa4kTAFbRqacAVa9gkwBWCwzDAFVtYNsAVybs4wBUU84HAFV/nVsAVQ4+QwBV2MTLAFStUrMAVMNeawBWv/4PAFTTUrcAVtjZEwBVhegbBFS6FZ8AVkDVFwBX29mDAFXKKNsAVNTdJwBWOZojAFWtRhsAV5F6KwBVC0uvAFZe4NMAVv7w6wBU1yFXAFcDNb8AVqrFcwBUoSTrAFbN4KsAVDbKqwBW2loHAFRfHNcAVvnQ4wBXKtE/AFXlUMcAV0qkVwBUQ0TzAFRnnIcAVHnwjwBVmoAjAFU01JsAVzJVwwBUtYGbAFS7IJsAVwoSvwBWV8izAFUWv5cAVmdsywBQbCIMBEAUQjwsQ/wEQ57pOEKehEhDIvFoQ2bUxEKCvKRDps2QQmfp4EL+QdxD74hEQvdZUEK7NWxCEoy8Q3LI/ELzaBBCGxF4Q8/9VELaXXhCws3kQwYUOELrHcRDTsEUQuddYEOGAEhDLvEcQm91REJHeYhCctEsQmtVdEI6PXBDk/DAQ99BEEIDvYRCPsmIQvpIrENHRAxD48GcQjvN2ENjOEhCokREQ3ZxNEJLrBxDfvGkQ1pAXEPbzChDzqGUQ7tIbEJ2pHRCA+zwQqpwWEOblNhCwpHAQ98IeEPjSKBDulCoQj64EEPTZbBCQrTUQ6bs9EPbzNhC0q2MQg4pDEMusXxCs4V8QhvYPEIirchCngHkQodoFEJ3NNRCtmyYQirwQEIHPWRD3/BYQnqJmEMGGOxCFjhIQzukcEKauFhDMyRMQn6FcEI6/FBDYzUUQmbsSEJ3iaRC9rB8QgvcWEPHSbBCJ31AQ7+8YEIxaEJTVGhDxigUQmLtfEJHrCRCBtRwQ5OVYEMmsOxCnvAkQpeYZEMXYDRCh1EsQoZY2EI6jIxC0ziQQ8sR5EOz0NxDF1RYQvLcpEPqiBBDAnHkQo4psEI2JbRD12i0Q5oYuEKu8YxCwDRCI7lQQ48I0EIuYOBDE/jcQ9NRzEPrjThD/mngQ5wUQv79EELGKThCf03IQqKceEIoLHCAAMP8BOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWi8toHarsvj57oBlAGTAQoDMi4w
EwiAAhUog+i/FViR6L8Vq97ovxW7O+m/FalO6b8VeJrqvxWeMOq/FRus+78VCOvsvxWT+Oq/FULU6b8Viy7rvxXcbPW/FSC0+78Vs5b1vxXuBwDAFT/x/r8VY3f/vxWHK/S/FVTw9L8VzeTvvxXLdeq/FZcu7L8VT5HwvxXAtfG/FUEpAcAVpl38vxX/wPy/FYJmAMAVirL6vxVcG/a/FccAAsAVQMUOwBXMgBLAFSGSBMAVppEDwBWRogrAFXyQCcAVZKAOwBV8lgPAFRr49b8VskMAwBWkjv6/FQ6K8b8VBS7vvxU9xvW/FTOj8b8Vfeb3vxVc8AnAFdXkA8AVKm/5vxUMeQXAFfUlBMAVadUFwBUpnAnAFRPHDMAVr74awBVAOAnAFYNyDMAVk337vxV0DwPAFTtIBsAVTwT3vxWWNQPAFYmsGMAVINwOwBXscSzAFXNNGcAVQVgVwBW/VCPAFe1yK8AVUAQFwBWgFR/AFSl3GcAVolEcwBV7L4nAFRaCIMAV2ZQ2wBULZT3AFfPSBMAV3v0iwBX81xDAFUHQAsAV868EwBUGvBnAFeh+AsAV/2gNwBUfWP6/FTiiIsAVsG0dwBUvtgPAFf5NKsAVpp/2vxUGwBDAFZdyC8AVR04NwBXB3Pm/FVrmLsAV9OoNwBU6UAvAFa7AEsAVraQ5wBVKhQfAFcC0JcAVxSoOwBU5ngfAFcB4LsAVZIAlwBXoEBXAFfO/H8AVmLMVwBXEhCLAFa5bDcAVnJAbwBUFvh7AFXDEGMAVX+4NwBUZhBbAFSjRDsAV1IMYwBVCzg3AFTNNDMAVU38kwBU8KwjAFeupFsAVRBr6vxUxh/m/FU+mCcAVz1KTwBVTITDAFWtOQcAVdlc0wBWWY2rAFdiSOsAVXJ1gwBV9iznAFYw9SsAV2r0+wBUhs1HAFWCAZsAVru0swBUzWDvAFbXqg8AVCf1YwBXgXkrAFaMwkMAV6vg/wBX22zTAFT7wOsAVA/I9wBUm/iHAFb6iicAVyqzMwBXSWHXAFdCVI8AVizqFwBWKUI/AFXGpS8AV3pRRwBUiEWXAFWmkJsAVOWxmwBUZEDTAFRW9dMAVuoxqwBVVfg7AFS4tBsAVLJQkwBWvA0bAFapMOMAVSzclwBX2aiTAFdAHJ8AVO/tZwBW3wpDAFRscA8AVNHcwwBU/p0/AFUnze8AVCFE5wBVnLjzAFefGbMAVK2OLwBUeXXXAFeDkYsAVKM2JwBULJhnAFYeQR8AVNBqVwBXi0RzAFVBzMcAVvOIOwBXDzCTAFURjE8AVXnRHwBXzAzfAFeuYMMAVGkRZwBXGhYDAFTO9iMAV/faZwBXACHnAFflSTMAVodBWwBX7c1PAFacfzcAVJypRwBV6GbbAFW1bKMAV8/lCwBXTkx3AFdkR/sAVGEc5wBVf9WvAFbVEOcAVoIIrwBULjj7AFaNZGsAVJ7ZTwBXwbyTAFc7JRMAV62lRwBUb+GXAFSFWWcAVHyFlwBXxpzXAFQy9KMAVLnInwBWBD7TAFTmYMMAVG25EwBXVURnAFfcEcsAVzJwxwBXTozHAFbibS8AV0SoowBXOFK3AFaC4NsAV7J4dwBXzGzDAFVBRQ8AVF8eQwBXIez3AFXINLsAVF2FfwBUmISbAFSqu6cAVwXg3wBVtmhvAFdxyTcAV3aMhwBULMYLAFaRc+r8VNPEHwBVji2nAFBsIgwEQARCUCxCAAhDw4GkQ+YR0ELX2bRDs8kcQifRxEMWTHBDRtikQtYJNEPjTBhDXvFYQtbQYEIqCbxCu+E0QoZlkEML7ZhCNxBAQ0MoTEJaxBhCEiBMQv9tREKnQQhCe4QEQl6VzEMGnDBDugwoQ9LlIEOe/IxChiVAQxtUtEJWmXRC+304Qz5x5EILrPxCr5RgQoOhIEKDIWBDIv18QtcdHEKRxEOmVCxDf5RUQjaA2EPimTBDPlk4Q0LVeEJu5WxC9nRsQ7Y8cELbBCRCfjwUQ879T
EMPxWxCT9BMQ9NxNEMPHKhC+z0AQ8PM1EMqwKxDZ6zUQ/do4EMygOBD/0koQmdhhENOVdhCP61EQzrF6EPD5RBD2mVAQ1/RFEMrvdhC063sQgNA8EKGkBBDRjk8QmPNdEL3pSxClkCIQ7dQ+EObTahDfzQEQ8YpEEOa7FRC/iBYQ1o1iELLbDhCF2R4Qn7wIENmYWBDGtkcQ8eZMENDiahCh9HUQ2qNEEJLlXRCZlXoQzfgbENPQcRDjowwQwOkJEKeWXBCu3R0Qv48/EIPBTRCJp3oQvJ0hEKiWBhDcjSQQ4vkFENnVBRDutyYQmuxaEIaNMxDm7icQqYZEENjfcxC31kUQsLtdEN/INRCHsj0Q2JcvEIK/QhCbtTQQoZJlEIrOSRCExXcQoPlZENTwQxDruXwcIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaOTxxdOIuJ+4nAGUAZMBCgMyLjATCIACFZUY2L8Vy+navxXzwtm/FdgP3b8VJu3avxXmi92/FQIe3L8Vz+TfvxWEvd6/FUNt478Vi8PdvxVOUt6/FUpV478V6A7kvxXh1dy/Faby378VcvjlvxVPKN+/FeyH5b8VcBH7vxVPAOS/FYmh+r8VlgP0vxXKUOC/FYjX378Vt6nnvxV/dee/FWYy5L8VyFH5vxXMR96/FVeq5r8VMqHgvxU8Pe6/FdbZC8AVSmUCwBUoFwLAFVJS7r8V8FLvvxXhGeq/FREsD8AVRdv+vxUU0e2/FeOx6L8VW8sFwBVr6fy/FXDtE8AVurP7vxUObOa/FWkv978Vq8zuvxWTd/6/FdSy/L8VxQ7xvxXSE+y/FSQYB8AV4jwHwBXA4fS/FY9+CMAVThcZwBUK6eS/FURY3r8VGK7tvxWCCe6/FfXy4L8VWQv1vxXf4jnAFZEF/78VWgI0wBXYwg7AFbyvKMAVukgKwBWXcyPAFY4ZCsAVjjNfwBXotBfAFbN2IsAVxwUDwBWSTuu/FRJ58r8VmBNDwBXRVzHAFcfHPsAV6eISwBVQFhbAFaXPFsAVD1QjwBXqpBTAFQolMcAVAuQpwBXGOgPAFetlC8AVtBQawBULcBzAFV48P8AVjQUewBUbZADAFaNu5r8VuC0ZwBWN3QjAFVRCDcAVAlIVwBWn8S3AFS4XDMAVxS4TwBV5sk7AFdmcF8AV/XUYwBUyA/G/FeZKBcAVziYJwBUiiAzAFRMgJcAV4OEPwBXLx/y/FXyQAsAV9rEYwBUVag/AFbRoKsAVraUywBWcdfC/FdBjL8AVVR7gvxVANQjAFQvz8L8VfdJDwBXOvfO/FUc1GcAVvCnhvxU/tYbAFROlAMAV+umqwBX8Z4/AFRpPesAVA9AbwBW25S/AFfeTR8AVQ6dkwBWGWxDAFR7eD8AVpIs9wBVPB4/AFU47k8AV5wIxwBXv8anAFS8MNsAVJLkPwBW+JRzAFW9idMAVLGmFwBVd+mLAFVjxecAVomZAwBU4/GLAFZQxCsAVDP4SwBUdZKzAFf7qlsAVUKp1wBVnSJ7AFTxZUMAVVxtJwBU+kl/AFRPmXcAV+Lm0wBXRP6bAFelHVMAVQTAzwBVOfWPAFTNMCcEVmvMzwBWfCCXAFaYsjcAVwRSCwBUCz3nAFRF3PMAVf1R2wBULsDLAFbGYjcAVyNGAwBUc4kvAFW6oBcAViaFtwBXvamHAFXs5OcAVFu1DwBV2Oz7AFQnzIMAVTbNiwBUsRo7AFf18bcAVIkm+wBXDcDTAFSCuM8AVttgPwBWX6/S/Fdl5bsAV3kYswBXG1irAFRYp48AVbWQTwBXtwyrAFcjGTcAV5nkxwBVahJXAFVjjh8AVIuOmwBVtIRfAFfmGuMAVYSk5wBXrZ2TAFXJfXMAVKkeHwBUPXBjAFfJbJcAVzcyvwBWWtwLAFT/wBMAVzkuBwBV6wpPAFXCGGsAV/tRjwBVn1IfAFRcPEsAV0HElwBW3ADLAFcYFFMAVEh61
wBVvWDfAFeT8VcAVvm4owBXtAFPAFVgDZsAVDY4mwBVupxbAFS6UE8AVzfhCwBXYZUXAFbZLVcAVdspGwBXjtm/AFStS8r8V2mgvwBVbJjXAFZxxFsAV2IQ/wBXG6RnAFYQchcAVOKo4wBWtiSHAFRRrScAVGbeHwBXrGx/AFS6O+r8VZKlBwBX1dTDAFVJtBcAUGwiDARACEJMLEIACEIiHGxD/mC4QqLswENKzNxDI83UQm7lNEJaWOBDP+00QhrpKEPeQNhDFwUQQtYoaEIPhVhDzfRDMmzkQ8oF6EJXaNRDmqnsQ6LpTEIupERC5kWsQz/5IEMqacBDK5iMQ/oocEKWLShD9viQQzaBUENzCaBDEyVYQzpZbEN+/dxC6BhDC91wQp60mENWDexDk5yAQgtNaEL2/VhDkjjEQhdpxEJ+hFBCzxF4Qif8DEJyhdhDdq3UQwI8FEJPqKBDL5FsQ1bhSENDFPRDu2CIQz+kmEKnzGRD/8woQv78mEMPcQBCZ7kwQp/1MEITCWRCnqEQQsJt5EPWlUBCF7HkQ6ecMEMSYbBD392EQitVSEOy7chCLzDIQ5swwEJXMKRCmzAYQhL0iENHUbRDv3RkQtM5IEKLJKhCe2FIQr5xhEJHnSRDMu0YQovBTEL/TARCDkDcQjrcpEOnmZBChXBDO2UsQmpkiEKDFFhCI6QMQhM8ZEJzhFBC80nsQ0fcIEN+hSxDlo0UQibsJEJS5chCO21oQxLo/EP7hHRCR0GIQx8YgEOeJCxCj5HMQrqAxEPf6dxD10goQxNQLEM3LXRDfogwQuK1IEKSfVBConCkQ0vRQEJaOUBCxogUQ57NkEID3ORC9qD8QnsxPEJX5QRDUt08Q88FNEISPcRDOiXwcIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaM/ryLba5e7UPJQBkwEKAzIuMBMIgAIVD9HSvxUPHtO/FSxy1L8V+mTWvxWcHtS/FWGz1L8VnHbZvxUNP9e/FcXq178V1d3YvxVLute/FRT91r8VCzbWvxVTReO/Ffx+2b8VjkzbvxUabea/FaA53r8VKknyvxUVReO/Fa9E3b8VJZfmvxXaI++/FaGA4L8Vj1vZvxVbQfS/FZM+9r8VIqHjvxX8K+q/FTHO278VBtTcvxVZhty/FRFY5b8VHHkEwBXu/Q/AFTqs778VRIjkvxVEzPi/FeJO978VbYwWwBVr9Om/FTmL6b8VZcn3vxWRLPS/Fb2D878VkOn8vxVkWv+/FYlpAMAViznjvxVL3ua/FWO04r8VuMT1vxXnagHAFV73B8AVCl8WwBXE/+q/FSfe9b8VYgwLwBU4PQvAFRWt7L8V3wLevxVGQQnAFVOt6b8VBYvgvxXz6DTAFRvWG8AVmHDpvxXtVCvAFbeiScAVr2I8wBVAGi7AFVh3DcAVUdn1vxVFHynAFRYVNcAVps8jwBUtU23AFRX/D8AVQT76vxXu6yTAFYPnMcAVEcUEwBW5yxrAFfM47r8VMgD8vxVe9WLAFXwE/L8VnS8nwBVGjxLAFRfVJ8AVbKAEwBXcJFbAFQcOLsAV56cKwBUeiwfAFYSGAMAVZv8JwBVz4u2/FYDqBMAVjYv8vxUWnPW/FdwbB8AV4ID0vxURHQTAFTrdGsAVREUowBV0OCTAFV72H8AV6IUOwBXwV03AFWx8KcAVp90jwBVDqwLAFc2wAMAVEAkRwBXMJCrAFXuKFsAV9fQ6wBUwIibAFZwPNcAVx5wYwBUgo/y/Fb+f5r8V+LQnwBUEsAnAFbXA7r8V+PVvwBWd9+W/FdBTf8AVF2A9wBVGzavAFV4qLcAVP01gwBXKJaXAFVkvAcAVH4uUwBVtql/AFT3ilcAV4ax1wBWl7U7AFbkUTcAVOCUzwBUb8UbAFZ0MPcAVoZGrwBWN+ybAFULvg8AVkuVGwBVl+3nAFUZDa8AVI1BCwBUbZi/AFX/9P8AVb+y4wBX1lHjA
Fe2zIcAVArwTwBVQKwHAFVqRLcAV+ulAwBXo3oXAFVLPXsAVi6Y3wBUazTPAFUqzZsAV8xchwBWRST/AFRJUH8AVVcEJwBUSoh/AFVohRsAVqeuJwBUFU//AFSHNF8AVFuUwwBVB6IfAFYTkKMAV3wc6wBUrWVHAFdFbP8AVl38xwBVLyDvAFVqKpMAVze5rwBV/FnbAFQkAkcAVZ0hwwBXGpifAFZ+KhcAVt9wwwBXikVvAFW2CbMAVOPiGwBUk8hXAFXW3H8AVQNH0vxWLi0HAFa7ZJ8AVv6VFwBUttg7AFfgGbMAV8ZgbwBXUvFTAFSKCEsAVqL8dwBV6my7AFVkuCsAVGWh5wBWE6gvAFaUZicAVMZgqwBUiRlPAFWqSQMAVJiQ4wBXX0nfAFcNXt8AVgRSewBU4XkjAFexpZcAVMl6twBVbJFLAFR1kusAVvmPMwBX2KWPAFTLmJsAVkN8MwBX6CQ3AFZUsRsAVJ3VSwBWeLD7AFXSuQ8AVZelFwBV2LT/AFWeEGsAVJAazwBV8pZ/AFS8WasAVsZ6cwBWdDIPAFfjvdMAVUdxCwBUm1FHAFY+tVMAVr0IVwBVNr0vAFQ1pHMAVYl3zvxU/r1zAFZlCOsAVxcMZwBVZaBXAFS6K/r8VyRVdwBU+O+TAFdglqsAVbfLovxQbCIMBEAoQkgsQgAIQhrNTEKqUZRDny1wQnOoEEOG1FBCvgBsQ/cE+ENrcYhCc7VQQ0p0WEM3KLxC15hwQ7L1EEPfdDhCjoQcQwaFqEKSVCxDW+j0QzO9cEPXvDRCtnDsQ+NchEIaGHRDVwEYQjd8fELL5IhCQ6woQuc5fEODoXhDQvGkQtIxPEJTGbxDm5DAQydcNEPHZUBCByg8Q8twiENKwAhCbyGoQ6eZEENLHExCvxF8Q4/ljEJPvdxC9mjgQzNMyEKSWJRCe3U8Q/6FYEOC5SRDRyCkQy6QCEJ+ZSxCprGcQwsAoEIaTOhD8pzsQ2o1YEP6pWhDU33UQ4N5HEJ7TaBCRtGAQ9b95EJX5FxDlyQIQu6d0EMxKEOuPDhCC2zUQ1z4QnaRREN/bFBDG0k0Q26EdEO3iXRCFiwYQhs1rEOSeCBD0wAkQ9ddyENucaxDbzHYQlIkWEMKKDhD7nxUQ8YYpEOx6EKLCbRDV/TMQpYtLEJmRKRCxmQMQks9pEPu5BBDFlgEQwLt0EL2kJxD37R0Q054BEPaWKxDN3l4Q3s8JEIjKChCAtGoQvPAvEK6pQRD37SUQ3KEnELqieRD39ygQr6o8EOORLBDYj1EQtoovEMb7LhCjhwsQorI0EKiMVxCc0zcQleo7ENOLVRDL8UQQhpRHEOLybhCqn14QncVoEPbEehwgADCAAjiAAkEAAAAAAADAP0ktQxzr4jYaP1AAWJgLYAFoipLuweb46otxlAGTAQoDMi4wEwiAAhU01ua/FVzi578V3i3pvxXsnui/FYJB6r8VPgfqvxU/Uem/FQQV6b8Vt/rvvxX0PfO/Facr978VnIruvxUIF+q/FW/G8r8VNlrrvxUgpuy/FcG6/b8VaKP8vxWMi/e/FS60BcAVr2v0vxVUMgHAFWrWBcAVj2HxvxXOr/C/FQf+8r8V58QAwBUAkvq/FRMz+78VnV0IwBWjSuy/FQQa7b8VglIDwBVdTQvAFSwhCcAVbnoMwBXHw/2/FbU1BMAV5GABwBV4fQjAFZr/D8AVLuL1vxU2sA7AFdObAsAV9JcFwBVWRRrAFQrlC8AVYVL1vxUDh/a/FZw7AMAVTZ72vxVWjQXAFY4pIcAVsjABwBUlkA3AFd+qBcAVDvABwBXo9QHAFXUE/b8VcZgIwBUj9xPAFZNH8L8VRbwLwBUvxg/AFUpX978VelALwBUQQQXAFaLbOcAV6q0twBXL6TvAFTN3D8AVeTMNwBU5WzDAFapxH8AVzsQXwBXOSgnAFcN5D8AVzp4EwBXxZSzAFSO6HcAVAHEMwBVXBiTAFSmLE8AV
5foTwBWJtBfAFRxnUMAV21IjwBXdRgPAFcB5DMAVytIPwBVBZBjAFdadHsAVZcInwBVm7BPAFaF1EsAVrMsQwBUyywPAFT9JBsAVQZb9vxUdcjbAFXaXBcAVobgLwBWO4/e/FcwYDsAVzh05wBVoVSPAFRjFPMAVdKNAwBWI2wHAFQuSG8AVtl9lwBVrhyHAFaEjCcAVyTEfwBV/S1XAFZ4PL8AVtbskwBVfcBLAFVXXCsAVBhwOwBWOnhvAFVNyLsAV1pgWwBXIpD3AFeKKIMAV5a8lwBVTIjvAFeZvG8AVkWShwBWHWDzAFe/BRsAVlYBQwBVBWbHAFRxObsAVKFcSwBV+G6TAFXH2UsAVeD8wwBWEYZXAFVaKfcAV7uCcwBVgf5DAFQSjfsAVa00qwBU8uDHAFb1pysAVVSjXwBXqBCPAFSMqm8AVdC9TwBWFnpXAFUl+VsAVpvYcwBUk7lLAFU9uGsAVQEkKwBUdpg7AFe13jsAV1ctvwBWLhDzAFXZ4JMAVLZ8pwBWFEkTAFfcMQcAV/F4uwBVj3krAFRj+HMAVLMYPwRWrDBXAFX5tb8AVYrYfwBXbBHnAFcxVZMAVWsE/wBWyFWbAFSIqlMAVSYd+wBUHHj7AFUnslsAVPzkwwBVPuEjAFU/5isAVOLyIwBUrpXnAFRR2H8AVjBwswBX+uzDAFeJFHMAV2rhfwBUh8RnAFSjjr8AVHvwgwBXzDCLAFQPGBcAVR7MXwBUB8BXAFT1o0MAVl85rwBU/DDbAFUqrO8AV17JTwBXeEgrAFS4bMsAV7B2ZwBV50BnAFRDZI8AVVasBwBXwHyvAFfg1LsAV1ypfwBXXL1XAFW1OJ8AV4kAlwBUwTknAFSwwV8AVtKhdwBVYml/AFXXPE8AVPsRTwBUdXoHAFW6qKsAVNuVywBWD7HDAFazZKsAV4L4ywBWueQ7AFRD5H8AVMxQzwBWBFifAFYeRisAVdTiFwBWQBn/AFRnkMcAV7MtcwBVxgYXAFf2IFMAVEnoTwBXzlhHAFW4vG8AV2K4twBW6B1/AFbCxJMAVkkk1wBV2fF3AFfVVccAVSzo7wBV10h3AFbPjYsAVT7Y/wBXyBJ3AFZY+g8AVodgowBXzhXbAFZXwmsAV1klGwBUS4iLAFBsIgwEQCBCUCxCAAhCAxkIQv5NrEKTjTRCdywUQoI41EJSXaRCQkHEQ89xyEP/HLRCV6hQQuIwaEMnMJxDksm8Q0bJoEOyuNBD/zg4Q3KcPELGERBCxwxMQ2eBBEIGnFhDQzVMQxOF1EMmJHRCzigkQ98w3EK3wJxDPlygQ9qBREL6wNxCEhkgQnYF6EMTGDhDroEwQu7g0ELG0chC6rEgQwblBENH1OBCwyhkQ3aEVEIifLhCn+28QzYNkENrZeRDizUoQwaATEN+tahCosTwQydEeENDXMxD66TYQlYZdEJSsRxCsoHkQt5NZEO+haxCa/CsQyfM9EN7zMRCF33AQ6NZLENehUxDfnXsQz/oEEPrFSxCbyjUQur0+EPmMEBCpyw8Q9sNpEJcBEOK7BBCAjU4QpPNcEJ2QXxCyqHYQmtYCEKLUdxDZ3D4QrZRmEOunNRCo3DkQss4VEJOPYhD2xDMQybt1EOrAShDusQcQusAHEK/eZBCRkTwQ+PRJEKmbahCO42sQjrBbEPPQORD8nE4Qt9FVENiqdBC9zSUQy/sfEJSnOhDpuXYQqP4iEJ+ibhClsi0QysNaEM/tBRC6wXEQk5deEP/zShDH7SkQtq0qEOaochCkjBkQnpl6ELqXUxDxmWYQx59YEJ66XBCN4DUQoMk+EJevLBD630QQhvdZENWqDRDQknscIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaI7Sw6CB4NzQswGUAZMBCgMyLjATCP8BFRlR1r8VPKjavxV9p9a/FZvE278V7E3bvxV5Xdu/Fe+V278VkufbvxVi5du/FWC53r8V4h7d
vxUTddu/FekJ4r8VpbXbvxW4FOC/FfyF5L8V4hz4vxUaPNy/FV5L6L8VjD3ovxW5k9+/FZHV4b8Vf0TgvxUx1uG/FSqD3b8VaVDovxV7buK/FTZ63b8VtzrnvxXiLPa/FW+V4b8VFXDyvxVha+u/FY7T+r8VTlX5vxW7JOW/Fa2lGMAVs/QAwBUgeem/Fb2l6r8VokzqvxXkfuG/FZrf678VzH74vxWMkPi/FfwLAsAVPrgCwBUmZuW/FUV4978VCbzwvxWXhR7AFVoFJMAVbmoMwBUtru6/FTar578VRqwLwBULwjDAFaI49r8Vp6bvvxU7wBfAFXNDBMAVs3MDwBU+uea/FU58CcAVnk8FwBVU3w7AFQhp/L8VUO07wBVPKQrAFQQS+78VxLAHwBUjOee/FbIxHsAVNRouwBXrQjXAFbm5L8AVRkAPwBUZtRjAFepl9L8Vk+zrvxXaNA3AFT7LAsAVWZwAwBUmrgLAFTjn+78VGtIYwBWeNwPAFTk7CMAVVvIIwBUiPwrAFZEv/L8Ve+YOwBV8ZCHAFZhkBsAVZtUXwBX0Hg/AFRibA8AVhD0FwBVjVQnAFfVMK8AV7ewVwBVedy3AFVSbNMAV62QkwBX6v0fAFXTsIMAViiJAwBVWjATAFZ+gGMAVAp8FwBVptAXAFfwgGMAVBIovwBVM2kXAFUPIPMAVxycOwBWivAjAFQhzKMAVo8T1vxXLsBzAFUmKKMAVuW0XwBXz1iLAFdmMJsAVXVp2wBXlrPC/FbbW678Vqo8mwBUGhzDAFV1dI8AVv2xgwBUR5XLAFX6jg8AVuk6RwBUOIVnAFaMxXMAVVRVPwBWor5fAFeE2KcAVm/FswBVwSRrAFVpbf8AV+6B1wBUBwgPAFXDKJsAVZGk1wBVfFynAFfoeNcAVS0JlwBVfCHHAFXobTsAV4rtZwBXwfIfAFbiWT8AVOVOHwBVnhCvAFcm+SsAV94mbwBXCM0LAFfmhG8AVTkMOwBXnWy3AFSD5HcAV/YgZwBUae5PAFQk4FcAVB7c9wBXMwbLAFX/UCsAVOUw1wBXQZSDAFcTiWMAVYcYdwBUBJB/AFRSPQMAVZ215wBWZqWvAFfhdSsAVtuc2wBWZKH3AFc53MsAVRFcNwBVaGwjAFcBjD8AVJ4J2wBXJ4DHAFS/cdsAVVoh7wBXLuQzAFVjzZcAV4tYqwBWtIO3AFRGII8AVWeWQwBXqdxjAFbbeKsAVs4YQwBWE1A/AFbJ4hMAVhTQ8wBV1lXPAFeAzscAVdSIywBUcPUvAFbnEgsAVUvBBwBWKTHnAFSKgT8AVe9BEwBXoL1vAFQ6sV8AVydCMwBXQG3XAFRA2jMAVwIBBwBX5y1nAFYeiKsAVMmQuwBX3eoHAFUbuN8AVCcbAwBX1/RHAFV4+jMAVsxlmwBXIwHvAFf6vzsAV1wo9wBUsP1nAFXPufsAVgVprwBVL4nzAFSe8H8AVVTBVwBWF8BnAFcwhOcAVTupAwBWWpkLAFS4DDsAVdYyEwBUWLU/AFfDbesAVcud7wBVKYMfAFeC4G8AVLCwfwBXueiTAFenqQ8AVd32XwBXK7TbAFaTkjMAVC72CwBWIT2vAFai0WsAVTLlSwBWRrvG/FBsIgwEQBxCECxD/ARCJrwIQ8a4vEI7VLxD44V0Q97QeEJWhXhCU4jIQppAlEMnnGRDoxS8Q3bwYEJ23JhCNxlAQyu88EJm0dxDOtmgQ65gKELqtLBCIp1UQm8dFEIbeSBCgxRsQkcBHELycGxDY5GcQhq0jELu+RBCf8E8QxdsEEOXQCxD0qHEQ4o8+EN7+GBCV2wUQ89c5EO2MBhD1q3AQt6BoEOL+NRDWARCngCwQj6oHEMWVHhDz83EQ6cNyEMf5IRCTzBkQ2O9aEJ/rHRCjzmwQr7I8ENL3cxC1VBCZxQoQ39ocEKj0QxDCqxcQ6KhgEJemdhCenW4Q+5QBEI3yUhCsxmEQq5xEEIftCxDgplAQ4M4F
EKnqbBDf6AUQwsBCEPSrKxDBj10QqqJpEL3UEBDnnicQwIMBENH9WRCA120QputrEOSacxDzqHIQxpMqEKTZVRCgi1YQ44xIEMasUhDZhUkQl7gWENmqIRCimR8Q97oMEISUXhDChUYQxNU+EJiaGhCtoAEQx6FiEI3YMxCAqRUQpeMDENrlIRChrSUQiMFbEMzUIhCq8jUQ3OcjENipJBDT0UUQttAlEOquaRCK/V8Q87Y9EODJLBDW21kQxNVoELfpARD4/24QwKlJEN7/PRCw+kcQn69aEPG/SxDw91EQoPZQENyOXhDr3WIQwchqEP0KHCAAMP8BOIACQQAAAAAAAMA/SS1DHOviNho/UABYmAtgAWi5mPDD89W11y6UAZMBCgMyLjATCP8BFTWi3b8V09TivxUf4uG/Fde8478VGAvqvxUL5eO/FZTX5L8VLeLkvxXW7Oa/FZ6r7L8VWMT0vxXB/ue/FXSg5r8VTsTqvxVy/+u/FXxJ578V0QTzvxU5Euy/Ff7j/b8Vf7P7vxWeR/q/FZ3mAcAVFN72vxVvFwXAFdsYBMAVNPnwvxUfauy/Faj/6r8V6sfwvxX1a/C/Febi7r8Vr+cBwBXasu2/Ff/mCsAVtSYDwBXu2/W/Fb978b8VvOIDwBXfuAnAFdfQFsAVWL8EwBUXgAvAFe1xIMAVr8EEwBUZSQ/AFb7X/78VcZ8AwBWa+gbAFZ1SB8AVOFMLwBXDDgbAFRerHcAVUD/yvxUJi/W/FTGY878VD3sEwBUZUPe/FdPV9L8VzFoEwBWoThTAFX62878VCUw0wBXG8PS/FakGBcAV6EkJwBU42PG/FYBH/r8V5+gNwBWOJBHAFVuLR8AVd/QgwBV7VBrAFdO3+r8VSyD6vxUsmjzAFVX6EcAVoFcIwBX0WhHAFSF9KsAV7u4XwBXSaivAFZzTC8AV+9MFwBXVpmzAFW9xEMAVkPkkwBX+hDPAFciFTsAVMi0HwBUNrBjAFe5qIsAVtVcDwBXAhgvAFQRTBMAVm30GwBWViBrAFcrLHcAV030jwBWAYhrAFZp+G8AVD1YPwBVMzAbAFX6nEMAVaQ0nwBV4lkbAFUNLAsAVaQH1vxX7AhjAFdqQ+78VXK32vxWlFxLAFRLWOsAV4W0fwBUT7QTAFSfb978VHg9PwBXJgRLAFdhoIMAV7EgGwBUilSfAFZrzPMAVZif9vxU2jT/AFQD1WMAVjGQ1wBUmFRHAFa0b+78VAoUjwBVYpxHAFb4HNcAVfxluwBX3TTPAFQ9wdMAV9q8BwBWaARfAFd07JsAVRvBNwBUVr7DAFWbLFcAVZftwwBUkjJTAFSP6OMAVo6chwBU6RjrAFeI0jcAV9dsfwBXtSGfAFUBMP8AV5RIPwBWPAorAFYmsf8AVB6Z9wBUJFmvAFVP+EMAVeFMtwBUco3vAFfM2FMAV6DEtwBVCwEHAFZ53NMAVpjBhwBUDEzjAFc7agMAVW/UrwBUE/FLAFSc9GMAV0mapwBUIJ4TAFfKwmMAVqS47wBWgECfAFUDvl8AVe0+nwBXye2nAFfHlYsAVBB6xwBUpWJDAFdhsT8AViNYgwBWoxRnAFf9PqMAVW/CQwBU4/ybAFTbhfsAV+kVYwBUU7JbAFZv4GMAV7WwywBUrO1fAFX/THcAVc2MNwBWKYI7AFR1sS8AVfHQhwBUfc5LAFa2cRcAVJN3ewBX3OC3AFbZAcsAVVgY1wBVdMkjAFVk3JsAVG80bwBUZ0SnAFQ1RP8AVJpBMwBU2RSHAFUtIZ8AV/t4wwBXsBG3AFbGbnMAVmcA/wBU99lrAFVFVLcAVIRBYwBU2ko/AFTMoWMAVWSIDwBUXtX3AFb3EA8AVaY+awBVEAXXAFWDzdsAV0+GAwBXAAqbAFVKyU8AVsE4zwBV/pi7AFZiWhMAV/sz7vxUCeobAFbJMhMAV0E5VwBW5g6fAFThGKsAVnl1qwBU8+JrAFdt/DsAVAylPwBV8RTjA
FaNOLMAVYolPwBUp0YTAFVsPZsAVwZk0wBUb/VPAFa3NcMAV6XmPwBU6auHAFZr+PMAVRKFWwBWxpo3AFRkKXMAVt0gGwBWSNwzAFBsIgwEQARCRCxD/ARDwr0EQ2p8SEKyTARCh4mgQjY8rEPsSEJegDBDauCMQ46QgEN3jchDP9CQQu99eEJSuQxCWp28QuIFDEPz4ShDfryAQo7AkEMeqaxDR4hQQvMt1EN6kaBDL7TsQkJc9ELK4DBC1smoQ48lgEN3iUBC4tToQs+4MENLrUBC6lg4Qpt4WEMiHahCc92oQ49AQEMPQGxCO/FMQ6/Z4EMuxKRC0v1UQ891lEJLWThCFoxcQ4/lHEPa7TBCjyHIQkjAQ/e09ELmoWBDB23QQiLRxEJ7WXRDNlzkQoJRBEO2JSBCG7mAQmcBKELKMOxCj0VMQsMREEITachC6zWIQispZEPizYRCAARD72xUQzqgLEKTEBBD912gQmowQELm2MBCC/AIQuIEREO3AERD73VoQnLBjEOCBZhDpziAQvt0HEIyOKhC64VQQsbABEJ/2TRDC9xkQssUWENWMWBCs91cQybBgEP2DHRDpn14Q/pNWEJP/TBDngXsQ5/hYEK+nRhDDgkUQ8vceEPKfMRCd4SwQ2JVNEMbKZhCz5W4Q1ep1EPuDIhDTikkQrqAbENeTbxCy7icQ+NRFEOuQUBDe3QsQ+6MNEKvPTxCNhTYQistvEKSBOhCUi0YQvuFJELrAehCc90IQyrpiELb3IhC2900Ql/FyELrHYRDTynsQkAscIAAw/wE4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaIbWw+mJg9/7rwGUAZMBCgMyLjATCIACFdV51r8VlNfXvxUdjta/FUpU278VnzPdvxXkD9q/FehV2L8VsiHhvxUPX+C/FfLS478VDDjdvxXlrNq/FQOZ5r8VAEfpvxUvRdu/FUB1478VWMTmvxWy7OC/FY3L7L8V85zqvxWSMuy/FW8O5L8VnebzvxUMyNy/FcYs4L8VUTrvvxXNn+e/FaZu+L8VvhHuvxX5c+m/FbZp3r8VctXlvxWhBue/FStDAsAVxGvxvxUBgP+/FU+E4b8VVe71vxUM3AXAFSby7r8VhbfrvxUNd/+/Fc/fA8AVFWwBwBVKLvi/Fbpb+L8VSaj4vxVmwN+/FfCtB8AVjVDvvxWfwRTAFWu4878VDdD8vxUC6/O/FRZ57L8Vps8FwBUeIQTAFXev8r8VhZLzvxVAQuq/FYd5AMAVfKD6vxUM4v+/Fdf28b8VuqAXwBX/IQ/AFXeU8b8VxLoLwBVXawrAFd6FAMAVdLIOwBW54QTAFY6aCMAVainpvxWpJ+W/FZOpHcAV5472vxXf3wzAFTdTEcAV3gdMwBVWVA7AFaek+78VfgMGwBWZmFzAFXtKAMAVTRhDwBX7FxnAFRc1HMAV0t8cwBU0pQ/AFfzCAcAVFZUPwBWc2AnAFVeHIMAVG5UvwBVMwQXAFTJg4b8VWCMSwBVC0QnAFenK8b8VTno6wBVc80LAFdGSKMAVuMX7vxUqzQfAFQoME8AV5z8FwBXyUgzAFTgxF8AVtaYBwBWfUALAFdmZIsAVe51JwBVjnSbAFYULC8AVkiULwBX1R/O/FX7o/78VE+0UwBWj6ATAFUsPLsAVgasUwBViXgbAFW0xGMAV9NwOwBWYMBLAFdm+FMAVqx/8vxUcERnAFXfcHcAVIUVtwBUUSbLAFca2EsAV2uMdwBUVFT7AFZeyPcAVUS0rwBVaEx/AFWz1TsAVZ92SwBXH4wjBFTgmGsAVFs8TwBVabCXAFSoMGcAVjfeXwBW6dT/AFTFDFsAVeaUiwBXFxfu/FeyPAcAV61uEwBXYhUzAFZL1J8AVs138vxUGBBPAFSLdE8AVYFyCwBUEwafAFUQEi8AVtpepwBVC2BHAFVBuF8AViW0IwBVGGSzAFUezesAVMM+SwBVDQ2zAFbGZhsAV
0yQwwBXXlbnAFQqhl8AVWVJ2wBUrJknAFapJv8AVFPR1wBVhJYXAFc71hcAVnaowwBVti57AFcO5QsAVZt0YwBX7OwLAFYEaQcAVQUNiwBWBhRvAFVCq1sAVoH6NwBVFIzLAFabWtMAVWlpAwBV26QfAFWVIMsAVN3fyvxX6MRfAFd7JicAVK54cwBXG5jPAFZI9JsAV7JCQwBXv6orAFRxNUcAVY3KXwBX8CJrAFeb8UMAVJYVfwBXXJnDAFXttJMAVTfkFwBUhGFbAFU3AD8AV82Q5wBUBrRjAFQz+L8AVG8sAwRWQyhjAFfTRdsAVVPs5wBXePy3AFSspQ8AV3YORwBWCIgPAFfPAJsAVs8A0wBXg2oPAFd5Bi8AVuZ1awBV7H8LAFcejYMAVqkMxwBUOEUjAFYopUsAVx6lCwBUTJCvAFYCT9r8VzdA6wBUrZEbAFUIUHcAVyT1DwBWycETAFUvSNsAVmIKDwBWy5OfAFZSYmcAVL1a9wBXItCnAFUUaWMAVZcGXwBXwRyDAFRzTRMAVbOUSwBVFNIHAFa56GcAVSdhZwBWU7SLAFa2OKsAUGwiDARACEIcLEIACEIisYxCrwEkQz4cBEKKCcxDv4UoQpaMdEPvOPBCCtwgQ/8pVEOqDCxD7004Q8+AcEOSLNxCL9yoQ4dwMENTbaRCuqlEQhetVEN35aRCN5xMQr4JQEISMGBDc2TcQhaszELXvBxCCgW0QpoJoEOD0XxCUxV0Q6NBBEPToSxD3ynQQ4IglEOaKcBCX9F4QzRAQ0MIuEL2LEhDa+QYQjIZZEL/AZRCb5jcQy7xtEM3OOBCitCgQuPpbEMeRERD7z00Qtv0cELKrKBD9lDIQ8YxjEN2oChCmt0QQuZ5AEIGQbhDGyiUQtq0LENSVORCZ3UwQtdBFEPKETRDSwGYQ+oFfEOfLZhDVzAIQ1e4KEOTqDhDIrBgQ9MhiEMjSDxCDw1kQzOULEIT2dBDSozIQn8NaEL/RXRC/gG4QmJdhEMeaAxDV7G4Qzrg7ENrgGxDS0QEQkpdxEMCxFhDF/BYQ7r4XEJWBGBCC8k8Q9MEbEK6nXhCe0xkQwK9IEIj9JxCUARDzkjYQ77Z2EJaDNhDHkQQQ8cMJELnfHhDZsSAQ1PUJENqiJBDykgoQ0ZtFEKDtJRD+nzkQiZZ1ELJrEJauOBDg+isQvcJgEIeoLhDGunQQ46c9EOzFNBDz4TYQxsM7ENPoBhCXzAwQ+YVBEOPtTBDmxk8Q6Z5mEPz3bRC/g3ocIAAwgAI4gAJBAAAAAAAAwD9JLUMc6+I2Gj9QAFiYC2ABaKukmeqXkJrEwAGUAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDUzLOuARCD7quzAhC+hJuWARCiyvzaARC9wIJlEMvd2AcQyMmBXRCitJmrAxDN+JbiARCqmcTdARCem8qnAxDO4rTOAhCCt6L8ARCVwrCbAhDXmZLwAhCvhoIyEOaSqe0CEPySwqcCEKae+EsQv/WS8QEQh+6augEQw4rj4QMQreTpwwMQud7LVhDhjeEqENiNpP8CEI+36KcCEIDkvfUCEPmvi6kDELXjsucDENbvoKIBELiPkaQDEIbdqLQCEKvQ08UDENnXruIBEMDgg+gCEKne8rcCELWxn60DEPGhhakCEMSe25ABEMHAuK8BEL/w37sBELJSJCr8B0H8rdZCApuPQlJoHEI6SHlB5WDKQga7okJC/HZB9kYwQj+k10JW9PFB+PmYQkR05kIDmRVCWZiNQegHmkI73NBB99cnQlRKhkH1T1xCW5oqQjnkR0II2rFCPy8bQkwgI0JOtO1CUjP3QgvQf0I6lixB+ZdiQdqfykI/nIpB1zH7QkZyk0HnOcNCTiiyQci7skIDwClB3Bg+Qk6vAkIFjp5CAON/Qkn7ZEH4JNJCTHA7QgmZ0EH+xOJCBtpAQcmB3kI/hmFB97pcQjvE90I4
ZXpBzgWdQgHydkHi6qhCTppbQljbKkHpPL9CStyaQk+lmkH/3f9B8jTSQjSHE0JOMUZCRuDZQgBuRUH8p6tCAJRIQeUP/EHuakRB5LqoQdjHhUIFEt9B3POmQfYQZUI5akNB+L0eQlFUSEI7oE9B/6QxQf5qxEHXitVB3aTeQjtk+UJKPnhB9RHbQkSmWkJTIiNCAj/3QkOQt0JRlEFCO2q/QdNqLEI1E2NB/F/DQlp7O0Hln1pCSNMDQeuVrkHzSf9B6XL5QevJVEHeZV9CRHWcQfQO20JCIxRCT0cqQeg2a0HNAFVCQl6nQlcvmkHQKBNCPV9CQgOmzEI7C8tCQhrDQdjl4UJJyoVB7gBaQj6pHUI9APJB+LqnQjVDVUI7GSlB9+UcQjYWqEJDDWVB6D2eQkIJKEI/HKVCSwg5QgJANUI85iNCQtXBQc4OTUHxsOFCV6oNQfnz10JSCyJCQYypQkUazEI8BDlCPfsMQjrG9UJMtY5CUPIpQlCukkHK9CpCUFN5QfKZeUIHb0RCAgAeQlAlxkIIG6hCA1oFQjhCr0I5JudB5jBPQdlaeUHmjAVCOoFHQecGwEHmzNxB3OtYQczG30HtBMtB23uXQluLi0JaC7JCWWRvQleTD0JMgdRB5keGQcp78kI6cnxCWde2QgGtIkJTMNBCBTtyQgJbqEHY83NCAIILQlee2EHwEBRB9LTcQe1X7EIIYGNCQtI3QjnhY0Hnuy5CA2TxQeV/TEH0PUVB4jh3QkaEykHmHU9B1Qg+Qkie7kJSPKBCWfkVQeG2RUI/RJBCU9hVQdZwl0JM7btCAK/1QgD78kI/40NB/4usQeq2QkICZsBB5ZPPQesCskJJNkRCAe3QQcyw2EJGg4xCQ1PIQk5VtUJN0u9CQMDIQlV6aUI+S6xCNl/2QdotDEJCw6FCAo28Qd5FNUJNvc9CVAbsQdaZOEHdS69CBax1QfvEyUHd8PhB6vQBQkctxkI/oiVCWswvQkdLkEI5AVpCAmUTQewxPEH5Xy1B3JcLQdUfHUJFLqRCTF5sQeFEpEJF+EFCPPE+QkgK3kHRHC1CPJAGQdT7gTIIRkxPQVRfMzI4AEABSP8BUwgMEAAQARD/ARDv/9/3ARCel/DcAxCwju+7ARC+9sNQEMGq2xMQxdGguwMQ0uLOggMQ2JyiyAIQwBFUWwgMEAAQARD/ARD/39/jARD6h9pbEN6C+7sBEN725+EBEIGh/98BEMzygCcQtICPQRDohKFAEIJBXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAAAAAAAABIAVCfv/vwgIWGwVFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEOKo5fcDEL+v/64BEMaR8LACEJPLtXgQ5/WnfRCT07y+AhD54IHXAxDZ4oyLAxDntrmEAxCSr529ARC05NLNARCEl5OpAhCkzcCgARD0ivyAARDZ/MPkAxC+gI1fEOWSlI4CEM7n+sYBEM377KQCEJCXsuUDEIvKivECEIX/5REQx6O7wwIQlqOxqQIQq8yfURCY3dxxELD18cgBELaj30oQzq/DCRDMnJkUEO3MraEDENTCmlAQhtOjwgIQjrH9ORCw9fPYAhCQuuSAAxDGhNipARCkyNqYARCcj+WHARCUm47MAhCNx8fVARDbpJoBEKgEJCr8B0HQr+JCRyHQQk47vkHjYqhCREquQkJpJUJOaWZCR4szQkCTIEI7DnZCTrl9QgN/T0HKZzJCBBudQfCoYUH3MIRCPbkBQjkwNkJCdWhB13YlQlDOSEH4KRNCAkCxQkMLFkJNEfpCNCDEQfbDq0JNKGtB5exuQkpHyEI6pnpBzr61QctbK0Hrq1dCBM5YQk8mbUJPqIBCTmcfQe3Uj0IIyY9CBLISQjn88EI6Zy5B0ypNQfb+pEH3b3dCB4QbQgArmUJA
ZJxCVxNsQggwtkIEpDRCBhZSQdhJVUHOBHRB1tAJQjlIZEHS61lB+/AkQkOl70HgPv1CRlL/QeYQnUJGUnpCUMZLQgH/AEHgYAtB9U6HQgMWQ0I/SERCB0aBQd75EUH2m8hCRP4xQjYfOUH60zJCT1iDQfVwKUH0+dNCUw/LQjulSEHdTrpCQsP1Qfp5IEJSrwBB6BGVQlqF5UI3V9NCOpCPQe5BEEH0T2xCPUvKQdVzTUHcf3JB3B8wQkFdBEJAjB9CNT29QgU/akIKOQBB4JVsQgqZokIFUQpB8JTVQe66BUJQC9ZCQB/aQcr9cEJMugFB/WcGQkOgBEHdV7lCU6c7Qfe16kIG5zBBygfKQfwCgkI2DB9B5GsWQfIjiUI/f4ZBznbnQj9vXkI/Y1NCCZASQjdL9EI5MCZB+iFzQjnqmEIFeYBCPFtEQkA5ukJX+9ZB7FzrQfkhBkJMTA9B4lCZQdcEd0HtaH1B5FShQfU9QEHcVO1B0homQe+0ZUJMKjpCR//5Qj4vDEI9aTlB/vtyQkSN+EH8UNVB3qKJQk7wYEHidZRCQsNSQkWa+EHVVctCPoD2QdGxW0IJ/dZCShERQf5xz0HKyzdCUKfKQeX4D0JVkJpCWTxIQgiy1EH5l+dB3wOiQjcTSkJDE6ZCNfEAQfr59EHYBHZB9N8EQkODvUHLA8JB4o+vQkAlEkJV/eJCU40TQkfi50IEe5tCAiraQdU6mUIG3tNB/BE5Qgn/2kJKBZFB5CXqQkUnRUHpQIJB7B0PQfoBn0I11tFB/JC6Qcvv8kH37gRBzjliQdh6CkH4IktCQu+WQjWxBUHQYbVCQfMCQeLDR0JQGo5CRlglQksCkkIFxiNCBTEGQlVOw0IBDvRCS1FjQf6DL0He94FB/Q/YQf8dEEHKAiJCPpIXQeeTtUHivqdCSmMKQkQ3MUJDr7NCNkyxQlCCe0HSmEFB43rvQjssD0Ht+zFCTYP6QdS7s0IHmOxB8tyMQkCmQ0JHY5ZB4G+UQe4cekJGOB5B/DAuQdOL1kJAJu5CO2FgQjj0k0JDaVlB9TsjQfbIb0JBOdtB/yrXQeLH9kHZE4VCN21+AAAAADIIRkxPQVRfMzI4AEABSP4BUwgREP///////////wEQARD/ARDrn/GzAhDpr/qpBBCLuKufAhCqlZ/jAxChiPCVAhC+kcGXAhD9m9TkAxCJsOaLBBDP+7jVAxCcuc/ZAhCeu+bUAxDikryXAhDkmOGLBBDICFRbCBEQ////////////ARABEP8BEJHFvPECENvNyNMCEPem0OwDELGoy+cCEMf2+pUCEKf7gKoCELCCp+kDEJvPmZUCEKCxseMDELzt57ACEOzC4ZcCELG55ZwCEKuN5tICEMkIXGMIAGRoAHMIAHR4AIABATxBAAAAAAAAAABIAVDG39+WzMPa3n9YAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEMy6/CYQ8fiBmAIQvIbSlAEQqefKvAIQ1aWTPhCY78zUAxDdxNqbARCJiYjLAxDi46eZAhDTkbLhAhCn9OY6ENOWjzYQzomhqwMQitO7tQEQ1OCr2wIQrIfT8AMQ9snn4AMQ/MiZnQIQ6tz4ShCunbuGAxDMiMyIAxD969GxARC/rKvUAhCj/7gVEJis/e4BENO0/eQDELyEr4YBELT+uFgQ6Lff3QEQ2uf6ggEQpe6l1gIQq+6nogEQke6xRRCU/r+nARDk46mSAxDq/rDlAhCXp87FAxCaidi7AxDZ7+OmARCBhIOsAxCi7vzrAxDE2LPNAhCTjwEkKvwHQdwfGEHsiShCUZKEQgj5/kJa3UVCAJCQQj2rzEI/85hCSyyXQeY/5kIIYFRCTds5QcsMqUJKOpNB7PyjQlKZS0JTs35CPsFwQkM2UkJD8BhB+HzzQjpT1kJREtRCUy3FQkTiBUH4oEpB9gA/QknY
mUIC1OpCOX1ZQgX24kHyZUlCT8L1QgvZpUJPe4dCVq05QlNyNUHgKulB2z99QkmD5kJOkShCTNTOQkjmQkHYeFNCOcAfQeLTnEICmr5Bz3+yQfPDbEJW5jJCBe9uQjxjBEHjsRZCO8CkQkyDRkHoovpB8+h7QlYxVEIG4cVB3WviQjxBnkJMJadByt83QkFfzUJFLZlCQ/SmQkx2rEH0FztB9jMHQjhvvUIJdrxCUtZrQf9d90HuoT5CU+HyQgSMkUIACf1B6WAAQgWO/UH7uZZB2LCHQj2cSUIIZ01CUtGmQfrffkH/17BB0KyxQllXjEH7QLBCVD59Qc5Cq0JM2ZRByPEMQjY0YUJJpM9CS+K2Qd5l+0I+0+BCVA2lQkNfhEJG9B1B1UQyQgjgt0JMMAVCTHvxQgVEYUIGyLlCR4K8Qf9ZY0I/WqdCT3/qQeCvv0I419BCABgnQfUgcUJPWEdCVVoTQkf2pkHsBrtB6aipQgTg50HZuKZCN8nDQeOXfkI606hB+Ca3QjaAAUJG5DxCQHaVQdub6UJGChFCUyqkQlRLr0JIY19CPMS8Qj+ZB0HoxCZCWaSEQkjaU0JPvFtCNPyRQeREZEHyLS1CP/YcQe3PokH+ViNCNvv6QeGi6kJDMSpB7Mj6QlprWEHdGnxB47RCQgYCGkI0HSlB0oaHQeucmUI5t5NB2FVXQj4xR0HVAYVBzGTAQdI4Y0JT8uVCNNPaQd1m00HmgExCPRTdQe33AEI5Bb5B1iJGQkkvXEJIOvBCNJrBQgH3N0Hlf6NB4LngQlkMdUJNx0BB60OBQlQ0YEJUOWxCQWsqQd2HQkHRdeVCP5GLQkYoV0JEdNNCVYWrQkX5FEHt/fpB0TvXQjvHaEIBUS1COmcxQfdW0kHwykNB/SKVQfkmKkHhLDRCTNGOQjzckUILuTdCAG4PQgITgUJCwrpB+9NLQedg/kHSXUJCO/rHQcoKCkI5ncVCRUY2Qd/Tq0HVDWBB5H7PQjnY2kIAH0tB6d2IQjoNh0I1hPVB8pFFQfa4fkH5BT1CRE4pQfuOHEJE2fZCQk7lQjtCB0HhMfVCPnQ9Qj9K00I/HTJB6s/eQkIyfkIChPxB1Kq3QjbcLEIEfnpB+yqiQe62xUI4mKlCQYHiQgmkjUJBurVB3ctoQfOtmkH3ehxCQYRtQk6JIkHXSA5B9Z19QjfnxkHbWBBCQgj+MghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEL///v4DEOnnse0BEPqr5QgQrfLblgEQzqP1jwEQm6LS2wEQ3MzDAhCUnaGwARBAVFsIDBAAEAEQ/wEQ5/3+/QMQ4ead7wMQ/rPRxgIQnd64UBDUksQOEJGD8TwQ+LCSkAIQ0JuD4gEQwGhcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUIrmjcrstKuAyQFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEKu52A4Q2PmF9gIQ/sqhjAIQ5Pi5ZBDA/6XkAxD516PFAhD1s7eAAxDM37W5AxDH1sCoAhD/uIbvAxCgt4fqARCTt/7dAhDgyozGAhDs2tGEARDPk/reAhC038OPAxCZ/7YMEPbM7U8Q7dfTkgEQ+4ek3AMQ5sTwywIQ1tWlswMQ9/mc/gIQufCiqQIQlfXRGhCuvfj2AxDY06mWAxD8lf2pAhCv7vehAhD0p5iiARC/p73NAxCizsaTARCJme/vAxCC24vFARDK1ohJEJK+91UQgu/97AMQ6sTfkQEQ0Zr+gQIQ2e2qkwEQutSorgIQsomm0wEQrxEkKvwHQkzCeUJIAulCWXdTQcto7UJOTYhCCIr7QgEV30HfMaNCNaMKQdvRhUIFNuFCRPwJQgbnw0I6fzBCR9OsQdhX70IK/w9B5AmXQgEDl0JalYdCWmvLQfJ27EH2fYhB
0PsrQdcHy0HrGj1CO9J2Qd1xoEHjC81CARuXQkRzTEI4bZlB7mlfQkX2hkHLhq1B/cc3QlqHTEJK6wRB9uvpQkYOGkHeoaxB3yytQeDvj0HxN+9CRZ4CQeDRs0HIS+5COA99QjbO+UHZZUhB3fyEQfzvCUJQSZ9CCKw5QkCE5UJOgUhCTFlOQkfmKkHelAtB8UkkQdMCDUI+zPZCB0QfQkmqGkHwgK5B7sTrQlXV0EHhT0ZB98NaQlRF+UJCzZpCOkIDQc4xWEI/gH5CPbyaQfBRg0Hn5kFCAARQQd7lZkJDjf5CRwGsQf/tnkJGVMlB6qeyQlNIw0HuctVB67t1QkoyPkJUWoBCVBtyQgeySEJYxcxCWWsKQkxL/kJXmNVB3Y+VQlCs1kHzYQ1COOpMQjWvaUHYjrdB3RSIQePisUJHzmBCTQOoQfwEeEJKZeJB58C5Qkan7UJH4exB8AuPQkH920JG8hxCWBgPQlduCkJHFPxB5rT0QeeZc0HoZrRCVMOLQghqnUJRFOVCB0fPQfWalEI6SMVB+qQ1QdplB0HMvVlB4jX4QdjpG0Hrz/5CQWziQjdTSkJEUv5CQ/nJQfAKnUH0XoFCVqjuQkcZxkJQBB1B0gRiQlONBUHUM2JByN3+QlDXwkH+JTJCBm7CQgUub0JWtUVCU/oGQd2bGkI4k9JCR1ZSQe2/YEI8Qw9CP3OwQdt7kkHzi2xB1nL4QfQwpUH/6mpCNdPoQeAIqkI/USZB2JUjQe4RLkJNc+tB+KLbQjSly0JH9nVCUqSeQkUpqkJVrFxB5NTKQdlu1UJUDw9CA0+lQgcIdUIEHv1CBnAeQjn04EJBl/lB+EQAQePpFEHp+g1CAG1LQgdA7UI1vdVCPO9CQj5t5kI+x2JCOlykQkMC+EHf/IFCSBtcQkJs1kJKVpxB5Ua6Qe+5S0IJXxlCSJKuQlIX2EHOtVxB0L+KQebcU0Hl5p5B3WrYQgEthUHwQDVB+11sQerZp0HzcDpCRGpnQj2FA0JAxudCPDplQdsmvkHlKEpCU4EKQf+xMEJRtx5B69WnQdbcF0JG8UNB9rl5Qj2z9kI9laVCPoUMQk44IUICajBCONvlQj65NEI802RB4vRlQkhpBEHtCzhCO0c/Qj0P1kHhyJNCOAAAQgLLxkJHWJpCA+eVQgTrNUH5eqFB8s5uQdMf0EI9VihCQX+hQj4UdUH2c7JCPWGgQj4K3UI0tBlB6UGAMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEP///5sDEO3+iS8Q9Ie91AIQjfmDzAIQyfbjsAEQiIeMxAEQtPaQiQMQ9MGzWBDFClRbCAwQABABEP8BEJ/fn7cDENO+4YcBELGD/8UCENi7i/oCEMG/x5UDEMr3iCwQu9HCrgEQoIHFQBBAXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAAAAAAAABIAVDCu/+Xoq76/M8BWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDZ+ZjCAxCrvb7HAxD0mfvAAhCbkaJvENDPqKADEMC+jjIQ7ajRdhDb65i0ARD2oO3hAhDu0s7cARCRsYKDARDQ1q7MARCv+/HrAxDKr4W9ARD7m/DyAxD7xK2vAxC1+IvdAhCbmq2eAhDA6rW4AhDfn/fSARD1kcp6EPTcuEsQ2aSGzwMQotjv3AEQjKGuqQIQ6fbJlQEQkrjCSRC05L3gAhDC+8fuAhCXucGZAxCW99LiAhCQqI74ARCG19FNEKKsuJMDEK+5maQBEK2/zukDEO29ytgDEI7gu3wQnOezvwIQ7O7LgQMQ0+Tn2gEQ4Pm28QEQvy0kKvwHQkssKkHKwqhCR5pHQczvM0IEzx9CCfFqQkoS+0JKBfFCVghTQj5qyEH1R3pCA4WdQgZv00JJ/fhCTpAuQkuyMEI/D8NB/2wHQlih
ekIAEXVB3cBTQk0f00Hc/rRCCbAMQdgquUIJ1u9CP9bOQfZT30Hy9X1B5ekZQgmkcUHZKI1CPVZWQkKmOUHcf/FCV9hnQk6/d0JQizxCWhI7QdkEJkJTx3tCRtHZQk/HUEHSorFCPmPrQgJqj0IBJ7lCSqi3QeMHTkJXyUxB4ZhjQgZFr0JGRVhB5MjlQf3L1kI5hChB9r8AQkjNGUJUSvJCSnEAQk1+GEHVGe9B/wpcQkB19UIJAsxB0jUrQdeVxEHcSbdB0xEyQk2rTkIFzztCAK05QkydzEJTkfZCRX7+QgawAEI6H8tB4LG1QgBad0IA4yRCPKndQfODakH5RDtCSWPOQk6vqkI9GJFCPpODQfMSv0JC1wxCUUBLQld8MUJWLvlB2MOVQcvyRkIBLlpB/k0UQjytCkJOHddCQ1J/QkOIG0HxY+5CS2RHQkJzo0IHNwdCPRi9QflglUHsZo5CPoGbQfFhQUHVOe1CTzmYQdfF7kHqnI1CWB3OQk0YE0H3vV9CN9QZQkFhq0I65UZCTeM4Qjb7okJRIPFB0jtbQjtoLkJBZDVCNhRGQeqwrUJSovtCWkZ2QfK8O0HmHNlCAfRaQjgZwUHtmpVB51rKQlO/70HzBoFB9ursQd4xKEJJUvRCW9bbQlAb7UJP/aJCWWuFQenFekHTRXpB+OKtQlsZckH+yINCAXofQj+PsUJCsxxCA7kiQfqu8kJEbhNCPIpHQfaRIUH73P1CN5irQjW72EI0qz9B4f9yQe7xK0JPslJB4sMWQfrKXkIGw31B4Y81QfKLRkHhPE1CUvhdQkGBwkJKSxdB2yJ+QjycI0H52fZCOGexQfL4KEI8lXFCWWN4QdA2p0H6eHpBzqyfQj74VUI0U3dB4+X1Qf/QxkHuI5BCU4T+QlkBBkHm8zpCR9W+QeMnmUHzOg5B6MX8Qcu2V0I99d5B78fGQeCi3EJDtClCOq6pQfeE3kHoSeJCOjuJQjjP1UI/ygxCOMBgQdVusEHdT1JB+r62Qj7Z6UI8xH5CQc6EQjp6a0JB4LRB3z+OQkcFxUJIzt1CQq8sQjnThEI8PYFCPjSRQdAC2UH4aDpB7gFoQkxY50HJ7IJCPU5rQfSE/UHV2VxB221qQkPjF0Hby3JCPhqhQdw0tEJBgytCOWnnQkWHTUI9y3lB1cCQQkLgqEHS5JBB21wgQkD9kUJAczxCQ6+XQdCBNkJPck5CPxbZQkDwjkHorjlCQvK1Qj4ZPEI7oYRCTfsOMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEPf//94CEOvNr/MBENW8yqkCEOOHkPICEID3q8EBEL+A6JICENmx8ZkDEL7h4OUCEJQQVFsIDBAAEAEQ/wEQ377v/wIQmszzkwIQi7rAiAIQ5YGI/QMQpP+N0AEQrcaIigIQmLHRnAEQ+6mt5AEQoEBcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUOqk9vzh4LnuogFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BENK+7e0BEKqNkf4DEKeR8rABEOHg0MwDEPmmv5MBEMi0wM4CEKXL2oQDEKH20TAQ+/nzgAIQl8Os9gIQ16CDlgIQxYjK9AEQ4eHhiAMQwaT0RhD8v72MAxDt1eS3AxCHrd8DEJ33mbABENqfk+MBEMX01/ABEJiV/aUDEInBj0EQy6bKmAEQ++O0SRCy9qf/AxCLhMKvARC8ia+eAxDWu6+YARDFx4hCEOHXpcMBEMakqiMQwJXe3wIQ6ZivLhC2ufTeAhD6+o/VARDXw6FHEOTwv+gBEOzh3lcQ39qRzgEQlbjb+gMQ4aTIhgIQ6oyy9AIQ2jAkKvwHQgZMykH5TolCR8P2QeU7yEI2JTBB+XTZQglREEI2LrZCO/KpQfWUVEIARMRCSBqUQj7hIkJO
IadB4sIjQkdKXkJDFVZCNVnYQkLsMUJA4IBCA30EQfCQAUI83dFCBLJ5QlvZK0JV7DlB3XJUQjbmckHzORJCQNU8QfmIcEH67MBB4qiXQgcUQkI1f8JCATRdQkwBr0JUKYZCNvmmQkHVL0JC/ExB7jiFQjdHwkJM4+JCAFxDQgDTE0I5ggxCVKUBQjzTTEJDYlhCPyGyQlf3oEJVjilB6u7dQjVvUEHfdFhBzitlQfFebUII/IRCTly3QjrW9kHitH1B6yrRQk8VI0IEMbpCVVBzQkgHF0H5m6NCRRA+QkoBT0JbAPFCO/dpQkWCskI5tLxB26b3QcrXkkI0rUJB54HoQjmpOkHLS9RB+wAmQeAMD0I637VB12qnQdjH3UI5kulCOT/kQfeOm0He7i1B8pSgQleXMEJMQhJCAz7/QgE8TkJAImdCQVpQQlXz1kIK09pCTk9OQjXg7kI6gZtB0M/iQkIYEEHqQeJB29HYQdlKRkHw8odCSeLtQe4tOkH2DBlCVA1FQlBnUEHVJmdB8hFLQjh310ICkrZB2G0dQkS7skJBKcxCT3aOQgUUeEIHvjhB2RZnQfMRzkJF6WlCAS3aQjy770IEJ19CACotQkBwhkHLGRxBzQvwQk0kLUHYrINCAunQQjQJGkI8Hh5COTQzQlfiOEJEcHtB8OmsQc0fzkHUqBhB7bdzQgLdoEI70+dCAE6HQjkSHEH+l+dCP6TDQlImj0H79htB+txdQkCL20I9tLhBysEbQe+YekJI80FCUQmdQkOJ1EHXMkVCUwcuQdLb+EHluUBCR2nHQkjVtEHURd5CVox8QjxOTEHiuUdCAIDAQdZ9QkHtGwhB5KYfQj/MKkJKJwVB11K1QdZgyEHlX39B1ielQddaZEHT11BB+mMiQj4FCEHv+o9B83DmQcjsoUHLizRB8usRQlTSPEI4xuRCTmtTQj/9kEJB3JxCWL3HQkVMFkIIeilB1jitQfZ+EUI26ABCNzobQc9QVEI5qwJCSSI7Qc5eREJQW9dCQAfwQkdhDkHbbYVCWZBCQkEGuUH9wbpB4yjAQevjA0HdBhFB3xOaQd2xGkJXlHtB7QSXQk2A5kHSXCtByYSBQeaZSUJErKBB62tRQk3LsUI+vfJCT6sXQlAED0Hg0sNCRGz9QdGuGEHS91lBzP78QlPefUHUHPNCRD8mQkAwxEI7/bJCUcEWQk7tj0JD3btB7315QcwaO0JQ0JtB+ebfQdxptkJEq7NCQXXuQlhpSEHQSUpCRAFqQfGEDUHPSp5B+ERUMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEP/v//8DEN/F/d8DEOq03ZEBEMeD+2EQ0eCYPRDR8eitARCwnYGqAhC4iO4mEMQFVFsIDBAAEAEQ/wEQv+/P1QMQzae0jgIQjqD0lQMQ4pHPYRC0urqxAhCh59BNELnJkDoQuMjNQBCGA1xjCABkaP///////////wFzCAB0eP///////////wGAAQE8QQAAAAAAAAAASAFQqZHK2rC+p8PCAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ0fC45AMQotmZ7wMQ5Zn/yQEQrdLS+AMQiIPL+AEQ1v2O/QMQ1c2DrAEQxfzeqAEQsOCv1AIQir/l0gMQvNHwfxDfqIb8ARDeuu1uEKKj5Z4BEKzNvGgQka6whAMQkKeDaRDQnqWCAxCbseXpAxDxzKtpEJqMopYDEIf51JQDEKus1YwBEN/44Z0DELe2oKEBEN+Nmp8CELPfkYECENXAhv0DEIyfnCQQ/q7VIhDJ98KuAxCl4rJ/EL3spp8BEJvI9Y0DEKHw1u4DEPLgyLADEMPRuOkDEN/GyMUDELrZ4pMCEJjji/oDEKCehgMQwvyIigMQy3ckKvwHQlP60kHtTxlB6jrcQeANrkHJ1ZRB/lC8Qfk9pkJW5TNCPdkiQgJL
gUHM1idCCxPBQlCvaEJNOSNCOuHhQjsfoUI55x1B8QfbQjy15kJVIwxB5PWdQlZKPkJWl4dCWOFRQeMkXEIA+09B9tExQjkIHEJK6TxCUhEoQd7YrkHbgq1CQh4VQjUxMkH6VVRCQUxZQkT56EHkQx9CTAZXQlNpn0HsQ/pBzWfPQkWWKUH0YzlCVP7aQlGYV0JZB/ZB86xpQeeY5EJABxxBzWugQkU1EUHPSC5COCQAQdKrS0IFmFBCSR2TQdGM/UJSgdVCSjVWQctxokI8mUZCA6HrQdJi8EI4WdVCS3uzQlNDC0H6AmpCRtj4QlKNM0HOJtRCTMtPQf5iR0H9O/pCUa9pQfhFCEI2Pp1CAxHGQgre+kI8CgNB4GY3QdKLOUI6JdNCPvs+QddwjkI2j9pCNzA6QfVhtkH1k/FB8ZagQkIhREHeTfZCSdNyQfKUwkICD4JB30qZQgsmb0HUHBhB/nbeQjQmxUHbOzBB/rTpQeQvk0IGt5dCNDLhQewXyEH7QdhB2nQSQljnUkHjbrRCBjWoQfqenUJbPTpB9Rx6QjXwNUJABBpCRtW3QkR9HEHUT4xB6LWVQf8SxUHzcYNCTO4JQkUGH0HeSxJCUjtOQkda3kHjTUdCQzKKQf2I3UI+DbNCOe5CQjorRUI5DJpCSYoDQf5mFEI6qmtB35JaQkqkHUIFlTtCS17wQf1GA0JJi6JCR/kxQlA1QUJXRJBB52DKQdb4QkHZ/gJB9DbLQlEiBEH3VwdCV7ijQjg8E0HYS31CRlUuQkUM10JEB6lCOp6GQeHTBEHZJK9B6SQUQjWRb0HmOOtB7fnQQj/lPkIJzEdCSj7VQdYI5UHtMXtCQihUQdVZ4kHwzo5B8oEvQdelw0I24TxCVZsIQe5bOUJYPvhB8f9aQjeAkkIKpqlCBqIWQk92K0JNF81B2dr3QlSyTEI6trBB1T9mQkSAHEHX+DlCROJ4QknRQ0JDcwFCPwaBQkAvGUH9uVRCTjTsQki2oEHwBBBCNkScQf2KM0JHoMNB5ZnRQlkIIkJC5oNB/eVZQkQ+2kJAs3lB6ir5QfpoJkJEUKNByU6mQc9ERUI9lPBCQCtlQj4NsEHMKthB9aRKQfIr+EHuctFCB4DXQkCT7UH9/C9B1wQ+QgbkNEJEMxZCBgf1QfxV60JRMopCT9S6QkKCyEJGJFdCRvZ5QdNKcUH6Dx1B2ANIQkhCg0IArwJCOBB5QgNmQEJBm2BCN4v4Qj+EgkH1tGlB/R0mQgt210H86ZZCURxtQgM5lEH2AlpB7AMlQjSEokHmFbpCB7O6MghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEO///+UBEP/d9fwDEK/VsJYDEO/mkskBEMyhmZ8CEOjB58EBEICSkBAQ4vwIEOoMVFsIDBAAEAEQ/wEQ/++/exD+2e9OEK+JkTYQjv+TzwEQg+vMDRCBwrLeAhCUiuXQAxCXxIioAhDABVxjCABkaP///////////wFzCAB0eP///////////wGAAQE8QQAAAAAAAAAASAFQgq/cr4eeqKbvAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ+v/CnQMQ2IKvWhCp97jQAhDAub1GENTnkccDEKr4q98BEOr7ky0Q8dHG1QEQ17TdvAMQwuGs7AMQmqOx7wEQ652iJRDiiMZQEJvq57YCEM7/rjUQ0fLrtwIQtomFjAEQtvCitgMQ957toQEQ1L6K6wEQ0/fdLhCVr63pAhD3yZaiAhD40+S0AxCb/fAQEM2StgUQ8P/uxwMQpbWaqgEQhJaEqwMQjpGCkAIQjcv+yAEQwKex5wEQqp67pwEQpIa2rwIQ35fnmgIQq+v/xgMQ3q+rQxDVpKyzARCC95NLELL8tYoDEPKv4hwQioSciAIQtTokKvwHQguAMEJTDppCOkkPQjz+VkJIRpNCPQ8aQd8K
bkHqTWNB82TbQjoZikIE5qxCOIZIQkLvrEJPg6tB5mXaQfSDcUHbS8dCWux7Qf/lhEH1eUpCSicUQgvxckHgdbRB051PQgLCVEHqezdB1D94QeJSLUIBQIVB3565QfQBAEJGZkBCTVqJQjlup0HsWQtCPsTPQfyaD0JKrFFCO1tyQkvvf0H6ostB79AwQk1+qUJHPOpCBGlwQlGJiUI5R3NCRO7KQlSGl0H93TpCAXCKQf6bJUJK/kdCOTVwQctxhEHqzwVB5wqiQcmoU0H1+DdB769MQeVmkkHS4sFCC27bQdc2kUHuUK5B0WV0QkHHlUI79JJCV2zbQk2Z6EIH/LBB2TMDQdm4XEJUdJpCRSq0QkbjB0I8v7RCUWarQlbsk0HMatNCOt/tQkviKkI2NqxCNreZQgGX8kHZq7lCQYA1Qj6cvEHgnQFCV+VOQjubr0HO5GhByILnQjhm6kJWaDRCR8pLQfPL0UJKhD1CTiNiQdvUu0Hlg0pB+RZQQgH92EJM5aNB2/nNQj8nrUHKTSdCPYXIQjVPDEJLaDRCR4LGQcpmokJLRYhB3IWxQeU1jUHjYyxCVP3yQkq1nUHPThNB9v5iQkjsdUIKck1B+Y+AQfnvzEJHuXxCAed7QlsX8kHqfZBCVsbEQe1+mkHeRGRB6nmzQkthCEJN5vRCUENAQkYV90HYGEpB+W9uQgKIVEJGdOhCC/eUQgYgo0I1tvxCQMmNQkN2B0HIih1CORSIQdYk2kJT2CFCRDfjQkcIxEHTKEFB13OEQj7sikHLA1tB8i/NQgF+xkJPkxxCRFB9QkP00EJMVn9CAj5NQlk9f0JPaahB0UYAQlRv7EHkm3FB3/49QggmgUH7gTdB0mspQcxp7EHOflhB5tKeQfSSh0HQrUJB+4UoQjmtLUIKTuVCNcxDQk1dHEHnQiBCTIMcQkSHw0JA3j9B0CKfQgdeG0HoA79CBlrMQfAPhkH6gFdB6csdQgg3SUI/7i1CR2+NQkVQJkIIKr5B0vd5QgapY0I3XU9B34oGQjqJm0HeUBZCCKp8QjSJC0HrzC1B2ZeKQlDxNkJAEsBCNiT4QkvrFUJTp2hCO976QkSpvkJLVohB4Pa0QfV9t0H1gylCOjr2QjT7TUHQwPBB1FULQjgv4UHSIgtB2PMuQdaDvUHrSx9CWQ8MQgYP7EH2dRtB7YZJQlXLU0IFVLpCBrqkQga5HUI0NYRCP5N4QfxjbEH7O/RB6qBDQfmuf0JHoEJCPNo+QkoedEJBsfRCRkJ0QdnRnkHbXKBB4+8+QecllkIGwtVCCIjWQjutyUI3EINCPI+pMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEOv79/8BENm91rQBEI7siboDELjx6vYDEOaa+gYQzqeSLBDc4sWvAxCTkoSFAhALVFsIDBAAEAEQ/wEQ//vP9gIQ7b6uwwEQuu3o/gIQ+6Oe9QEQpqyXkgIQxqaAiAIQkJDDigMQpJdgEJEjXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAAAAAAAABIAVDr/sb/tM2Gx25YAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BELDvhXkQrfT/ZxC41q3eAxDGodf7ARCIx//SAhCbk+HHARDY0J64AxDVwLSgAhDH8uNdEKuI9KACELCA71YQgYvxEhD96OLSARDi8dyeAxCn0cb7AhDDrIf8AhDokc3fAxC8vYRRENn7kv0DEKG08C0Qks70vwEQ496wgQEQ6pfuQBDv0e2ZAhDjjIPlAxDkotfgAxDqk4DOARCbgsWiAxCF5KtyELCN7FEQ9toEEOb9lJ8BENXum8QBELbZr/MBEMHyrN4BEKnKmsQBEL3N0mIQ/pqh+QMQ5/TLowEQ49bSsAIQ1fXF7QMQ/KP6jQMQhZYBJCr8B0H945pCSmgNQjjdskHY
Cq5B7i6BQkIpEUI4b0pCTRpMQek7UEJS3rBCNCXYQgTLJUIEPFZCPJ4OQgSHbkH9bShB63siQkNf/0H7s8tB+aagQeqFZkI2VqdCNDFfQj1i50HZX3pCAM92QgLVmEJa3HFCU87lQkZqukI93R1B27FhQeKo50HbhZtCPU1PQdgn7UHyC8hCAiP3QdMbCUI9HgtBzOsWQlCof0JRalNB+yoAQgL7MkIGNeJB9QF2QgeqMEI6DkJCBUjPQdpr/UI7BYFCQJTxQkLRQkJaDZFCOQQxQjk/2UHihGBCRvmOQdrXckHYJYZCQMExQeAUH0I931BCS0xaQj3ivUJQ7NFB4fHyQkujnEHq9tFCR3lgQjldnkJYkStCV9VDQkXxr0I0H5hCN+eYQjrBGEIK4FhCNPaZQgEUCEIB9UZCRccJQj/W+kJKxx5CT+6yQd2MkUJWMYlCRPefQjnyPkJFYmFB5roCQjR95UJE6ElB4FgQQkf13EIDioVCRKu/QdMPL0Hi2iNCRRVvQjVwVkH0qsJCOYCpQk92ckHiI2RB1CkzQj6wg0JVg6xB3sn5QlGmE0JbitdB4m+vQjbK/0JGLuJCUPZ6QgmvW0ICZPRCSN+0Qf6mGUIFI1VCCl4TQj82DUHhefRCNr59QjrxyUJAFupCRtISQkRSKkHRSKVB65r0QdkGwUHNUIJCOuk8QgE1bUI+y2pCRHJcQeQVDkJOFpxCUy7tQeJsg0HxIelCPRFzQjcu2kJGncBCUan5Qk6YvEJCypJCCkUZQexHIkH6XqlCUpW9QgFrvEI9uNRCQOe0QezAC0IGfgRCUaPmQgmV6EILESRCAJ/TQfqOAkJLkLNB5oEgQgfNjkHNU7pCOmymQeiBsEJDOAVB3AAeQdoFEkJMVi9B6T7DQkaB1UHoKXZCQJXBQkxzjEHeRvpCVm2WQkOwhEHarWpCSh3JQkAjgUHwLItCBm6kQes6LkHoSWRCOcxvQkYOEEH5VltBymmuQkHQfEJXwCBB9TEAQlPqQEJKxDhCBwPOQfMPtEICYnVCRC3pQkOvAUJE/s1CBWdcQlsvDUJWbK1B/ciJQgMmx0Hs4NZB6oGQQj3te0I2P3BCRuMMQj4x5EJQTjlB59m/QedlYUI3XNBCO8fFQkJQ7EI6sZpCUh8aQeIR5UHItWpCOb7SQkuE1UHZm4NB2HcvQldcc0I6gLtCSP2EQfFqUkI+5hdCSq1iQgsQIUJPf4lCWGLSQdBnkkI8srZCP3UvQkPsPEJHFOlB5wHoQebl1EH7KUJCWC7EQebP2EID1cJCT+50QgS4OEJP3YNCOiogQfZDwUJFSD1B+rZvQeBZ6TIIRkxPQVRfMzI4AEABSP8BUwgMEAAQARD/ARD/9//mAxD1+6X8AxD/5ZDzAhD6gsuZAxCew/uhARDQodsTEMDhoIQCELqAtMICEIQKVFsIDBAAEAEQ/wEQ////uwMQ8bvFtwMQjsWCvwMQvpfsfhDT98pSEP6hysoCEMngyBEQhIDkAhAAXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAAAAAAAABIAVDc48bQm/HNn5UBWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDg6sfyAxDHmLLmARCKj7X1AhC3wJ6oAxCq0KPeAxDF+8CfAhC1nK/cAxDRy5LkARDb9f/bAhCg+v+XARColavAAhDu763vAhDin/LtAhCpzYauAxDZzaPjAhCQl/ieARDkjdRxEMT+36UBEMiqvI8CEIaG5A0Q+NaX0QIQsK2DLRDknt99EMTupYMBELm7wQwQlITRywMQgbfj4gIQtOf5OxCsiZOLARCcl0cQ2+qa5AIQuLL27AIQ8ojo2gEQ+OW3ogMQ/uu66AEQnPy12gMQpaHRhAMQqYXahwMQr6+asgMQ9uj4xgIQkYLgkQIQ4rPxkgIQtQQk
KvwHQfocxkI6gDpCVGVUQjZ7R0I7DYZCONeiQk3QUEHXMUVCRfZ6QjRT0UHMYvxB9OPKQev5zUH1h1pCU9K8QeH9YEH6LotCOBINQja1c0I8hHhB0nAdQkTBOkHsc6NB5squQeYDv0JX/qBCC6WtQkuHMUHwZ0BCUr7eQjkgHUHuJgxCQMkiQdxPgkI9dWtCP7CrQjWShUJYDjFCQPdxQjR+lUHvx8xCU8VvQk3QsUICUL9CV5IIQjaYpEHktJVCByt9QkfbzUHSwZdB+VkbQk07z0JEJ0ZCS/nOQdXW6EJaTQFCOU21QjWVCUJWAdxCWZ1PQgv3h0JH3LpB4lhWQjkqhEHl+zJB20BuQelHuUJSsZBCQPJ5QfjIWkICF8JB2YXlQd4roUJKhmJCVdstQlD/dUI02ZRB2BzhQlDHCUJIlllCTEK4QjrioEHdv6lB/u/lQlhqNUHU2vVCQZzJQjsW9kHficdB3BSlQfOsrUHi7nVBzgvGQda6gEI8F4NCRoDGQdiqw0I4H5ZCSHsVQeP5U0JD22lCSO6VQd1v5kHRoVNCTYR6Qj1fbUJGH5tB9GtCQgLWmUIETCNCQStSQf0t/EHuD2RB1931Qdqsl0HrVFhB30t/QdmYakJJj+hB3w0KQeUkWUJI5vdB6YbuQkclf0HTkVZCRT8DQdaqvEJB2WdCU886QlpEmUH6AfRB7VOOQgMRZkJXB3VCQjlmQkM5jUI0NDNCRG/CQggDqkHno7VCNSHEQeEo9EHj4fxB6QWiQjkzGkJArqFCOiNAQd4ECEJCy8FB7gzjQfMO20HVuCZB0Xa/QeTrK0JIdOlB2EqEQlE8rUHjMINB28nHQknF8UJCbmtCBCM1QgeiskJKbQxCAdMRQgADh0JAo/JCT7veQfVGokI5PQlCWL2sQjceZUHv/YdB667SQe8tuUHP2dBB2fA4QkIHhUJD8bVB1dWfQkepY0HwYpVCSHkwQkj+hkHQ4OpBzcfoQfFN9kJEyqpB5Tx7Qk0vMkJbvldB+NLUQgms1EI/KvlCNN4UQexGSkJBU65CRDFmQc5aKkJMCJJB+DlYQk34vEHZmidB1Z3OQeWYX0JLc/NCVsEYQlM1GEHaV8lB8RilQenKo0H/2edCQ+okQdmVKEIFqd9CQlomQk55P0JV/KhB/2YVQgkudUJRD0ZB/+UlQkY6gEJKcpZB94AuQkmMEUJASRNB0ScSQlmPG0JGxFtCUIU+QkebEUHcBrlCSFSXQgtVY0JNpzhCV5HZQgZs70I8BqBCA0vHQlDkPUHUdXpB31dHQdyrjUJCCTVCUjJoQdYZ7UJNuqxB90L+QgQKk0I6mYdCSeFYQk6Oq0JMFncAAAAAMghGTE9BVF8zMjgAQAFI/gFTCBEQ////////////ARABEP8BEK3prOgCEJfDtOMDEK3ejqoEEOSip4wEEJyt2M4DEL3Sq9cDENfO59QDEN+xt4sEEMj+p6AEEIzUnucCEIHWq54CENzX+s0DEPWP2dQDEMUIVFsIERD///////////8BEAEQ/wEQ07jv6QIQ9uqo7gIQ392OqgQQg5uSlQIQs97OzgMQvq/NngIQs4SM1gIQoY2v4gMQz6OBrQIQyeepowQQ6dXI3AIQo+vBiwQQg5yYlQIQxQhcYwgAZGgAcwgAdHgAgAEBPEEAAAAAAAAAAEgBUNug78uryp3YEFgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQyfvQIBCAhae4AhCQo88rEI35m6wCEKyMtfwDEPvfiZ4CEJn47L8BEPmdnyQQob+TXxD5/42DARDQvs4yEIPJl/4CELauxbgCELb6jogDEK+oz5wCEOf5yXAQydjU4gMQvuvpxAEQvrbr6QMQh4/i7QMQnrHIdxDq5sCCAhC2jaXyAhDH3bWQAxC4vObgAxD/17rBARD5vMaVARDu0d/HARC8gcYt
EOLh5HAQ6/KfUxCh+pPQARDikP3JARC04sQnEIvJ/EUQgevyzQMQirSuywMQ9vmKsAIQ2uHojQIQ7f+iwAMQhrfVrwMQ64umPBCKAyQq/AdCWaVqQgjJHEJbh/dB8NsCQlCUHkHg461B7O3RQgEbA0H5S5FCTv9DQgBsv0JQnehB6wlEQdvmd0HVEqZCRffrQlhlIkICzkNCSnO6QgVLEkJED2lCSgDMQfkJlEIJvydB6EyWQkdoUkI6UJBB2zryQfoRwEI/H1lCVRysQlNoZUJE4ttB0j4lQfFKIUJRd2RCTX64QgM2FEJO1X5CA/sGQk95c0JCkAdCPj6qQjtcs0JWPQRCULCJQdybOkIGPrVCOCnyQj0Mk0I8kwpCSfhsQgLNAEJH8yhCOZHJQjjiT0JI6JxCA79hQezgnkICg91CCH7FQeHivkI0W9RCQNZNQjvveEJEGkVCTTkqQdvVE0JZGdFCVwmIQdP2YkJTkZ1B/JM9QjWaSUJUWshB596jQj1m5kJYPeVB9/ibQlAdUUJAgdpCTNH3QeWAV0HZsa1CTzvgQkxqHEI6OV9CSsJ6QjSn70JN/JdCQt2FQjRRcUJD0cBB5Vx4Qju/4EJXB4pCTXJrQdKPUUJRo7BB1sPXQkWLB0HtsYVB14DeQlGZqEJA+5tB+7tIQjgdYEH3YelB6CuIQkLVwUH/mHBCRbxmQlB/W0HgwIpCOcbxQgtGDEI7z4pCSnpbQjnvG0H0H3RCBUdWQf5WS0HUOzZB9FUyQjlyaUJLUqRB7S6lQji790HlmUVB5fO+QjsA5kHwY0dB6JCPQkdUG0JMsEFB4P0GQemuwkI/xKdCVOMpQgvo6EI2d5xB8M38Qen4yEJMTUlCAHl5QkM9p0I9Y8tCPQH+QjzxN0H8eCRCNU6HQkuBl0HWVytCU7M6Qe0DZEHSOxNCN3RoQkJpAUI19n5CT0F5QjqYo0I8yrhB5taDQkfYDUHqq5tCQiP4Qkxbg0H7c1RB8YohQkRVOkHeddNCSQNHQlL26UHgN09B5RaFQjifv0HdFS9B3aWrQjXZ10JBx8tCRxX0QjpZ3kHp565CPN/TQfT1AEI6j11CQ4U4QjT040JFSaxCROTRQeUdWUI5iXxB2eupQkIxc0HRvexB57+9Qjnwt0H+3qdB4um1QjTfGEHybuFB1i6cQkdl1UICkcpCSDk/Qdd9r0JGfclBz1PuQkN4UEH1/khCPbA3QfebZ0I/jPJCQ3DwQkHoxUHiixJCBtbXQdOLdEHba7RCPz+SQeBbXUHWkBFB3uE9Qjmh7UI4AcpCSrBvQc/CM0I7ef5B1qsiQdM1j0HiMMhB54H4QlDmikIClSpCPhE2QkyNxkI9iaBCPi52Qdx3C0Hepm5B3qYsQf1MaUHa9nFB6Qc5Qlomv0Hf6i1CPe4MQkF3bUJQqmBCRyYfQcrlMkI3CeBB3sOBQe4ZaQAAAAAyCEZMT0FUXzMyOABAAUj+AVMIERD///////////8BEAEQ/wEQmc2KqQQQ76eukwQQu/OenAIQ/Pn+1QIQnMnHjgQQiumDnwIQ07auowQQorflsgIQ5aDG5wIQzIX22wIQ6YTCmAIQ9LHVlwIQ1Iq2qgIQxQhUWwgREP///////////wEQARD/ARCEzKiVBBCepsLsAxDjiOynBBDBq42jBBCf4tDOAxC04cGeAhDoqo6qBBDTibOXAhDguKigBBCPx+3OAxCQn4bVAhCg04fcAhD7v9vSAhCWCVxjCABkaABzCAB0eACAAQE8QQAAAAAAAAAASAFQxs3q1K6f1fikAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQourYkwEQ+aK/nwEQzM65kwMQjbuv9wMQ2PnRXxD+ioDTARDHnOA6EO71660BEJz5sb8DEPfL/jsQvMzLVRCUuqCZARDUpqnsARDWtq3SAhCWj/GnAhD0
qoDHAxCE1auKAxCVp6nqAhCMnspUENyRkU8QiKSY8gMQgJPhtQEQiZvnPBDr9/vBARCS0JHuAhD6qebeAxDKyOqvAxC1+aNFEI3Wr4ABEIT39LABEOj+rv4CEJbnoLsBEIPP2VwQ4aqNtQIQj/CD+QIQw8HOsgMQnbHp6wEQsv/5ARCDmbTxAhD+zuDwAhD57LaPARDmo9+NARCxAyQq/AdByxS9Qk/6k0JWYX9ByktUQloqWEI4yfBCT1xJQjmW8UIEJz5CUmEMQgCa10JUYMlCA2QPQgEL9EI+xmRB5hH2QfcQGUI9rmJCT3BJQfWWh0JFwItCSesMQdKX/EI8GvNB4lPpQdPNeEJOWa5CAfm6Qje6b0JVbZRB9YYeQlqI/kJOm+tB3PmyQgSoM0JCCw1CNmq+QdVFDkI/xFxB1GTfQkBXsEI9cVRCBlfCQjZMbkIKAM5CUf0TQkW0Y0HMbXlCAapGQgYpW0JAUx1B7F7bQdywdEJUIOJCVwZTQkNtSEJGCe5CSpQsQkbQpUI9fElB6tO2QjSIDkI9ga5COHIiQkost0JH5KBCCJrPQerpl0JQqXpB2KtnQgY6W0JYzwlB81nYQdwCr0HWa8xB8SvmQgDymkHt0tZB2Yd/QgGMxEH58iJCAgGeQgbHAEJHcgxB/hsRQe5Y0UI3NAhB2PicQkfK/UHdPC1B7aYuQkgG50JMrAJB6Ad4Qdi3MEIEd3tB56AUQdnnyUHRlJdCVXg5QdMQCEHtCTZCOAWjQel9EUILptFB9P3aQdj1kUHgcRRB7gq4QfpMh0JB1wpCT8nhQkXVtUI3hvtB0WefQfuKekHdzPRB8U9JQeyeB0H0+vZB4CcrQdx5MUHlzdlB8gxDQgPFRUJBqc5B5WmkQd4s50IBcBBB5JgYQjWinkJBhP5CPsrQQdtbokI4ZbBCQTNiQkPuCUJTuqlCU68IQjeLKEHqrvpCVccTQlR9LEHehOxB3NBgQe7JGEHs0TlCA05UQd1mmEHfAA1B6KESQkUtpUJHNbxCQXAfQk1MPEJOULFB7LXUQfcyP0I1G7ZCPE3BQjtce0HoPqpCOAzVQk5qAEI7Pv5CS377QdUZakIAQcdCNw42QegvtEHQZ01CVjoSQeoBIkIEaDlB5yPYQc4x0kH/qn5B2G4dQj7+1EJCKkRB2nIKQjZdA0JFSRBCR3anQevmCUJKhRVCAbWyQfoMS0H6QwlB5axNQdn7j0I/V7hCQrhhQfCLH0JErmRB92TFQkKYC0JQkExCOdUMQkdt+kI6hHBCBNnEQddVXUJB6J5CQIBnQgMC60HsF9NCR+yoQe1pI0JCaNNCPqUHQfE4oUHatmtCV/I6QjgIjkJF5G1CPTcRQgdgU0ILu9FB3+PSQj2TUUHXRkJB0IdpQjnSREJJWJRB5FZGQjv0DEIJLN9CRkotQgr1TkJHQ6JB5cteQdx/k0I3nPhCAOj/QlKnuEI81CNB8gghQkkBb0JA0LFCOVjFQkG6K0I4hjpCVVPLQgEMakH9cY5B1MrOQkDPpkIB9f1CN7NxQjgH5EHsVf5CNGixQjt0XQAAAAAyCEZMT0FUXzMyOABAAUj+AVMIERD///////////8BEAEQ/wEQgditowQQsIPp1QMQgY7x0AMQkfLA5QMQmNjA6gIQgua81QMQ5ICn5wIQtrjJjgQQm86xlQQQrrWs6AIQ3MG85wIQyqqRzgMQs6H80AMQ4AhUWwgREP///////////wEQARD/ARDcioPlAxCPy+qcAhCsgZXQAxCJ+cOMBBCOqKHsAxDm4aOTBBCc6NfOAxCh1L6OBBCd+87sAxCTuamcAhCh3qruAhCylcbnAhCZnq+VAhDhCFxjCABkaABzCAB0eACAAQE8QQAAAAAAAAAASAFQ7quVgJbl77glWABgIGgAdQAAAACAAQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARDDqt/mARDS
ocHhAhDc9tMTEJar7t0BEKKm+NEBEPXlsvcDEPzl/94BEKyaxY0CEKn54TIQrs302AIQ2di5BhCEnpGwARCz2v9rEN7v9+8BEJzUnREQj/iZnwMQ9ta2mgMQ9Oq90gMQ3Z+Y8wIQloTgsgEQ4P/MuAEQjNLajQIQ8r62rQEQlo/+mwIQgafHRxCu37zOARCWt5L0AhCf/6geEMC/gLoDEJ2MvwoQxanxEBD/nJ/GARD91dGYAxDGofC/AhDFq7D0AxDj0cL/ARDQsfvbAxDarva6AxChhsS8AhCL8qFsEKWIw+QCELjIucMDEPSqASQq/AdCUdCyQei5YUJGzctCRHqaQkbTdEIEOttB+e4dQegJiUH1rCNB3AVeQk9+7EHQgcBCAmL1Qe/VlUILJXNCTwjZQlbLd0JLiwtB9xApQd9JgEJDjz5CV0TSQjrV0EJRmQFB2INPQkQtPEIKyhNBzt8PQjnAnUI4vP9CTQVCQkKrgEHsnTNCUxH5QgL1EkI6L5VB299FQlBrkEICGMRCWfk3QkocV0I39k5B80xuQk0vJUJQdoNB+l4UQkZhrUHIuFlCTb0zQjbM8EJFuF5B5DPeQkAAm0JEtGRB6SjHQjb2ykJVWMpB63D5QkIzNEJH3/5CWWc1QgFzR0JUsbhB426fQfYiLEIIWrxB5TmoQdA8mkJUDNFB2VrGQfy0fUJHmURCSl3LQkLzg0JB6hJCQZLfQj/uCkHXXJZB7j29QdnHeEJROTpCWf6HQkNqNkHr3k5BzFCMQf/ncEHkMZZCCWo7QeLaq0JEyolCVCcLQfIDIUI4krtB0D+5QcxphkJbj0pB5Eg+QlW5S0H2lWlCSusbQd1psEI8mEZB49xkQkVYs0JDvGdB6VY9QdztjEJKhQ5CWmZXQgA5YkI/ZwNCBRNaQf6+SEJKXjxB7waSQgM0KUHykrtCA+UCQle7akJW2jNB6WgVQj2h/0JZ/g9B4FyCQlV4RUJUmPpB2cacQdwJ/kHk0t5CNZ2oQjjKAEHgpDFB3vyyQj5OykJIXQJCS+HGQd8NlkIClyNB/PdqQexmMUI12DZCRvlXQkrnfEJGNA9CStWZQfQqkkIFSBpCVMVPQlZ4E0HNwmpB64NsQlL2hEJAm+RCAPyxQebvXkHTF+NB7yvBQfM+qkH0VLdCNHumQgM7UUI5cyJCQB3JQfaZ6kJVcaxCAA5PQfW1DUJIPpVB3oRoQeMNdEI8i5pByOQJQe1roEJUl4pCN3X8Qe4SeUI5edhCNHjKQgAeqkIFIepCOqoCQfyDlkJJWAdB22ZEQjxsz0I/cF5CQ6XvQjrfCkI27ApCO9TZQfXQeEIKEgJCPJ8KQjyickHu701B2QFqQje25UI1ooZB5w7MQdykJ0HngnRB8OwqQjsXZUI1aylCQIimQgZg+UI82KhCAUFbQgUcn0I9jk1CQ140QjkyIkHeSNRCOY8+QkF5JkI80pBB8F/3QeDCTUHjG3RCOsH4QlG/kkJAgrNCB0QLQe3G6kI0mPxCSMMIQkAl1kI52FdCQosxQjjqaEHXa3hB4zogQkITmEI97ZNCOW4WQePCaEHTOKNB2/woQf+6bkIA0Q5CNT+nQjiqekI7qEBCQ3RgQkGV3EHuGBFB3UhRQjqtkEJAwzNB4AYhQdCXbEICnrNB4tCYQjalqEI1p1wyCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ/+/v5wMQx6ORjwIQ++65VBDkl5a/AhDAkdiwAhChmbwwEL7o9skDEJ6T3IEBEJhCVFsIDBAAEAEQ/wEQ3//95wMQ09uJ7wIQmKustgEQkufJLhDIsPKgAxDnnbBAELvu/sgDEISFwSAQiChcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUNOPiru2j6LKFFgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMI
LhAAEB8Q/wEQ89f49wMQ2oiMnwIQo9/11AIQt77EZRCx67u4ARDw89zkAhCk1vKIARCoscXKAhDWnuHKAhDHlciCAhCQn+X3ARCH9qpMEITrpEMQtYOl5wIQudj1NxDr/Jq/AxCI3bz6AxCwiYH2AxCHm+ThARCNsNi3ARDp2qpfEJ+qxf0DELmSi70CENbR550DENmgkEEQ8Nfn/AMQ/8e7xwEQnfyZ/gMQ44DG1QIQ0ve8+QMQrpWHzgIQxZX4zwMQ5buUfBDoxuRgEOqIpeIBELq+wI4DEJGFwFwQ/saHcxDD8aHOAxCbhYFaEMXzhTwQmJiM+QIQt4gBJCr8B0JG4N5CV7nmQenqbUIL3HRCQ3uiQlmyOEIEhmZB1VloQjp6L0Hx9f9B+QgjQlG6C0JZY7RCTHxYQj6q+kIDdthCULTPQk330UJNc4dCW9cFQlBmekIGMztCWR5NQgStp0I4jbxCSrhOQjfY+UJAXrhCCt7HQk3QC0HshiJCP+3mQfcQUkH2XGVCSw5eQgJnakIBWnxCTtzUQfF5VEJYjzNCNg1AQc+urUHlplhCQqvBQdLIBUHtuI9CPjbjQgrz7EHveghB1HsMQlPk1UIAYN5CQ3UYQgfdWUJDIylB62eIQexGgEHzbj5CUhekQfmCTEHgP7RCA7bDQlZgd0IEJYZCTV7SQj7EekI8kK9B74MFQdDnVEJDoNFB3O4KQemlxkIGZBhB29q8QjhNWUICT0dB8pyvQdfIMkI9B1xCPymQQe5K7kHJLWtB3jABQcvP6EJLyVxCRDniQkxEHEHfABVCSXUkQlDorkJT6JFCU+LGQlQy80I+Ht5B91cOQk/YWUIBvZVCC6JCQj+wG0HM6vVB+M8nQjZMEkHrscFCNLXdQkLZZ0HqvrpB4E6NQkAW9kI5/g1B0npKQlIlxUH8F1tCTzhAQfNMzEI/AONB/QBgQgQimUHL925CSJ2DQlfX+kJK1QxCWC/kQleNcEJb9S9B9pnTQkvV/UJN3klB/MhuQkcsa0IGgdlCRICYQlV8QkJB705CUBKHQcry9EI3kU5CAYyuQj5eaEHh4cRByOukQdSHT0I4fcNCN56rQkgHvEJDKXpB46LeQdsKVUHc6MtB9kZVQfC8f0IHT6BCSWtTQeR+yUJQ2M9CR++HQk/UBkJMc9tCRNYZQgGUW0I323dB/YuXQexGsEJAb+hCCqzQQlcjoEHvZgJB4PIiQj9kCkJP0BxCOVIPQePwZUI6N45CQRYpQkTWp0HmUcRB1G8AQcu3m0HhJhhB6u7UQkBKZ0H7VZBCNnwtQfemVEJEqXhBzch9QeCyrEJBnKRB4UPTQfShVEH1bVhCQ57iQe0gXkI7G+JCNejUQjgmo0HiKU5B0qaqQkh+kkHdnUJCR+4SQdAa6EHKx7RCPmmlQgLi7EHLEPxCTCGFQkw7lEHP54hB2wENQfD9s0H1sgZCRn+9QkXyKkHa5ydCQjMKQf49X0I3Fq1B46+jQfDOAkH8Gu5CNDJmQkdBBEHuk6tCQU6sQdBTAUJD1MdB1aEpQj6vIEJG5aJCALsLQecscUH3p11B755ZQeXoO0JG6Q5B0H6XQdR0kkIA6qlB9rx8QkF/YkJFUZNB+xWnQfahIEIGaclB5d46QjRbO0HdiC5CBFZfQj6JjkHm6vVB7e61QkWW2UI68ltCPUUjQjepdDIIRkxPQVRfMzI4AEABSP8BUwgMEAAQARD/ARD/+f//AxD17/e6AhDk65PkAxCBgutWEOeViK8CEMSAlIYDEJiBlAIQoNOSjQEQlCFUWwgMEAAQARD/ARDv69vpAhDz/v+6AxCw66fOAxDF8a7EARCxiIv+AhDG8Ia0AhCUk6HDARDo2bcgEPQpXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAAAAAAAABIAVCGj6remp3LkQ1YAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAo
ADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEKqf3KMDEO+c25cDEICt+KsDEJz+7lkQmIL9MxCcx4fqAxDG/+hPEJ6XlkgQhob/sgMQ9b7dQRDCt6kfEOXP2+UBENf2uqADENjfhCYQ7vqfjgIQvY+vmAIQwOnBWhCPvqPrAhCE3/zdARDjh/UkEJLWt2UQi4P9nQEQ8NfdwQEQltnkYxDnj4lMEM3X9WgQv4m2rwEQovDFowMQrfz3TRDxvJm4ARD4oLcYEI/MrYYDEOn0+mcQlui/ZRCJyNbsAxDtmtKeARDKgrS+ARC29a2JAxCJ4r6SAhCJ0amkARDr5oS/ARCU2NDMAxDUFSQq/AdB9GV+QjX/kkI4ZqdByUnwQkvCgEII6bBCVR8UQjTkg0I+zt5CRw2AQlCcp0JQIp9CAzcuQfR7MUJF40lB2VWXQkAHukH7jUJB4ez9QedNsUI+Lq5CVPiyQjRhk0I5KKxB9QcZQgLaHkHRHyFB/KqtQjvd7kI/YwxB0Nh8Qgp/hkHdUAhCVN21QdDCjEIAEAxB/ezoQgsXDkI0KJlB9bpWQj2x1UH7VNZB/yLXQfAYhkHfxbhCQtmdQgAhyUH9SO9B0SMWQelv4EHSlv5CSPZ3Qj+zQ0JB4+tCO1KRQj/j1kJOJFxBz9UzQk3jAkHrW9hB7MmOQfUZ8UHWiIhCALp/QdBJs0I9+RZCTGoPQlVlpEI0yY9CO1arQkwLm0HpEi1COpcMQejGzkH36ZBCOk5JQfoMIUHs8xZB8k2JQe6YnUI+VwJB/SX4QfMTFUHearFB2LqiQkxkLEJK/cZCR4wlQerhLkHjQGtCPV5sQkRYBkJTkwpCS+8PQe2h+EJSuGxB7PGNQgAgREI6ZzZCAchUQjZsy0I4f6ZCPV80QgmqOEJTjblCNEQjQgEI5kIA2yJCCz7nQcgfQEI5vdBB2zLhQkIYD0I6kKxCRrSVQkZLoEHrdGRB3YPuQj+9h0Hq6mJB5XSHQdwpjkH0UOxCRKEIQeIcskHxGqNCV4L8QdI9I0HVTj5B0UlpQkVBUEJFhn5ByJTlQk6bskHZCnhB8Gh7QjcI2kIFmQZB7BH/QgBlvkJAnyJCANAuQk/xPEIHmW5CUimYQk3n5UJNNtZB8e3HQe1qqEHsvQ1CPretQgDRC0Hdn0ZB2wODQjU0sUHngYtCQ9m1QjcQ+EI514hCQFazQgdPUEHg58ZB4MDDQkWCa0HZpoZB+xA+QlK4l0H6bmBCW5uPQllNj0JMxu9CQxBeQjgVckIBethCQCreQj9qV0I1cG1B6tScQggf9EJCpkdCASMkQkD+e0IF70VCVHDpQckBOEI71QtCQJjlQgJY+UI1Y7VB7rVfQe8jrUIGceNCPZuQQjtF50Hh8v5CQEQ0Qj3K/0HRXXVB23noQdHTfkI5pIpCT+FAQds3lUHseu5CN+aAQgadrkI9ZftCO+LCQk0qzUH6pTNCSVbCQlnpx0JIxw9CRK2BQkhr/0JCUY5CA1nBQgbyBUHUQqhB7RmZQe5XC0JFvM5B7g0+QjtR+kH90GxBy3WgQd1C7EHgInFCPXZ5QegdNEH+6/lCWQGEQeRElEI8wL1CPHj8QfN/ukIDlahB7av/QeG5DEHNeMRCQd04QkmCSkHbcOpB8VB9QdrBO0JQ5w9B1DywQfjgCEJTRrhCAivyQjaGXUHO7BNB7jO+QezHKkHu8v8yCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ//797gMQ/b/59QIQzIn7ywIQ2ODc1QMQpMLtDhDLoqiaARDHuImLARCRkKAgEMICVFsIDBAAEAEQ/wEQ3/7+5gMQ/db5/gEQtpv+/AIQmOKtdRCmkNkVENuLzvIDENTIyAEQgYSIqgIQyglcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUOmI1fe6zJeIqQFYAGAgaAB1AAAAAIABAJwBmwEKAzIu
MBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEOeL4qABEMqR0fwBEIjntSoQwsa9vgMQoevMDRC9n+ZqENSd/OYDEIrV+a8DEIH6x3MQu73D1wMQv4nIvQIQk/rS6QIQxba47wMQqorx7QIQw9v+KhCLrarVAxDzx7MfEO7rJRDZ9fL/AhCh/8bsARDGwcvDAhDEnOW6ARCj3oucARCf6bOPAxDil6nrARDdgpzZAhDyvr6EAxCavueGAxDY5ZkBEIOL2aQDEJ2evW0Q4P+91AIQmL7+2wEQkPvV6gIQ0fO3wwMQhpehmwEQrNDzWxCn4eyVAxDd9a6kARDhr4B1ELju6SoQ9vnnPxDeCiQq/AdCTReCQlSjuEJYl4NCQlGZQkA83kHdJXBB4oZxQeDqJkHU9DZB7riwQkZbYkJHSctB7u5UQesRzEHRt95B9T0wQeVpa0H0tDZB79KVQd5gFEIGYlhB/mk5QdD8KkJNRZ1CRqW9QlLCvUJStOpCAD7+Qkr0yUH2OrZCTsNDQk6JJkJALaZCRDJOQkYwjEHzRMBCBMtOQdT0ZEJXUzJB78EOQktU4EHWFaJBy2p2QemCXEHaBKxB+v4vQjqB0kHwHsZCSPNmQgpvAEI4A2NCN0NeQkariUI9K1hCRQ7iQlC30EJCuuRCTC1HQlSUYEJTdk5CVJusQlWKGEJPRshCAWmlQlU2WUJRIr9CRJKZQgnXsUJSl+5CSJ5EQjoFqkHzlpJCNn+YQeojbUIGP2BB0iAlQfU8SUHIbIhB+G1kQkJZv0JPmrFB2lLjQkhF2EHT5rtCOgFjQgS4r0JY5PBCPNeZQkq2eUHeA4dCRIgSQfOfB0JUrJlCP1uUQgbTYkJVPiJCTBsYQkxUnEHn3ihCNptmQfYzA0I2vx1B67h0Qj5+LkI9M51CASH4Qfv2m0Hq3V5CQGeMQcvlP0HO+/pCU3QNQk3l2kJM+DdCVYH5QlOriUJOtjpCS29VQkondEIIm7FB0H+YQck8pUHcX/hCWuG/Qlsk80H8A9JCB8J8QfVSfEJPbW9ByO3KQjU9bEI8n2FCO6pHQj68wUJL6olCPgrbQeA5v0JAY3pCS+noQglK9kI4Q9tCU+lAQevQs0HVQRRB1nxcQk0CdEHz3mVBy1f5QgW9k0HReANCTUDxQfO29kIA0IVB2CuiQeCv/0JNu/hCAQF5Qla0R0I76cZCOhujQc2LxkHtiIdCBTf6QeXaTkI2ixVCOO5jQjn7AUHxaB9CBZJcQgE9TEIFE11CBHQJQdmgpUHSgnRCNERYQeEOd0JIGAZB4qeUQjiHlkHyPo5CTVQSQfBfekJYoBpB6HUvQdd5mkHacHRB54JBQjggMUI5KQlCBJxYQeq27kJE/1tB70yRQgRlqkI3zZJCOQtzQkVgBEJIIt1B+RLjQeg5HkJEZgBB/tSuQlLTYkH2e8NCPa99QdfZhEHvEKNCQ+91QdVrTEHOl3RCAAPgQeCD5EIDO8hB+9VOQfVcUUI8+q1B+4upQjtHO0HmVHtCSpqAQj+/kUI6/lNCPcRcQkI1B0Hh2sBB8ZUMQjcRZ0I+zltCOwtiQfbk00HjaexCR6prQgtZ+UHJ45NCO+jQQkEWE0I1E2tB6iviQgaf4UI6KJ5B/BN0Qj/abkI8balB6wqzQkJgKUHmL9BB2/dDQjphEEI55cxCOclrQjePw0JH0OxCAHrbQeBpP0JA+u8yCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ//+ftAMQ76/+lgEQvOTUcxCS5Zx9EPD8gykQo9a4oAEQ6OqhyAEQrOWIhwIQgiFUWwgMEAAQARD/ARD/t5+cARDu3+f1AhC+kv6/AhCA7+Q1EIjvjswCEKCtiaADEIr7oYABEJS1jS0QDVxjCABkaP///////////wFzCAB0eP///////////wGAAQE8QQAAAAAAAAAASAFQh8fE3NK8j6UrWABgIGgAdQAAAACA
AQCcAZsBCgMyLjAQABiAAiAgKAAwADsKAzIuMBD/ARgBIwguEAAQHxD/ARC2vpDWAhD07uATENDF9fUBEJun4PUDELegscECEKC7mWYQgqrWeRCbp7iXAhCz6uKBAxD+57RZEK2Dn1AQkeaTigMQuZ6iMxC/v6OhAxDY45TgARDn9eBsEOLSo8UDEPHTgPUCEL/AmN0BEIOfk8YBELuEwfYCELHZ4+oBEKS95/kCEO+tivoCEMaR+icQga7omgIQvInGoQMQmr36jQEQwZyVugIQ4M+dEBCDsca6ARCmvPiQAxClgvi6AxCQ9O/VARDEoN3fARCxhf8aENGfz88BEMH5k08Q7uregwEQoozjnAIQsa+1rQMQ3OKMiQMQ3WgkKvwHQgq+UUJa+/1CVwjnQfJvC0HdCJVCPaQDQfiCJkJPqudCRPs1QeJtWUI6LBRCWBqLQepVckIDqy5B/pzzQf8rXkJF8qtCUQZ/QjrhKEICvOtB9cNxQeAC3UJBu7NCOb0QQkuvCUJOf9FB9U3SQeB6aUIF/cNB7EN/QgXySUJVuFBCWgDqQeam40HgpGlB1uziQdJ3hEHnUY9COnVPQkreyUJCkKxCRdXKQjVQ8kIGqZlCBRrNQfAh1kHrxL9CWm6aQkoskUJHuM5CT7/rQkGm7EJN1hBCAlVLQgdQ40I9H5hB2GmuQgKu9kHtg0RCSAjpQk2smkJRGEtB9VCbQkdsb0H9ku1CTmdfQkP20EIBL5dBz5HsQlBSqUH/qtRCBKZpQjrUbkI4GQlCQfNZQj9u0UHJvtZCRYj7Qj+KIUJJiaFCQuV2QlEL80HnHMlB9pLmQgZdC0H7+X5CAe2rQgdS9EIH5D1B6RmUQlLrpEI+fzhB4ttDQgsCd0JIQiZB+/H8QgkmIEJBgOJB6XldQlYn7UIIyGpB000gQlaWDkJDHYtB4uIlQeqrw0HJvbVCOzyuQkSizkJEhOxB2EitQe3ChkHoVoVCVt5rQkEeGUHevqpCPSi5QkZ4Q0IIUfFCAx17QkKESUI1azFCBTKeQfXl3UJW+H9CNliTQlcnUUJHDipCNUvRQkypyEJUt2dB1peKQelrKUI584RCRVXSQkaWgkI0EXFCPaM+QkeYo0I0YqFCO2nzQjZqGkHv0T9CR0lJQgFonUH9ZhZCAcLkQjsdyEJBtm5CBwWvQkEBHkHsHHRCSVtWQdfO3kJBW9dCSuhWQeF8p0JHZLhCTkjDQkNMqUI2Z9FB7zBPQfGuU0HkMPhCS+IKQkVtakI8RQRCC2YaQj8jRUHJk+hCPf5CQei6S0Hj8MtCQ0+GQeblqUJAIphCUBJNQd4EyUHlhzZCQ1RMQjntZkILHY5CChzDQjZrZkI2vrVCQpYpQckvYkJBU6BCQDI8Qden90I8+75CR8gBQk8U+kJL7bdB3PmhQgeDsEJMWrtCRI3sQdhqMUHdyZVB1XrlQk3ue0I7PiBCRUIvQcofJ0HawpRB3SefQeCGU0JSN4hCNbfUQjm0aEI7EqBB33jGQltrk0JDiARCVSrrQk7sGkHUyWZCV7gDQlchZUJNlv1CANzsQkIjwUHY/ApCQFJdQjkKIUHi25xB0QdJQdFq0EI1jS9CRn2DQlQsk0JS3aRByBwWQe6gHkI7s1NCS90kQkDytUJRoW5CPsFuQko3ZkJGEapCP1IZQdOYAUHvFmpB3NIBQdKTokHV31tBzyihQdQKkEHW02JB5kerQjzcF0Hc3UBCNYXNMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEP/5i/8BEL/Mqt8DEObtuuEDEOuw8akBEI2/wiIQ/YfMfxD2joXwARCsgr6QARDlIlRbCAwQABABEP8BEPvvy38Q/P/jIRDe74WBAxClkY0QEI/xwSAQ+MHwnwMQwK7JkwIQoICqURCFAlxjCABkaP///////////wFzCAB0eP///////////wGAAQE8QQAAAAAAAAAASAFQ5e+Eg6ve7v6D
AVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ/vzkwAIQ0Yix4wEQtdmHyQMQxcerPBDr2fH4AhDEjobUAhDc86CLAxDn6p2jARCKlsBKENfYvjAQqMuYwwEQ5L37zwIQm8DcyAIQq7aXWRCL+dlpEKWp6O0BEK353WQQuu6u9gIQ/MPaxQIQwZieRRCs14aKARCkwvfuAhD3v4vbARDBtNbYAhDR4raeAhCMzt80ELXd6t0CEJ+Uyf8CENSmjhIQkJfnnAIQodXmqwMQptT9PRCZwPbNAhDI2+1iEMbV7ewBEIbCt0AQxLHNuQEQ9cW35gMQ0P/lPRCAz4+jAhC5+bm2AxDlseZKEPbNASQq/AdB+W+8Qk8B4kI2rXVB9LLVQlVmJkHZ6BZCUc97QdtSokJDBmhB7ccbQfbjj0HaUttCRBzjQkuIkkJNfCFCNAvWQfzEOEHyEG9CU+DHQfEAGEHtYHZCUvr1QgJx+kJFYFtCOUmKQkbqvUJWLrhCA1sRQjsKrkJMKJ5ByonGQeumU0I22TVCPK+JQerh60JM5VlB7B9oQgS65EIIpYRByDwuQgMr40IE2zxCRcZSQjSi8EI5FdhB/Qi/Qec1lkIE/plCC7B1QfAxuUH9WM9B0netQjXzFkHYZBtCPcPAQfNtJEI0UjVCTPIwQexVfUI+FF9CABJUQj02oUI/JhlB9DJjQdnjqEHLRPlB70BRQlZsBEJWNhVCV1vWQkOFMUHykdhCQ5dLQe3RO0IHeKJB7vPHQjVeikIDFtRCSgo0QleFj0HtVutCV20SQgKBykJKqeFCNR04QdQ0gUJSLiBB2EHeQkZMKkHmlWFCSH2ZQkVW2kI5P0FB+MnVQk/XiEIHAG5CPQw3QjQO0EI+AitCAPvxQk5NI0HVuP5B1VDHQkkackI7Q2dCPCftQfcllkI57uZCAbzLQj7qqkIIbjtCWwaDQlDsBEIEL5lCVqVaQckH0kI/qP9COI3WQePApEIAiyxB2kDMQkykyEHM0n5CTiw9QfuiXkILFBFB8YR0Qjqgo0IGWMdCTt1hQjdKE0IH37VCQpi0QjU+tkJBqFJCTyx9QdLJkkJYfQdCNjs0QfTF1EHeESBCSMIDQkF7L0I3JwtCQBHwQffu+0HwplVCPregQdDud0JDCtlB3pQzQeeP+UJBAmFCRl0sQjplIkJCHvlCTWVGQj80jkJJtoFCNc3mQjb+LUI+gOhCOoaUQgXDdEIDbK1CAhhbQkqnB0JUkBVBygN+QfhgokH5W5dCRZlpQeAB3kJMkhpB/X6GQgTFkUHxMNBCPGchQkSIo0I+EupCSXuTQkX4qUHqxapCQpWzQjmumkHZ+utB/3WJQjSJ6kHNBPpB6I6fQjo/6UI6z81CPGWTQffki0HrQYxCUyQEQj5qj0Ht7qlCAGc9QgLAsUJDJGpCAOK4QjsHckHL80FB157ZQcgRkkHsBitB6mYXQjQQjkHWga5CBwG/QdDMmUHW+mVCOnyjQgQb90HdBqpB8fraQfFs/EHj2E1CBJdHQky24kJDHcVCQQOQQjzR70HwnYRCRq36QdfxHkHcoc5B74f7Qdldl0I97hJB35XOQlRHAkJFd/FB1t1DQdGycUJJrz1CPLOcQfu4JEHuRMpCTcXsQk+tyUI8ksxB1aEuQeiyLUJRsrpCPu4mQjbxd0HYoCNCPGfkQjzTk0Hku15B3HhDQjoEkEI+DJYyCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ3///3wEQ/+fr8wMQw4i9pQMQ85XqwQIQ/qWBmwIQ3JGOsQMQ45DFqgIQqYCyxAEQzQJUWwgMEAAQARD/ARD///f4AxC/xMezAhC7gK6qAhCjt7pgEI2Y7ZkDEPGIiNkCEPCBhZoDEImCssABEIwFXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAA
AAAAAABIAVCZnbTdu+moqCRYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEKz52z8Q5bnviwMQwMn4hgMQ2qyGzwIQ9Nyn4QMQnrfoLRDO4qqXAhDvrO2eAhDele4pENuMl78BELy9wlAQ9I7TlgMQqbbH/wMQo+PTOhC/9/RSEL7s2u0BEMa7gkUQmeL9VRDYgpTJAxD60bjyAhDLooi9AhCax6TqARDrnrD5AxCIkuymAhDWzvb0AhDf96UJEIeGsq0CEPqb5a0DELnOqpQCEPH993cQ6qybgQMQqJ79hgEQnujMRBCcw+slENakxW4QzqO49QEQsM+BjgIQ293IxgMQwsLlLBCx4/jAAxDw+pbcARD37IE4EKeAASQq/AdB1bVyQkW2XUJQ64pCPixYQkwnm0I7qetCQYuBQkEtg0JQl8NCTrrYQkc7qUHNiFlB7wB9QeB16EI9RUtB0cGiQjqEXkH9rtpB3nGsQcojb0JRyW1CWwMcQeHjqUHQGZZB+0emQj2yVEJaUbtCSUSWQeTvg0H0u0NB3resQehPxUHSkblB3PdAQltoBEH2WslB2LQpQgI61EIKLCNCOidjQdqCEEJRt35CV09LQjuAQUJEZPZB3REtQj2V2UJOiARB9U9WQeBor0Hl9xRCUYf1QkS5sEHb789COc9iQgP8IUI7L4VCSZ0tQgQB30I8dElBz0fTQkXgWEJDcCRCWhPCQjkDm0JK0QxB831oQjgLpUJBvwBCOKJ8QknaQkJCsdFCUCNAQj/4Z0Hd4vFCTGtuQkUn3UJIKa5CNAUbQkn40kII5SRCOErzQjg1BkJHCcxCRciKQj6v7UH7LNRCCEo5QkqxEUJNfVVCAZafQk9n+0JF/IpCQMFwQk33Z0Hb+ABB20RyQfHK6kJZBURCSfThQfBnW0HYq6FCTci7QdiL50Hk+FNB2BqDQk4IKkJM0jVB1JFaQdOEBUIB1adB2aLhQfWjXUH6zABB88/uQjSHmkHtJGBCAeRrQdrgT0JHUl5CS18mQgdEeUIGkFlCAa5EQgr530JQ1S1CC6ExQgDj90H51rZB61N6QfjuE0HlZexCVzQ2QlSC8UJO4TBB0P8rQdPo1UJAvV1CBzybQdD3TkHuxVhB+s1lQkb0xUHiM9JB2dpKQgvx8EJZRw5CRGs3Qj0T1UJCPxpCTTntQfRJlkIG08xCBA/sQgi9g0H7FtBCThpFQeeRkUH15GtCU+kSQghzvEHYMshB518VQjkoP0JBzX9B4l+TQj80VkHI/W9CSThCQkOFkEI/gn1CUrl3QeXeJUJORUZCOKn9QkpaYUJJWGFCOn8PQkqhTkJIF4lB3m8BQll2xEJVhftCNYPrQepwskHqOY1B5XUvQjm43EJShONB5qGTQkK5m0H7UuFCAhE+Qfdch0JGxh9B85XtQk4xGUIAsZlB+WZ7Qgamx0ICmNhCPIqPQk+XqEIB1FJB83ZyQgSMPEH4fpxB482HQjrlTkHjJZpB2RyJQebVc0HfGilB8m61QfJPl0I1Io5B6khDQjT1hEJWuVBCC36QQdoodEHdpqJCRBVyQgUrTEJXoVxCCmDOQk/CZ0Hg6MZB9VgFQfoz0EHxXkBB4541QkKGiUHkyRdCTCi4Qkr9/UHygqdB3ykkQk5MbkH8iEZCBhzYQlcep0JLfdBCOy07QfGIbEI5Vj5CPX6dQjTmYkJJo5VB+ynLQd3HYUJApFRCRs7SQk0V5UHO/hAyCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ/+eKrwMQ/efqvAEQ74b9sAIQmu7AZRD25Nn7ARDFqLH3ARCSztmTAhDRk+HEAhDCA1RbCAwQABABEP8BEP//s94DEO2vyv0DEP/klbEDELCPkOQBEMaFoMkCEIyhrNYDEKXdxZQCEIKBoEQQwEBcYwgAZGj///////////8BcwgAdHj/////
//////8BgAEBPEEAAAAAAAAAAEgBUPyRxf6d1u+gtgFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEIOg6UkQ2rm44gEQz8va+AMQu5abrwIQ4pfaygMQ7NuM9AMQnMu4XBCcg/T7AhCgwvqjARDT7pnOARCD0+crEJ3+v5QDEIW+2tsBEPu+hncQ5Jna7gIQmNi8ywEQ0vCw5wMQ48K6dBDWvILwAxDW7IWDAxDDntSMAxCzzbqJAxCE8fjnAhCc3eC3AhDT5s6VAxCktog2EKCHq4ICELHdzm4Q2LEzELqVvrUCEMf99fMCEJyn/pgDEIut9+sBELaM5usDEO7WjxQQio///QIQuJXh7QMQ39D7fRDgspOlARCF0IbZARCR69CEAhCh5f9HEPN2JCr8B0JFom1B9XyvQgadX0HgSgxCRKYsQgBTXUIBL0hBym0WQk+NJkHym2hCBBQtQdGlGkJRqTBCAtdvQjjsOkJZzRRCTbppQlOWi0I+5IZCSHbnQgYJz0I4IadCCAHgQgrVCkHypd9CUE/IQf+Dj0I1AL5CS4ObQdqwEEHeeE1CVU3bQkJdYkICGAtCCp8UQlpXckHT9HdB8wdtQjhCbUH+sm1CBj8+QlOCJUICR7VCAJfQQgickkIFjo1CSzStQkiNxkHJYCxCRzHqQdLpW0JF9MBCPuDIQd8wSkJTeudB5ZrkQkbnz0IHjfBCC2NMQgPVoEI73YxB5RIaQgvnG0JLOwhCWo1fQdTXl0JXqZFB4euCQkYEM0JEjMJB8xdjQkiIMkJTR9ZB5SvMQjiZrEJCvepCOXdWQlPH+0JIHE5CPnpdQkYuckI5/KVCAkcDQkc2PkIJar5CRg1sQkPz2kJAkPZCOj9mQfPTdkH5fllCBZGDQk0aekHiEG9B4YwhQekcREHglH5B6XIVQgk1A0IAllxB8ssaQeIRT0JTfNFCPuHLQckfOkI5M9BB7fFyQkCV3EHbr95B7QwOQjmhhUJBHXxB6WM4QkzqXkIA3wdB9dsWQkaR+UHfDF5CBYkVQdHLh0I2st5CAjlTQkCqNUHI7XNCRIuTQeNWEUI27yNCTKh1QlR/CkJLrZxB8hF9QgF/6EHx/A5CBlvTQevq/kIHq/pCSI8fQdHZvUHRIXdByZc6QjQ+XkHhY5pCPbuOQjeZIkJGqxNB9Vc+QgP870JHgzJCNdLoQlXQOUH3vplCOp28QgQOSEIDfrVB+dToQlJubkHbidFCS5pVQdTdTUJGEWxB0KlCQdg/jEJZmXZCQBrOQkCPSEJIeBBCTfBZQgpLCkHQdQhB948GQdQx3EI3zH1B1aX/Qcr7JkHiAdRCN7VhQdLhQkJFoTdB05WnQj/nXUI9uZdB/AzVQkAO5EIAup1CNKVSQjhHakHrodxCA0GEQfOUdUJMBbFCQvn1QksO6UJG3RFB79KLQkYRukJFemVCSJ6vQdxAOEHKpjxCToEeQjy9S0I9r7FCO8tmQgQXD0H7/chCO7CDQgDzN0I7rmRB1TL2QlMS+UHX0ehB3EAGQjsgZkI+CyNCUB9yQkndOUHgFv1CQGarQdWllUHfIxVCNuF2QdabpEI/lgZBynEJQcjgIUI80XBCQtunQk4050HanyJCNZ7cQgDhdkJM4kJB74iuQdXsn0JRxn5B2birQeNwP0JWgc9B1oxxQlfBKEJDxptB4i1bQkxrp0HWOQZCSIHzQdIbpEJYAQFCUqsdQdVUYkJLUuhCTuDoQd9ggkJNZ65CVl7BQfE8DTIIRkxPQVRfMzI4AEABSP8BUwgMEAAQARD/ARC//7++AhC3nMXvAxC8of+vAhDYlfucARDVycrzARCBhJAgEND4iLEDEIaBkvEBEIgCVFsIDBAAEAEQ/wEQ//297wMQ15fVlwIQu42niQIQ/JGDsQEQ2cPxtwEQ0Z3C5AEQ8LWmkAEQn4CfwQEQg0RcYwgAZGj/
//////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUNXim9Opq+HBX1gAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQi9D7pwIQ4uDErAEQ7sTN8QMQ8YmCjgIQ74ez7QMQ3o7+PBDp2L7yARDHk7jiAhCRtMqnARCx1Oa2AhDRy4owENP21oICEOST/MQCENr73mwQu8bK+QIQmvOMjgMQoLeD7QEQkazpgAMQjvi57QEQ3cX14wEQ48WGmQMQ+e/47wEQva3XqwIQ3YSQMRDzz+20AhD+gbf2AhD/9+U6EMi+7YgBENXH1/QDEMDi0csDEO6Upf8DELrEoUIQ/+CTuwMQp8Xe6AEQku+6ogEQxNXgywEQjIzvmgMQ9/6rqgMQ//3e0AEQv5bnvAIQiue+/gMQn/q3aRC8jwEkKvwHQj4Uz0HM4yNCAOzuQjoO1kJHESpB7f12QfBOGUJExfxBzCLVQevrWkJME2dB4glgQgV+GEJXB4BCBQ/7Qcu77kJQl2hCTb8wQkcWt0JGwBdCVGGwQkdAckHrVQNCAmTyQk8vekJTDu1CBveIQkq+/0IDSsRCBbFEQecx/0H4TfJCQpGkQlEUiUJMH0xCW2nOQlMiVUJNNWFCRITLQjtBPEICgxhCRVt1Qlj2REIKrMtB4XUpQc5Z/UHpJHpCAJggQj5V2EHkFPJBzsV0QkK7j0JISylB4NdbQkmnYkJbZHJB+C9RQlRtmEJNYChCUcldQkDPNkH2Uz9CV6tEQj4kbEH92c9CUyUpQlbRZEHtxXdCAlxRQjs+d0JCDFNCAOHtQgpw3kI7OzBB2hF7Qdmd0EJL039B8mOdQfRCs0HVivJCSI41QldV7UJLZnNB8eshQjxgdEJCovtB87WLQlURhUI7FMRCVGm9Qfyvd0H7syZCByYiQgqwMEILuHxCCut2QgVgO0JWLg1Byv4GQjbDTUHm7d9B2+7MQj4QYUIGqTlCPQOgQglF3EJDYz1B2+KVQeZev0HXaNhCUUMLQd845UH1F0JB/TvdQlU0hUHPAZ1B97u+QepTn0I60LFB4eevQkx2KEI9DOFCB43ZQko4/UH5i/RCR7uLQlU2vUJKA1FCPjUjQgSezUI2I0BB9XxnQjc78UI2K+dCNHRhQjiyh0I97VZCCaRPQj27HUH8k+pB23iEQc3nd0HLSf5CSBYjQjoWdkJThPVCQ1QmQddYYEJJzSNCWcp8QgEIZEJVuxlB+hfGQjiP0EIFgyRCN5BdQkBtLUJC9nlB38t7QeZiYkI4aYFCOmPOQgAEOkH9B6tCNhoLQdUmO0JERYxBynrPQlMVtEH19ItB5PYJQk0bakJXeztCVYGbQdQY80H6olpB+Bd5Qe5v1UJBrlxB6iz7QgJaMUJJMh1B1dhSQfJFw0HzKz1CQZSkQdyCyUJHLV9B70RKQgjZoUIA78xB1aLcQj1ALUJLTXhB3XlJQkht1kHc+dJCUSWQQk2a+kJHR3JB1/JkQkO4K0JENFNB1yFKQdiH6UHPnQ9CPoG2Qkx0ikIBCS9B0ib7Qd+Iw0Hm1b9B0f05QeZv60JGKX1CB0ARQcxYrUHLq7pCVoONQkPDr0JMGq5CVNa4Qk9va0JRPyhCOpG3Qkmnp0HeV1RB5mPYQkN5Y0JQ8gNCWOWqQjl2P0I55tlCNmGiQjgf/kI4eQJB8Rv8QkO3UUJNL29CTgnzQe2GSkHTUFBB6UzkQjSfdEHhXs5COpI6Qjm0wkH5ADZCQGs6QkfVqEHSQW9B3AxDQeMEg0I9V89CNJmrMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEP/9/fcBEL7t/ZcDEOzz8j0QwPznhwEQod3F/AEQprHUtQIQncPVgwEQn9SIpgEQYVRbCAwQABABEP8BEP/0qfcBEPyd/5cCEMrRpvYBEOC+jREQg8qA7gEQuOTX6QIQ
lIDi1QIQgICMpAIQwCBcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUNmOpfSvhua3nQFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEKmNzVMQ6NDrpgIQvb6x2gMQ05+b0QIQl4L3hgMQj8/ZrgEQzsuH7gMQvrjjuwIQoKPYDxCqoNCGAxCU94jiAhCqt5s5EIbW7OECENbPnYADEInDjeABEPXf2Z0CEKK+1I4DEOHiqEAQ0Yfo+QIQpfatdxDe47TcAhDr4r88EN+cnP4DEOaf3swDEMTzuRAQlNrefxCa563/ARCSpd7EAxC76fPIAhC5tvDZAhDzh5AfEKH/nocCEMSvuJoCEKT1hpsBEIbZ5ToQxfLXQBCvx9HsAhCf7buEARCi6tSEAxCL/+q9AhD6sN+wAxCI6rl5EOXSASQq/AdCNtHGQjilN0JFVkJB9ubDQj8CvUJFeBdB5L2AQj8kl0HMPDVCPawKQjqo3kHi9shCS1WxQk8iSEJYjIlB8WkTQf/6qUJQ/GlCNpO5QdB3YkJD4xdCNAQzQd+RiUJGLElCUe9TQdk8EEH4aR9CUqJsQjUzG0IDOAtCPnPuQgH2rUI66mFB7QTdQkqilUHh64dB+7SwQd60VEJB/DRCU81RQgX7GUIAnVpB5/BrQlpdc0JUn+FCSHzMQlQoJEJStmJB35CtQjueOkIBTH9B7fCFQj4EnUIGkVRB5u7bQko38EIBmARB5xJNQksbfEHQGItB9IyHQereCkHULVZB6oAhQdy3hkHp1hRB1e6dQkROkkHp9PxCSbZ5QeM1pkI5xSFB81IrQfWjPUHoTc9B5cs7QkneTUHUMPtB3T0rQeAuJkJCQ7xB2MFwQdWq80Hh1PJCQokGQfc+00HdzrFB3NylQf8OJEIB+8ZCREtbQj5fMEI7J7FCBRPAQkDoM0I7k1FB+pi6Qkhs0EJNh01B54wJQjYkvkH6mW1CRDzaQk5r9EHuaR9B5oYbQgg60kIANpRCUvpvQeUHekHR7VhByoiRQj4oOkI2C0JCPIorQjmbokHVbItB1oaDQftkDEI6eGZCAGs9QdXSbEHvGVFB3E/PQfsliEJD2H5CR3PlQkdw1kHx6xBCO28kQeo5O0JJ7h9CQsxPQc2/Y0JBxKRB/314Qe95DEJaPARCC62wQlv8SkJG/H5CRc94QkQl4kHyIYxCBUBbQgv0hEHW/1ZB0kgeQebV0UI18SVCBF1AQd4iWkJLNBtCRAEcQkNP30I+nsJCAH7mQdRcO0IAkq9BzxuzQd2DGEI0ISVB4mN2QciE4EHSs5FCSnYOQlXVFUH7M/hCRDpCQlB00UJWYJJCTFmtQldMVUH4G55COyiQQlPGQ0HijVhB2KW4QjocxEI+HTpCNbS8QlTikEIB90JCAthlQeuiWkI/HcRCWtpSQk3IuUJPxd9CS1IAQdr1dUHnX5RCCtloQgF6VEJMU15CB2JmQdrMXUI4+fxB2xVaQjuQ+EHMdd9CPBvVQe8dbkI4oB1B79mHQfiHdUI8p0ZB/ny6QkDaykJbmmNCTLKUQcmQ5kH+wvVCQqGXQkxQ70HsTf9CUY6cQjtlp0HMG6lB5Y1TQjue+UHLYT9COinsQetRm0JIjNpCOBI6QgVBNUIFfKlCA10aQkKzbUJO/MxB38BrQkaQeUH0VI5CWZETQeQDqEI2g2xCP8/AQjZJsEI8bO5CBwSQQj23j0HPTLFCTulZQlXhsEJa9d5B/kRoQe7j7kJH3xRB8cdAQfbjW0I2OYpCW9FwQlSgZUHgazUyCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ//3PPxD7/+/vARDj4qC8AxDXib8CEPWo8JQBEKei6bgCEKqRlbkCEISggpwCEMBlVFsIDBAAEAEQ/wEQ//zj/wEQ6pbv6wMQh/SErAEQ
5qi/JRC+5JCnARCS4cOsAhC5ivPoAxCFzsgMEAhcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUL/Kr+vao7+2FlgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQzZOyogMQvcGH/wMQ6bnwExD2iNaLARC0o4TIAhCE16NCEIeBiFwQtsv9wgMQrN+vhQMQztWK7wEQsKylnwEQ1Yen8wIQqPKF/AMQyOyttwIQ4pPZ3AIQ4JLRcxDojLmgARDj/uuQARDB1PbsAxDp695mEMPGlt4CEIyF0LACEKf5rLcDEJbq79wDEJuEkS8QpvSDZhDc9sIEELbr0e8BEM2BlvQCEMyD7sIBEKCKoKkDEJm8lJoBEOrqgwYQkpvPmwEQpqjn5gEQ5q3vfBDC6dKoAhCbl7wHEJG32L4CEMTm9AUQsfKRkAMQ0PqUuwMQwQQkKvwHQjSeuUIDOUlCCs8UQj/sdEIIxv1B0mzyQkkO2EI/6xFB548mQkSTj0H7tmRCNclwQlkfFkJZ23hCPRRHQgOCvkJHUJ5CSC5CQgCf5kI0nsBCTXsNQkSrRkI1GuNB3SQ1QfmDL0JKsaNB6jpJQdmJ5kHtVsdB3Ke2QdjemkIHD7ZB5hfEQj6WukHtqMRCAXIhQlLZskHsKjpCBbqzQcvGT0HjW95CQc3qQgAeHkJaH51COoY8QgGdDEJCHxBB0FJTQdX1nUJDuw5CN++bQk9wlEII6UdByR4qQdIvE0HmnCZCBMdBQk7RBkHWURNB/iiuQgsUa0I7FVtCQifLQcjDc0HeFKBCOVh4QkXHi0HR+GpB64oBQkt82UH/oHlCPf5gQdp5SUI51s5B0XGuQeIww0IK47tCVXgHQfVh3UHj3PpCS3SQQgVJ7kHyvtZCQrMgQel+iEJEhxRB1szbQdRsPkJUlkJCS8fbQgMmZUJVpwhByCblQfExd0I/53JCNabPQezW4kJZElxCWkL2QgHLZkHbu9JCAaz8Qk1vNkJXcDRCTFOIQk/lwUJBOElCPnPaQke2bkIIJRdCBQbfQkzswUJbELZCBu/BQjlnG0JPYXNCQmSxQj890UJJuwtB4IwbQkS45UHnA8BB/5k8QknUIEHN5ShCWj4uQgYFKEHoaJpB9hucQeFZIEJN4QBCOf/wQk4q00I209xCOzMmQcn7+kICVhpCS1n3QfHISkHvzyNCPZMoQkuZq0JY6ZVCQoAoQleLEkHxqelCBZeYQd1sRkIFr6tCAn2KQfInDEJGuEpB91yKQlTdPUH721JBy1qOQekPJUIKOGhB2PufQlNuT0JXPgpB9EM0Qd7RwUI7pr5CRQNWQfbWAUJb3LJCAmqQQkVnTUHqMcNBzyeWQlIlQkHa5qNCTOFRQfO2AUHcLFdBy/DHQlLEPUI6eyVB2Di8Qd9wDkJD1gNCNpzVQgTik0IK8L9CBA5jQju/fkHbPOJCPE7xQgFSzEHxRL1CVrRgQchCcEJDxu5CQ7DpQj8kz0HSusVB0zuvQe3m5UIHxqNCVC18Qj/mr0JDHLxCVxMbQfTBbkJPiXNCU5JmQlUHb0JOZgRB1IeNQfaJh0JDogBCT9IFQkRBx0JJrG9CRvLrQdu+9UIDQmZCTFsxQkTg2EI0tkhB/oQyQjiaEkICsxZB4+EYQgbc6UHg1tdB0j6pQlFgPEHyCTFB7LuvQf3KjUJP8PZCSxEPQePbyUHrzXxB9C0jQegN0UJA5cBB2k2pQkbBgUI4kJZCANc6QgqWj0HWoh5CUs6FQgFHXEH2JypCREJJQeUJTUHTMlpCR6ObQjmpwkHcd5MAAAAAMghGTE9BVF8zMjgAQAFI/gFTCBEQ////////////ARABEP8BELij86cEEM7Qq6IEELnP6qkEEOjAv7ECELLlvJUCEP2c6twCELe+n7ECEKDI9O4CEIuuxeoCEMfw
/+QDELHV96wCEJrriZ8CEM+F2NICEJcJVFsIERD///////////8BEAEQ/wEQhMagrAIQpsWEoAQQkoDo7gIQwezKlQQQkq2YlQIQ/NmpnwIQsPnarAIQmPb6nwQQgov1pwQQho+/4gMQgZPdsAIQ4r+algIQic+YlQIQlglcYwgAZGgAcwgAdHgAgAEBPEEAAAAAAAAAAEgBUIqgj4eOg9WNswFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEJud9REQ+KT40QEQuvH/sQMQtfbMqAEQxoiK8QEQkdTVvgMQwNGk1gIQksiZfRCR/rFkEOf3lIoCEJDZxNwDEPyK3q8DENDAuHwQka7V6wEQg4K/sAEQ6M7c6QMQ/+Dd1gIQyfSbTxCB5oqDAxCpx4xSEMigse4DEMvC+0gQqt3SgAMQhsfR2AEQjZ/O3gMQvfDJvgEQzLiqiQMQzZattwIQkvueowIQi5fBmwMQq5r8jwEQ/6//vgEQ5MTLfBDzo7aJAxCvr62LARCc2uPcAhD2ser0AhCG7YP2AxDqypcZEOqKp/ICEPaKm9QCEJeKiasCEMMCJCr8B0I1uqVCCGPEQkQNIkHMu9VCPhA/Qkax9EH1zNRCPmccQgSGQkHYhZBCSc03QlTSjEH9iTxCVjA4QeTpu0I+R75CPkluQjucZkI/hMNCQboYQgFZvEIF8FZCT+btQeGCDUIIUmdCA/jXQkFPpUI3krZB8qYtQjn4TUI/gpFB6drfQdw87UHwrc9CT6ElQlJ3O0IIW4lB4HXOQfWdcUH+045BzBmlQj6/fEHidFlB7SbQQkOcnkH8jBtB8e6TQkJ0b0JAR2FB+0KwQkGNgkHzEwdB2w/FQdlm1UJGUIdCRXwEQesfFkIJjCpB6PUDQdWibkHi6HtB3/CGQlBAf0HbxV5CO9XtQk7NR0IBBZlCS2XnQk5jC0I6Qg9CN7jHQfi740H57HtB6RygQdLZPkHOgmtB5kJsQjwmKkI+he9B9IfDQkG1WkHiS7JCPtaHQehI7EJCf99CB4g+QdzIwkJPcqRCCqT6Qknk20H5O9FCSnD+QkEONEHpuTpCTd++Qd3NbEJT4j1CV0JNQdAuREHNY+xCSHFfQk/BbEJCiddCACIhQfims0JTeGdB8i7hQep/VkJKJGRCACCrQd1DWUI5XalCAB+5Qcsr/kI/sK9CTPeHQfdai0HuDFFB637XQj2DU0HVG2FB8jT7QevKYkH4J/RB6ylEQex3zUI4yd1CARLMQdb8aUJFwIRCOhnoQfELnkHMpWFCVg28QkM+kkJH++xCR4iGQeXot0IGW3JB1vLUQdvCVkHeGWVCWjPwQk8CTkJVb95CCe7dQkR7vUJC1zBCTV4cQk7Y5kJStbVCQTj4Qge3LUHrIL1CPfKIQjqOhUHT47NB0I2HQjipuEJCXjZB4keKQgFyiUI/JYNB3BDGQeueukH/69xB4AHsQjpK5UID2g5B7ixDQkWf0UI2nLRCCnnDQfS430JTfKpB1ShPQdDe3UHawydCQq4/QlERmkI6mOFCSB2YQku29EHeK/NCWfdgQfQovUI7sdFCUomMQkcAKUI77QZCW4+yQkge0EHrDUdCVpDGQeBPVEI/hBFCOYqkQkcDhUI05zJCRni6QgNO1kI65tNB+S4CQgEhNkI3CaxCQH5HQkbx9EHZqtRB4QUsQd7X40HbeqpB+NpYQlfj1UJKONZCWL5EQlSgZUH4oipCSjsuQgWFIkJObaFCRyO4QlIQbkICdpZCBHJ4QlNIHEI5sa9B8rRjQj/Fv0HyFUhCPuzsQkH3RUI8js9B4MoEQj8FtkHx5UZCUAihQlFqC0JUOw5B+MHjQlV6HUHq3R5CNfBwQkOGj0I6MZxCAgDMQkPOj0I/ueVB1Ir5QjpDdEHmTPdCAsEOQgC2n0I0lUxB0HVKQeht1TIIRkxPQVRfMzI4
AEABSP8BUwgMEAAQARD/ARDv/v/3AxD//96xAxDr09bvAhD5kNrwAhC49J5WEN60uiAQxtSQBBCMoOQCEJYIVFsIDBAAEAEQ/wEQ777+5gEQv/+flgIQ86vwCxD8tuutAhDd4KEKEMaAg7gBEIbt0AYQjeDjARBiXGMIAGRo////////////AXMIAHR4////////////AYABATxBAAAAAAAAAABIAVDSvez3u6Gy3k5YAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEPqvxPgCENiy43QQm+aYugIQ8fbKsQEQqIa4hQIQvbaD7QIQuYOmuwIQvc/q/AEQyLDAvgEQj739uQMQvoS5gQIQw6+SThCsoefcARCSuYcfEJL59oICEIHS1VgQiMzysAIQ5OCsugMQlKqGzgMQhpfCuwEQg+j+mgMQ8bjy2gMQtdfHDxC+z6avARCt1fbbAhDY/KLcAhCm6v8tEI776dgBEISO4toBEKnAuEgQtOWFpgEQzpPPtQMQxb+EwAEQhcrncxD16KSSAxD3i6SVAhC74+2FAhC+sJWuAhCgg9Y4EP2knLMCEPfB/RUQpO7lWxD70wEkKvwHQgoEIEJSiI1CNk2VQc4G30I9XwJCUFvSQf5uF0HUTHhB3doOQkSiU0JGXxtCU5K1QlcPhUHrr9NB507OQe7Kj0IGz9xCT5aLQlR37UJOkmRB2ibrQk1uOkJCk0RCUGxRQglr0kJThH1B4BnfQf5TeUHs/WhCCJXtQj1JGEJE3pFB6wCYQky6+kHhSvVB11aNQkhHuUJTLkRB+qn7QkM6JkHY4PxCU0b9QlUNjEJZnTFCTVH4Qk114EJYOfpCQPRhQfgA20H6ecZCClRBQeQ/6UJFTKNCRgu8Qj+QIEH3XH1CSdMIQfCJbEJISH1CU0GqQfV1qUI4o+5CCsNTQeQ5rUHvrotB66BqQltZXEIFxGdCQpW/QlAGjEHl2bFCB2eaQf9/UEI3ioFB5/AhQj9Xb0I+93ZCTOIsQfflR0HnRN9CWXY/Qka7t0HM7Z9CP30IQc9ZckH9/xpCPiGhQjW2TEJGY4tB2mBxQkknlEHIl+xB+sD3QdwjAEJSZ7lCU9JiQgkHrUHwv8VCNa6kQjzdEEJTaMtCR9mDQeS3I0I+KPJBzFADQc0NmkHa6NFCTCUHQeTqakH/If5CUTfsQlEockIArIhCAiFBQf2bqkIBxLJB61XiQkelPUJTg0BCVAq7QkajSUHnljFCSom7QkOhhEJMFV5CN4bTQjvCxUJTLOBCUgtyQkWiJkI9Wy1CQKQSQkAxc0I0O1dB4bR2QkR6wkJJ8oNB7ptwQcyPckJKvLdCPWspQjQ+QEH7MMNB/tfjQll7/UJTdt9B390DQlS+yEJRf0hCOdX+QdeOoUHTI31CRsA9QjjS10HbYdZCT01SQgKlAkI6KyFCTRw+Qkc/30JOc21B3Sl8QfF7ikH6CwFCOy02QfSqLEJG3y5CQaVLQdkYl0IDJWFCVDEpQkIyfkJFo2JCQWs4QkDa4UI1uSxB/irwQe03rEH3pnZB3UkfQdzodUI2m3RB7+d8QfcDP0HdTghB3QNIQgChaEIDabJB7Nz8Qjf2YkJJ6S9CWMGsQkg02UICFJlCV05TQgjKgkHfRCdB4RyKQjxaqEHsIXZCQFJKQjp4RUI7HcxCWxXDQj9TvEI/JzdCT38+QfDhF0H5NfVCRVU2QkPhgUJODNdCTlaSQgQ8X0IBnexCOZWJQknIOUJF9hpB5fotQdSNdUJHen9B8qghQfVC40JA89dB7qOuQeMszUIAMTZB9rEPQdLC2kJLdXpCChyOQlpOb0JRZkVCUVI2Qj3m7kI06kVB1VqzQfxtrUHyhtxCV6XYQkcInEJa+45B0ZC4QdXYCkI8HcxCO0QCQgWMrUJSDa1CPCY2QdXI6kI/dkFCToPNQj8vcUI5TptB+vAb
MghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEOv37/0CEKX6l9oDEPqU7rMBEKi0+n0QqMbFHhCS1r26ARCg0efJAxCDh8DOAhCFGlRbCAwQABABEP8BEL//rn8Q546P+gMQo8zU6gEQuqDzORDl4MQ3EJrF/zAQuaDlwAIQi4aJiAEQoAhcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUMyfx6T5l4KtugFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEIeBwIADEPmazfsBEPSV39ABELPridkBEIiRnGcQjcr/xAMQtqzlvAIQg6e32gIQwfzEPhDar7TwAxD4/LLYARCl6qOwAxCqlbvEARDxtNeVARCfsLLoAxDh05O4AxDi56iFARCulvrlARCpx5V9ENus59ICELekr6wBELaXtM8CEKHthyIQgOOR6AMQ3szO3gEQwrvy+gIQ7Km9FBCymIu0AhD6ptK8AhDVy5/OAxDzpaPJAxCyxILRAxDgxLShAhDXhN6MAxDfiPyBAxDfnrpvENCl57MCENaAi5kDEI683ecBEID/vb4DEJ2FypABENqS9LoCEPChASQq/AdCVTGNQduI+kHgMo9B75aQQll4uEHoarxCNE4nQlhaNUJbopBB48iKQlQ5qUJTXbdCCtdMQk+ffUHXVONCOo7CQk4F40JLo0FCUbn8QkyIjEJVG71B/B2OQgeZzEJDldlB0FxMQexqWkH7qhxB//1rQgHbnkICuylCVYlvQc5e2UJNtgZCViSAQlM500HVBDdB5M3nQluWOEI9IkRB0RWfQk5yc0JY7dBCPd4CQgm7FkHQQZFCQvZtQeFsKUJX/D5CWj7jQedbPUJP80FB/m3FQkTv3kJDydtB17ktQcpcfUJLt1FB6a6cQc6anUJOoBBB5XqhQjkHFkI3ulJCVpgsQgKBvEJHHxNCRepmQljuuUJTRq5CSMhtQgYh+UJIdcVCC3EYQlueh0Hpv/9CO278Qdnd+UHcxHZCRnx0Qj553EH7JW9CRjguQjhuGEI7yXVCOMCCQeYb6EIKUJNCNZ6hQdynp0IHGH5CRJ6GQkPtWEIF3fxCQrDHQf+cVkJTXw1B8PT+QkD8a0HjcvxB1riLQfoJJEHRNn1B1ex7QkdhqkHJpTFCR0MRQklE6EHRYzdCQlWCQkpDLUHQsjVCCl9aQdewpUJK3XZCUn1UQd39EkJFHyFCRbp6QjzXvEJInk9CNz87QkpJUkHlKtpCWsL5Qf8aPUHYg+RB5FDSQkzuzUHb3m9B4827Qf6ARkHzMg9CSfNvQkDKyEJJ0ldCR01gQdZxnEHQUqlB3Ql0Qe8LA0HtwmJB/yQtQgcuDEHJeF9B2owLQeSrrEJAoNVCPPbXQkWns0JDe/9CCAD1Qf2Y5EJKIShCS8FiQlZPz0JO9EZB1lB/Qkw+FkJOuSxB2+XVQgCOpkJFlbJB1qDyQjq0FkI8Op9CTgUUQd6yUkJDQvdCBPl9QjeHp0H1L1xCOgemQkNRv0JJzpVCT8O+QgLHUUJFP8JCWBEkQgMlr0H8ikpCOTGoQjhbzkIAei9CUYFAQftMw0HzkN9CALTMQk1uD0HY4uhCTBk7QgeCdkI+UnlCBAbHQkDHukIBQgdBzBb8QeGkTkHo5rVCPKgqQeisBUH2CsZCWZ7wQjbCh0Hx6mFCOCCGQgKjfUJE/iRB2lkmQkTizUHeIstCQANlQfArXEJB3QdCRTcEQfqNx0HnES1B7EeyQf/mNkH7IL9CVchEQjuKD0JRx4BB3vGiQeA9RUH9K7BCRhrfQexQ8UJL7T5CCkHKQgJ3eUJCH+dB/z4wQj9nmkHsJyNB0WXjQdlackI9XYRB8tQ3QgIWjEI7hURCUMRtQeLiUkHetXlCO/zfQkX3z0JJOUtB6ehhQddCAkHqm3ZB6cmc
Qj6ku0I01SJB0L2oQkMjT0HaNmwyCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQv/v2vwMQ4fav7QIQ/837ogIQn8K/6wMQ3qKQIxChoJiPAxCj6MHBARCchOfXARCRRVRbCAwQABABEP8BEPf3vj0Q4O7DPxDzhbiQAxCbzIf7ARDUouP8AhDriZCPAxCC4NoIEJCh5QIQgDNcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUIqBgcqS/J7ysgFYAGAgaAB1AAAAAIABAJwBmwEKAzIuMBAAGIACICAoADAAOwoDMi4wEP8BGAEjCC4QABAfEP8BEJP3idYDEIO1t+0DENzjytECEJz0tkAQzarh1QIQ26XnZhCOnOHTAhCj2M7dAhDiq8bMARCwjdnqARCG+ZaEAxDgwdbEAxCtwpwUELiYv7EBENWS/dsBEJ7yv6YBEODT24gBEJauo0wQ+qfDQhCi55pKEIWywbQBEK2f2vIBEL7jgM8CELu8qN0BEJ+C9OoBELna0YUDEPrSmpUDEIP2lYQBEIaNhMsDEMqC7Z4BEOSuwrUBEI6F2ogDELPl8+sBEIj7gI4CEOuvlYQDEIWu+N8BEPG8u8ADENSrllgQrdWdShDAluD6AhDz6YSKAhCb2MPAAxD9fyQq/AdCNVauQgX5mkH9/G1BzAQaQevT1kJb2X1CQIv/QfGRx0ICVDVCPY9dQgPn3EIBXmBCBHBOQd5JMUH39WdCRfxpQkq2s0I2LpNB2+8hQgEJ/UHjcTFCNEJKQd8Be0IA3tlCOCqIQfmZ7EI1J2RCC0t2QjjAOEJD+h1COtYuQe0KHUHfVZ5CS8FsQk97dEHwPiJB/rudQgI5y0JCfAxCBwthQlqgb0I6sJhCNSjZQkaYXEJBu2hCRHYOQjQIyUI4bgdCBSNpQkQJS0JFS21CWs8aQkI/JUHqYbBB0iv2Qluna0JBgVRB7485QljeOkH2F2xB0k4rQfx2XUJUzntCRYk3QdJzR0HP1LBCBd5FQkJGvEIAgKJCQ093QkyWTkHaXwhCNicCQlBCLkIFl9RB+1EtQdZawUJaMdNB3SQpQjttH0I6Ad1CRMoVQfbuEEJBgupCUFUcQfV2VUHLNIZB6sEdQjxGeUJKc/9B5BouQfr7KkHZ36FCPRzfQgUV70HLZO1B+FDXQkt1m0HdcnxCRtgTQkiwakHXrT5B+sODQgIfJEI7BktCVbiJQgu8tUHwir1CCIqEQjquJkH304pCRvKQQkYBGUHLsCxCA2BgQkAfMkHblkhCTW7vQflxhkHRBdZCQCPfQgYB4EIDVktB5sGFQj/bGUI4mAZCOIzJQkNiAEJIyRJCRJMTQlfkNEJMW2RCCe4qQlMsm0H/GQtB6RhtQdGHpEHXfoRCVrbGQk/BRkJAQSFBzCJpQcyXIUI906BCNTJuQdrwukH8y+pCAqPmQj4ZgEIGrdBCVN6WQlElZ0JQSMxB/5PiQjxKCUICowtB7wVbQlJ2VUIEMExCOyYTQeTIlkJMbi5CT0ocQeViI0HJfIpB1QVlQc+UB0HK4qNCAcLOQehdckJW9BlCA1XKQfg+AEHoAP5CCjuwQeuAZEHVRFZBzcPTQjmBQUI8h+RB133iQk/DAkJLwqBCS0HlQkZoEEJCcKtB8KR6QeL6XEHfV1RCOLyeQkheA0Hw6c1CUBCbQlK2R0Hq23tCRSUlQjpUMkHeeENB7xGxQddnY0JMJBdB1/ViQdjBi0HPholCTFyCQkEu4EJNbmJB7zsiQfXbXkHsFANCQkGDQfkBukI48tNB6eXBQkmea0JVdR5CRpTGQlbajEJQSJ5CPyM2QeTa0kHbKEhB1DqrQd7SREJKBZdCPIFoQdU7rUJTU+pCSDYQQlfUjkH8JEpB6fT6QgQ7zEH44uxCBmRPQeZi5UH0E95B2upoQkq580JMz1BCQLDDQkExqEI51e9B5PtfQc6VV0IC
fFBCQviZQdmKrEJLzLZCRd6BQlODw0HfV1pCQjeNQkMrGkJE/XQyCEZMT0FUXzMyOABAAUj/AVMIDBAAEAEQ/wEQ5/2f3gIQzd212wEQhPXDiQMQwK7ezgMQppizVBCj/PBkELXmwA8QooangAIQgQJUWwgMEAAQARD/ARD///2vARCdh/BTELe94s4BEMD/294DEKODoJUDEILk/+ICENfq6pMBEMCJphAQ2wFcYwgAZGj///////////8BcwgAdHj///////////8BgAEBPEEAAAAAAAAAAEgBUNDdrfCtm7aBIVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQwo2auAIQib/77wIQ4eiV+AIQiMn9gAIQqr3sXxDOz9a7AhCinp/PAhCZsajSARDt3ty2AhC6s4rDARC3isniARDjwKrpARCv7cF0EJzz5eMBEJy81P8BEMyBl70BEJeeg6YCEIaWo2MQ3Yf3cRDpgoiwAxDK+IneARDQneqdARCG8/rlAhCKr+bNARDgma8EELSxhakDEOXB+coBEKXsw/UCEKjkhNMDENPeoLsDEKnbt8wDELLg3/sBEKD8kAQQgp+Y1QIQxaamxgMQwZ+PJxDVv6EcEPawvfoCEKDU5PUCEML79dICEL3amMQCEPSqxjgQmgckKvwHQgZ9dUII9ItCN6sjQjgOn0IGMSVCQWgVQlvIp0H/n+hCPeckQlTLPkJX+jlCBtKwQkhpwUJJNnNCPSfKQgKcukIGJlBCVfkgQfj2gkICIhhCUb4xQf+LGEJAlmVB8tETQdyGc0I4qV5CO/muQf5YxUI7MgFCV0VsQgHkjkH1GpdCRtlLQls6zkJbHzdCRHfhQgqGwUI4sF9CRLj8QjfWEUHW2VZB0ABYQjrivEHeFYVB4mYhQgZdd0Ho8blCQj1GQlrdPEJan8BCPIemQgSUyUJFpU5CPE4zQeFWFEJKocZCCR2mQjXPakICddZB1OEzQj8aCkI8kNlCOTf5QgNC5kI0QsFB+K5nQlFT6UJH6ixB11oOQkaCtUHKyj9B05XOQj1z9UI0TF1CQpMZQggSMUJRrItCU74cQd0wOUIBQ2hBytz4Qdmdz0JAKLtCAQRTQdGKcUHSV55CNq4jQeOwq0JBnyVCStQEQdBLP0HZarJCAyCFQk+D80IHWuBCUT5JQllFYkH7MnBCNoYrQkH6LUHkgK9CC9ClQfvaUUHp/PxB3jK4QjQNdEHxAQ9B5NhDQkTkyEICdLhBzrB2QjvKSEJA5y5CTNs5QjtseEI5e+VB2wteQdxOTkHnNMlCSAD6Qd9yVkIDr7FCSyxnQgIUU0HS2Y5CTWI6QdLn5UHlqfRCOYmAQjh8/kI/T8RCPrZfQffYjkHsgH1CBkS3QkDIMkJJOjZB7dAaQeRKk0HrrytCRh8bQleF10I5LFZB6d0eQdhVmkJIOCNCSbwMQj1RWUIBeKdB1j3+Qe7IQ0I488pB51uqQdhT20IEmvxB2sXkQkB4hkJG4F1B52OMQdaLr0JVc7RB5/20Qkidp0JFhQpCTpr1QkAT6EJBy6ZCSQhNQgAzzkI5DRpB1qUgQc5ShUHqFyJCQUkOQjlZjEHlVRNCPAZSQdds0UHxf79CPkyKQkB2XEJSrO9CTnuDQkwfoUIItAdCBPM8Qe0o30JKgFNB5XSvQkB0NUJMS+9CPbqUQexEC0JMjGpCQeN4QeNzpEHUVFVB2aayQeA7akH8Qv9CVDp6QewOLUHhnVBCQJG8QjwJYkHTq9xB3dr8QkrHp0Hd4dFB5GJeQkuIBUHqhMNCTh+PQk1a3EHlXdJB3P04QllDL0Hr6x5CR0VNQd7WF0Hok09CUa9kQff0H0JJA7VB2G9gQfTJn0HoqvhCUbopQfLGPUJNHGRB5DHhQd9PT0JK9TdCVr5YQfbCKUHQn1tCUfOtQgIZ1kJZBS9CW9XiQkLjXUJA
v9RCRWCOQeBBK0HmS9JB76NYQeAe20JRZtJCOvxJQklvREJG4FFCSOjcQewfoEHe7DYAAAAAMghGTE9BVF8zMjgAQAFI/gFTCBEQ////////////ARABEP8BEN//j6oEEM2nleMDELyT3c4DEOzq+6wCEPDS0qcEEJznyukCEMGJ5pQEEKqYi9cDEPSVn5YCELLU6pwCENn4wukCEJ2soeoCEKnF+s0DEJkJVFsIERD///////////8BEAEQ/wEQ7dLE7AMQ2bew4gMQza7brAIQ+MXRswIQlcDRlAQQsNeT3AIQ442zlQIQ+Ivf4gMQmZjglwIQrZ+00AMQvseRlQIQ2oKwkgQQgK/4zgMQxQhcYwgAZGgAcwgAdHgAgAEBPEEAAAAAAAAAAEgBUKfc1JW9w+7pT1gAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQzcb97AIQl6qhKhD+wf7iAhDF4OecAxDCsoKxARDNoO+AAhCW88nFARD7qLPyAhDshOzRAxDxq4FgEI6wt5gCEOGpYBD335b3AhDunsRLEJ6gsbABEJvgkZADELHdjnIQ6bf64wIQ/Iq9/AIQrd7h+AMQ3LC01AIQsKqMExD8suL6ARDNoYlNEOSJ2pUDEJ/FneUDEMzTtZgCEIWLmOcCELD/wHgQpqbYpwEQxPGBtAIQxpi0HRD8hYroARCo5swSELGvvl0QsZi+owEQuum9EBC7r9/mARCJiN+9AxC93d6EAxC+yvizARCT3d/iAhDWByQq/AdCTf5rQd5ZmkIH2e5B7brbQjTemUHdEs5CU/6zQgOcMkI+9MZCAIBAQf/eRUH0+JpCBqHiQkNnrEHuevlCWtTHQj9SK0HlsBdCTVm9QdBiJEHu2dhCNJbKQkncTEJTpm1CAHQgQgDjnkIDod9CUkxGQf1xREJG2sVCNo/iQd4ZQkHiPjdCOp/3QjSpdUIKHo1B6OFIQgG9z0ICleJB/BixQk9NakHTVV1COUbeQk/m1EJU5P5CUfaeQgC2OEJGKoxB36pxQj1NC0IAVIhCAyw0QjsIcEI7qypCVpMGQjnf70JL5L5B6J7XQfS2a0IAbrtB6kASQdHdV0HNmttCRaz6QgIoxUJQvQVCVUYKQkOelUJPZchBzqGEQliLoEHYagZCOKtQQlXp2UJDPnVCOmoxQgb7pEI4kllB9rPNQkI/J0JOJalCAliUQlY6E0Hdig1CAJ1DQde9tkIF54BB1rvBQdNJWkI50zlCRiFVQdqa8EHjeq1B7loAQe/mVUJZM6NCTjHLQjkplkI71O5CQimXQfOmhEI+JhhCQwAhQjiHlUHo19RCUCfnQjije0HJnOVB1LtHQkfOXEJWeXdB2hP5QedZlkJX7sBCOyODQjSDX0I/M5tB0wqAQkxdlEJKM8hB7yETQfho2EH1gYlCAZCyQeBSPkJCc1VCAgmPQkL1u0I4bd1CAy+kQdV5c0I4bXVB2COnQlqJQkHVv4dCUfs5Qk9EyUJQiC5CUDo4QflScEIAzb9B2j1wQckTDEHZyzhB1dz+QkuCm0JRmGJCPtpLQj6hH0I7iN9CUQJuQdMT60IDMDZCCLhFQevb8EHySPdCCK2NQggP9UIC3g5CBF+UQdbuuUJYZTFCWN86Qe7AnkJW2kpBzt46Qds2REHMLNVB20ecQkTnHUI3uEtB5nmLQjsCcEJKmzxB41BPQjzApkH/JOBB/0ahQj4i0kHeZ/tB4Y3jQfn860HqKXNCAYgQQeA5dEI6HdpB/ureQgLkeEJCBg9B+Z8KQgdFKUI6qTJB0ZqHQjZ8wEH0lxJCR4AkQgI1PkHc/LpB6rk/Qjy1ZkH8NV5B/EjUQj74t0JRkaNCQjOdQjrnVUJAVI5CPTcCQeQh1UJWz1lCVnOOQkgPREI2hzhCSjtbQdV8kUHUcf5B6/i8QkUQyUJI/0xB8yBdQd4Vq0JTo3dC
VU8fQkGmt0I7RtVCPJIGQjef+0HlRwlCRMR+QfgvIkJa8Q5CQwb8QjqLCEJBrLRCQmRkQj7jqUI7s41CNKPlQjpb/EHiqkFB9urpQj9yskJC32BB/8kvQkDwU0I+SAtCOyZfQeaJvkJArCVCRK3kQk0SLkHpFXRB8UCmQeAy+wAAAAAyCEZMT0FUXzMyOABAAUj+AVMIERD///////////8BEAEQ/wEQzZrSkgQQ2Z+hogQQybGeoAQQprrrsAIQ3M7k6QIQ3+Cc6gMQ2fjI3AIQ7KLH0wIQgMmZoAQQlKjIogQQje2j7AMQpvu8sAIQ7qCn1QIQ4AhUWwgREP///////////wEQARD/ARDlsY3WAhD24NbkAxDHkIHoAhCcy/fkAxC0qYnWAhD45N2qAhCOtLuOBBDujd/iAxC85LDjAxDVvLviAxDkvujuAhCVhYzxAhDSyMDpAhDFCFxjCABkaABzCAB0eACAAQE8QQAAAAAAAAAASAFQiYivzqi406eaAVgAYCBoAHUAAAAAgAEAnAGbAQoDMi4wEAAYgAIgICgAMAA7CgMyLjAQ/wEYASMILhAAEB8Q/wEQ6Myj7QMQr+r91QIQoJ2qnQIQh8PEBhDBtqqGAhDU1YvZAhDhi/pREI2r1m0QnJuM1QEQzbOYMxCCjehYEJOvgnQQsJG5kgEQhJOAugEQ+on2DBD5sabsAxC7+M3vARCn2uy7AxCoo40GEMyK4+wBEI+y8MsBEIG/+swCENLpv9sBEKzMhegDEIWtrYICEJvgr0wQ2I2etgEQhMfKlwEQ3ZGjsQMQxurSlQMQq6TBtQEQy7unSBDg4/bTAxDjjKqZAxDvwfWWAxDt+LuUARCe95vzARDNzfChAxDc+bngAhCexajHARDqwdiaARCe7qiiAhDJgQEkKvwHQeiPS0JXSPpCRR23QjU94kHycJVBzIY9Qlc5KkJCw+BCWILVQebbG0JR5bxCS2v1Qgiwc0JCSxxCVJcQQj2ntEHPu9dCODH4QlXogEHug2JBzBEvQeMc9EJZ8ABCAlqMQkkU7kHw67JB7SmoQlCf6UH+3ldBysySQfdDpUHpZ1ZB1K5LQkewF0He8+VCVkmUQlSOEkJOSIBCRolPQjpxf0JVwmdCQoZtQj82YUHg6DdCNdh2Qk6cgUJW7SRB8Ij5Qedy2UHInXhCS3owQdprP0HKYg9CN0eGQjzKgEH5I9dB9TbbQeO/zUHPsSZCUebMQgef2EH9R7FCP30XQgFMNUI/7qBCNuokQlQ/nUICuUpCRO3xQkSbVkH3y95CRmrZQfOIwEJO9ZVCBxlGQdLIbkHgOJ5CPFRcQfAN9kHdoVFB4DTFQesBWUHy36FCT8kpQfY0JUJa2gBCRjesQj+RDEJB18FB662bQjaNU0JKcglCC2gMQkLQ30H6zaxB7i08QkkHIEJTClJCQ3+6QguZ4EI+1UxB3iWIQkCywEI/9I5CPwMhQdiVPEJIIwhCQs0/Qduf2kJDGpZB2pIoQdqhVEHO9C1B7MTMQglBgEH66m9CSRPtQkohhEI6RjRB7SFdQjyfaEIFBvtB+uq/QeGEM0JP+YFCA1OCQjn6BkIKVg5CWU4nQlok+0I3yi5B8kTMQeiGG0HPJYNCO7ndQjuYmkHhciBCQre3Qeq1jkJA5WFCRMkLQdVG/0H8UiFB1A3/Qjjr7UHYoz1CNp18QgSPOEHzkEZCCndpQj1TZkH++lpCAQspQjqZ2EIAmTFCAANRQfLqBUHbVppCSzd9Qj39EUIKPYtCTAqiQf9wvEIGcWFB4Z4IQkHvk0I4D71CTSxYQlUqmUHdkNdB5uLMQkIoYUIBxp9CTCC1QelHQEHPYoVCTx/4QkXGRUJM8eRCSVazQlOZNEJDjgNCARj2QfX48EI0iPVCPR3yQkZKZEH6uotCRJbMQkOfZEHRqbBB0N0CQeaIDkJRv3dB281QQkBTj0JPpZlCSItiQlFApUJKEv5CU6pbQjyO
aUHjvTBCNBhXQj15eUI05axCCVzRQd3Pz0JOujdCPDDFQj+BUUI5nmtCPP4/QjcNHEHJc7VCWfDbQdmKrkHcfiVB52RYQkKqO0IEXltCOkX7QjihD0HXXSZCNJ/nQfbdX0JL191CABPmQgAdEkHTuWFCPObOQelTEUHRjmZB3BYDQdL0s0H1FB1CAoyIQfzGdUHVd2JB3H1kQdzDm0I7/OJByl3kQe2MgEJFXexCU579QdvIVUHgc75CPuH+Qc86rkHcs6RB7HaZQkNHMEHQhixB8FwAMghGTE9BVF8zMjgAQAFI/wFTCAwQABABEP8BEP//u/gBEN+s1nwQj6ry+wMQxv2c3QEQl7P8owMQoMDYeRDitIKhAhCz5rbBAhCCCFRbCAwQABABEP8BEP/v3vwDEL2svywQjp6jkwMQ35C9nAMQnIDftwEQgYGAUBCOtM+RAhDXhIGIAhCUQlxjCABkaP///////////wFzCAB0eP///////////wGAAQE8QQAAAAAAAAAASAFQpZmOmMDwprZJWABgIGgAdQAAAACAAQCcAaMBCAAQAKQBqAEBsAEBuAEBFBsIABAAGXsUrkfheoQ/IAAo+QowCjsJgBSuR+F6dD8RUG1Mfk34aEAZ1Rsg9/TbYUAh7g8Mz8XtZEAo+Qo8QwmAFK5H4Xp0PxFQbUx+TfhoQBmJv5dlbdlhQCFmEkmFmOxkQCj5CkRLCYAUrkfhemQ/EYSDAMsa/HdAGbSkme/HmUJAIbSkme/HmUJAKPkKTFEAAAAAAAAAQFkAAAAAAADwP2EAAAAAAADwP2gAcQAAAAAAAPg/eQAAAAAAAARAgQEAAAAAAAAUQIkBAAAAAAAA4D+RAQAAAAAAAOA/HCMIARMKAzIuMREAAAAAAADgPxoIUFJFVklPVVMiCFNUQU5EQVJEKgROT05FMwghEQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/EQAAAAAAAPA/NDsIIBEAAAAAAAA7QBEAAAAAAIBKQBEAAAAAAAA7QBEAAAAAAIBKQBEAAAAAAAA6QBEAAAAAAABKQBEAAAAAAAA6QBEAAAAAAABKQBEAAAAAAAA7QBEAAAAAAIBKQBEAAAAAAAA7QBEAAAAAAIBKQBEAAAAAAAA7QBEAAAAAAIBJQBEAAAAAAAA7QBEAAAAAAIBJQBEAAAAAAAA+QBEAAAAAAIBKQBEAAAAAAAA+QBEAAAAAAIBKQBEAAAAAAAA7QBEAAAAAAIBKQBEAAAAAAAA7QBEAAAAAAIBKQBEAAAAAAAA9QBEAAAAAAABJQBEAAAAAAAA9QBEAAAAAAABJQBEAAAAAAAA/QBEAAAAAAIBHQBEAAAAAAAA/QBEAAAAAAIBHQDxDCCARAAAAAAAAO0ARAAAAAACASkARAAAAAAAAO0ARAAAAAACASkARAAAAAAAAOkARAAAAAAAASkARAAAAAAAAOkARAAAAAAAASkARAAAAAAAAO0ARAAAAAACASkARAAAAAAAAO0ARAAAAAACASkARAAAAAAAAO0ARAAAAAACASUARAAAAAAAAO0ARAAAAAACASUARAAAAAAAAPkARAAAAAACASkARAAAAAAAAPkARAAAAAACASkARAAAAAAAAO0ARAAAAAACASkAR
AAAAAAAAO0ARAAAAAACASkARAAAAAAAAPUARAAAAAAAASUARAAAAAAAAPUARAAAAAAAASUARAAAAAAAAP0ARAAAAAACAR0ARAAAAAAAAP0ARAAAAAACAR0BEUQAAAAAAAAAAWApg/////wdoCHAgeCCBAQAAAAAAACRAiAEAowEICBAAEAAQABAAEAAQABAAEACkAagBmQuwAZkLuwEJLUMc6+I2Gj8REkJjkv5rhkAZEkJjkv5rhkAhEkJjkv5rhkAomQu8AcMBCS1DHOviNho/ERJCY5L+a4ZAGQAAAAAAAAAAIQAAAAAAAAAAKJkLxAEUJCkzMzMzMzPTPzEAAAAAAAAMQDiACUGXhPVsXw7yP0sLCCAR4HjY5hTToD8RAAAAAAAAAAAR4HjY5hTToD8RAAAAAAAAAAARDLnI3z27gz8RowEitbItQD8RDLnI3z27gz8RowEitbItQD8R/2+NG7eWgj8R7S1Ecscifj8R/2+NG7eWgj8R7S1Ecscifj8RdVgdCk72pz8RpX76edL9lT8RdVgdCk72pz8RpX76edL9lT8Rok3qjfcnlD8RSNaotTyIoD8Rok3qjfcnlD8RSNaotTyIoD8RUXvNpQdqjT8R7ZbKRbL8lj8RUXvNpQdqjT8R7ZbKRbL8lj8RoY3HXx8WmT8RK1w8eaTyoj8RoY3HXx8WmT8RK1w8eaTyoj8RdNnesyEnqj8RC1c/nd6xlj8RdNnesyEnqj8RC1c/nd6xlj8MEwggEVprBCRAYVg/EcvDNfipb68/EVprBCRAYVg/EcvDNfipb68/Ef198FMTKJA/EW47f3M3764/Ef198FMTKJA/EW47f3M3764/EajfPJhjrpA/EUKAJ8MWa5c/EajfPJhjrpA/EUKAJ8MWa5c/EQAAAAAAAAAAET9YqO4il3w/EQAAAAAAAAAAET9YqO4il3w/EaQvzk6JmYM/EZgoHGxz024/EaQvzk6JmYM/EZgoHGxz024/ET0JGzrIXoA/EWR/yXQMB14/ET0JGzrIXoA/EWR/yXQMB14/ETclxfwjHlk/EQAAAAAAAAAAETclxfwjHlk/EQAAAAAAAAAAEQAAAAAAAAAAEaDvAmCTc2k/EQAAAAAAAAAAEaDvAmCTc2k/FExRAAAAAAAAAABbCCARAAAAAAAAQUARAAAAAAAAR0ARAAAAAAAAQUARAAAAAAAAR0ARAAAAAAAAQEARAAAAAAAAR0ARAAAAAAAAQEARAAAAAAAAR0ARAAAAAAAAQEARAAAAAACASEARAAAAAAAAQEARAAAAAACASEARAAAAAACAQUARAAAAAAAASkARAAAAAACAQUARAAAAAAAASkARAAAAAACAQEARAAAAAAAAS0ARAAAAAACAQEARAAAAAAAAS0ARAAAAAACAQEARAAAAAAAAS0ARAAAAAACAQEARAAAAAAAAS0ARAAAAAAAAQUARAAAAAACAS0ARAAAAAAAAQUARAAAAAACAS0ARAAAAAACAQUARAAAAAAAAS0ARAAAAAACAQUARAAAAAAAAS0BcYwggEQAAAAAAADxAEQAAAAAAgEdAEQAAAAAAADxAEQAAAAAAgEdAEQAAAAAAAEBAEQAAAAAAAEdAEQAAAAAAAEBAEQAAAAAAAEdAEQAAAAAAAEBAEQAAAAAAgEhAEQAAAAAAAEBAEQAAAAAAgEhAEQAAAAAAgEFAEQAAAAAAAEpAEQAAAAAAgEFAEQAAAAAAAEpAEQAAAAAAgEBAEQAAAAAAAEtAEQAAAAAAgEBAEQAAAAAAAEtAEQAAAAAAgEBAEQAAAAAAAEtAEQAAAAAAgEBAEQAAAAAAAEtAEQAAAAAAAEFAEQAAAAAAgEtAEQAAAAAAAEFAEQAAAAAAgEtAEQAAAAAAgEFAEQAAAAAAAEtAEQAAAAAAgEFAEQAAAAAAAEtAZGgAcAB4AIABBYgBAJIBCFNUQU5EQVJEmgEETk9ORaABAKgBAA=="
  },
  {
    "path": "Java/parkservices/src/test/resources/com/amazon/randomcutforest/parkservices/state/state_1.json",
    "content": "{\n  \"version\": \"2.1\",\n  \"forestState\": {\n    \"version\": \"2.0\",\n    \"totalUpdates\": 1257,\n    \"timeDecay\": 1.0e-4,\n    \"numberOfTrees\": 30,\n    \"sampleSize\": 256,\n    \"shingleSize\": 8,\n    \"dimensions\": 32,\n    \"outputAfter\": 32,\n    \"compressed\": true,\n    \"partialTreeState\": true,\n    \"boundingBoxCacheFraction\": 0.0,\n    \"storeSequenceIndexesEnabled\": false,\n    \"compact\": true,\n    \"internalShinglingEnabled\": false,\n    \"centerOfMassEnabled\": false,\n    \"precision\": \"FLOAT_32\",\n    \"pointStoreState\": {\n      \"version\": \"2.0\",\n      \"dimensions\": 32,\n      \"capacity\": 7681,\n      \"shingleSize\": 8,\n      \"precision\": \"FLOAT_32\",\n      \"startOfFreeSegment\": 9536,\n      \"pointData\": [\n        0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -76, 0,\n        0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0,\n        0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100,\n        0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0,\n        0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0,\n        0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0,\n        0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0,\n        0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, 68, 0,\n        0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 
68, 0, 0,\n        66, 68, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0,\n        0, 0, 0, 0, 66, -85, 0, 0, 66, -64, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -78, 0,\n        0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0,\n        66, 68, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0,\n        0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66,\n        -85, 0, 0, 66, -64, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0,\n        66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0,\n        0, 66, -106, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66,\n        -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0,\n        0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0,\n        0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -104, 0,\n        0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102,\n        0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66,\n        0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0,\n        66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0,\n        0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0,\n        66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        66, -86, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 
66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, 106, 0, 0, 66, 116, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        106, 0, 0, 66, 116, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0,\n        0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66,\n        -62, 0, 0, 66, -58, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0,\n        66, -112, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0,\n        0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0,\n        66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0,\n        0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102,\n        0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0,\n        0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -98, 0,\n        0, 66, -68, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -90,\n        0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0,\n        0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -76, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66,\n        -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 0, 0, 0, 0, 66, 
-78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0,\n        0, 0, 0, 66, -114, 0, 0, 66, -76, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        124, 0, 0, 66, -104, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        -104, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66,\n        -62, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0,\n        0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -110, 0,\n        0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -127, 0, 0, 66, -98,\n        0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0,\n        0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -127, 0, 0, 66, -98, 0, 0, 66, 72, 0,\n        0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0,\n        0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0,\n        66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66,\n        68, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0,\n        0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -98,\n        0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62,\n        0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72,\n        0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0,\n        0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -58, 0,\n        0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106,\n        0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66,\n        -106, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0,\n        0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66,\n        110, 0, 0, 
66, -116, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0,\n        0, 0, 0, 0, 66, 92, 0, 0, 66, 100, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66,\n        96, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0,\n        0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66,\n        92, 0, 0, 66, 100, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0,\n        0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0,\n        0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66,\n        -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66,\n        -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0,\n        0, 0, 0, 0, 66, -108, 0, 0, 66, -80, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0,\n        66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0,\n        66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0,\n        0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100,\n        0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0,\n        0, 66, -108, 0, 0, 66, -80, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -111,\n     
   0, 0, 66, -106, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66,\n        -106, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        66, -104, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0,\n        0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0,\n        66, -108, 0, 0, 66, -80, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -111, 0,\n        0, 66, -106, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -106,\n        0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66,\n        -120, 0, 0, 66, -108, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0,\n        66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0,\n        0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 108, 0, 0, 66, 84, 0,\n        0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0,\n        0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -120, 0,\n        0, 66, -108, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0,\n        0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 108, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0,\n        66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0,\n        66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0,\n        66, 72, 0, 0, 0, 0, 0, 0, 66, -65, 0, 0, 66, -64, 0, 0, 66, -66, 0, 0,\n        0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0,\n        0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 
0, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66,\n        -74, 0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66, -100, 0, 0, 66, 68, 0, 0, 0,\n        0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66,\n        68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66,\n        -64, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0,\n        0, 0, 0, 66, 126, 0, 0, 66, -100, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66,\n        -106, 0, 0, 66, -74, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0,\n        66, -74, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        66, -62, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0,\n        0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 96,\n        0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0,\n        0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -86, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0,\n        66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0,\n        66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66,\n        -109, 0, 0, 66, -86, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66,\n        -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 
0, 0, 0, 0, 0, 0, 66, -72, 0,\n        0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0,\n        0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0,\n        0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -118,\n        0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 0, 0, 0, 0, 66, -89, 0, 0, 66, -62, 0, 0, 66, -116, 0, 0, 0,\n        0, 0, 0, 66, -89, 0, 0, 66, -62, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -85, 0, 0, 66,\n        -68, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0,\n        0, 0, 0, 66, -89, 0, 0, 66, -62, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66,\n        -89, 0, 0, 66, -62, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -85, 0, 0, 66, -68, 0, 0, 66,\n        -102, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -78, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, -111, 0, 0, 66, -78, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -89, 0, 0, 66, -62, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -85, 0, 0, 66, -68, 0, 0, 66,\n        -102, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -78, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, -111, 0, 0, 66, -78, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0,\n        0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66,\n        -68, 0, 0, 66, -68, 0, 
0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0,\n        66, -82, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0,\n        0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0,\n        0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -100, 0,\n        0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68,\n        0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66,\n        -118, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66,\n        -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0,\n        0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0,\n        66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -72, 0, 0, 66, 92, 0, 0, 0, 0, 0,\n        0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, 104, 0,\n        0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0,\n        0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0,\n        66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0,\n        0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -113, 0, 0, 66, -102, 0, 0, 66, -124,\n        0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0,\n        0, 
66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, 68, 0,\n        0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0,\n        0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120,\n        0, 0, 0, 0, 0, 0, 66, -113, 0, 0, 66, -102, 0, 0, 66, -124, 0, 0, 0, 0,\n        0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -76,\n        0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0,\n        0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122,\n        0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0,\n        0, 0, 66, -105, 0, 0, 66, -64, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -105,\n        0, 0, 66, -64, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        0, 0, 0, 0, 66, 100, 0, 0, 66, 116, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -105, 0, 0,\n        66, -64, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -105, 0, 0, 66, -64, 0, 0,\n        66, 92, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 100,\n        0, 0, 66, 116, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66,\n        0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66,\n        0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -100, 0, 0, 66, -112, 0, 0, 0, 0,\n        0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -108,\n        0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66,\n        -78, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66,\n        -66, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0,\n        0, 0, 0, 66, -106, 0, 0, 66, -100, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66,\n        -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, 
-108, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -78, 0, 0,\n        66, -110, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 120, 0, 0, 66, 80, 0, 0,\n        0, 0, 0, 0, 66, 100, 0, 0, 66, 120, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0,\n        0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66,\n        120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66,\n        80, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0,\n        0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66,\n        68, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0,\n        0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -126,\n        0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0,\n        0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -84,\n        0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64,\n        0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70,\n        0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0,\n        0, 66, -110, 0, 0, 66, -80, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, 104, 0,\n        0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0,\n        0, 66, -84, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0,\n        0, 0, 0, 0, 0, 66, -70, 0, 0, 66, 
-70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0,\n        66, -80, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        66, 116, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0,\n        66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0,\n        66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        66, -88, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66,\n        -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        66, -104, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -76, 0, 0, 66, -88, 0, 0,\n        0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0,\n        66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -121, 0, 0, 66, -98, 0, 0, 66, 96, 0,\n        0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0,\n        66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0,\n        0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 0, 0, 0, 0, 66, -121, 0, 0, 66, -98, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0,\n        66, -70, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0,\n        0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66,\n        108, 0, 
0, 66, -126, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66,\n        -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0,\n        0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66,\n        -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66,\n        -126, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66,\n        112, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0,\n        0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66,\n        -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        66, -62, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0,\n        0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0,\n        66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        66, -86, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0,\n        0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66,\n        -125, 0, 0, 66, -114, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66,\n        88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0,\n        0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -125, 0, 0,\n        66, -114, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0,\n        66, 88, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0,\n        0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66,\n   
     84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66,\n        -68, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0,\n        0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66,\n        -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0,\n        0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0,\n        66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0,\n        0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126,\n        0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0,\n        0, 66, -68, 0, 0, 66, -62, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -68, 0,\n        0, 66, -62, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124,\n        0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0,\n        0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0,\n        66, -102, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0,\n        0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66,\n        -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, 
-66, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0,\n        0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -107,\n        0, 0, 66, -102, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -107, 0, 0, 66,\n        -102, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -108, 0, 0,\n        66, 108, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0,\n        0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -107, 0, 0, 66,\n        -102, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -107, 0, 0, 66, -102, 0, 0,\n        66, -112, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -108, 0, 0, 66, 108, 0,\n        0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0,\n        66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -107, 0, 0, 66, -102, 0,\n        0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -108, 0, 0, 66, 108,\n        0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0,\n        0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -100, 0,\n        0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -96,\n        0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -96, 0, 0, 66,\n        -122, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0,\n        0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66,\n        -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0,\n        66, -96, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -96, 0,\n        0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0,\n        0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0,\n        66, -96, 0, 0, 66, -96, 0, 0, 66, 
-96, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0,\n        66, -110, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -112, 0, 0, 66, -120, 0,\n        0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0,\n        66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0,\n        66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -112, 0, 0, 66, -120, 0, 0, 0, 0, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66,\n        68, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0,\n        0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -71,\n        0, 0, 66, -62, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70,\n        0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92,\n        0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0,\n        0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64,\n        0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0,\n        0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, 112, 0,\n        0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104,\n        0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0,\n        0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0,\n        66, 108, 0, 0, 66, 112, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0,\n        66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0,\n        0, 66, -124, 
0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0,\n        0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66,\n        -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0,\n        0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66,\n        -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76,\n        0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -95, 0, 0, 66, -88, 0,\n        0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0,\n        0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 0, 0, 0, 0, 0, 66, -95, 0, 0, 66, -88, 0, 0, 66, -102, 0, 0, 0,\n        0, 0, 0, 66, -122, 0, 0, 66, -100, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -122, 0, 0, 66, -100, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -95, 0, 0, 66, -88, 0, 0, 66,\n        -102, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -100, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, -122, 0, 0, 66, -100, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66,\n        -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0,\n        0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66,\n     
   -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        66, -86, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -60,\n        0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66,\n        -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66,\n        72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66,\n        -68, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -68, 0, 0, 66,\n        -116, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0,\n        0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66,\n        -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0,\n        66, -102, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0,\n        0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66,\n        -82, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0,\n        0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0,\n        0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0,\n        0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, 
-120, 0,\n        0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68,\n        0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 0, 0, 0, 0, 66, -73, 0, 0, 66, -70, 0, 0, 66, -76, 0, 0, 0,\n        0, 0, 0, 66, -73, 0, 0, 66, -70, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 0, 0, 0, 0, 66, -73, 0, 0, 66, -70, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0,\n        66, -73, 0, 0, 66, -70, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0,\n        66, -72, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66,\n        88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66,\n        -80, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0,\n        0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66,\n        114, 0, 0, 66, -126, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66,\n        -112, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0,\n        0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66,\n        -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, 114, 0, 0,\n        66, -126, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0,\n        0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0,\n        66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 
0, 0, 0, 66, -114, 0,\n        0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -107, 0, 0, 66, -90,\n        0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -107, 0, 0, 66, -90, 0, 0, 66,\n        -124, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0,\n        0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66,\n        -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0,\n        66, 120, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0,\n        0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66,\n        88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84,\n        0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84,\n        0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0,\n        66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -66, 0,\n        0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0,\n        0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66,\n        72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66,\n        -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0,\n        66, -66, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0,\n        66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -62, 0,\n        0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 
0, 66, -128, 0, 0, 66, -102,\n        0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66,\n        -118, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0,\n        0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -116, 0,\n        0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98,\n        0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -104, 0, 0, 66, -128, 0, 0, 0, 0, 0,\n        0, 66, -116, 0, 0, 66, -104, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 112,\n        0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, -116, 0, 0, 66, -104, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66,\n        -116, 0, 0, 66, -104, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66,\n        96, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0,\n        0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -124, 0,\n        0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70,\n        0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66,\n        -120, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -58, 0, 0, 66, -126, 0, 0, 0,\n        0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 
0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0,\n        0, 0, 0, 0, 66, -92, 0, 0, 66, -58, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0,\n        66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -72, 0, 0, 66, -104, 0, 0,\n        0, 0, 0, 0, 66, -88, 0, 0, 66, -72, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66,\n        -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66,\n        104, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0,\n        0, 0, 0, 66, -88, 0, 0, 66, -72, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66,\n        -88, 0, 0, 66, -72, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66,\n        -60, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0,\n        0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76,\n        0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108,\n        0, 0, 0, 0, 0, 0, 66, -85, 0, 0, 66, -60, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0,\n        66, -85, 0, 0, 66, -60, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -112, 0,\n        0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112,\n        0, 0, 66, -112, 0, 0, 0, 
0, 0, 0, 66, -92, 0, 0, 66, -88, 0, 0, 66, -96,\n        0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0,\n        0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0,\n        0, 66, -84, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0,\n        0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -88, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0,\n        66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0,\n        66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0,\n        0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66,\n        -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66,\n        -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0,\n        66, -112, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -100, 0, 0, 66, 80, 0,\n        0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0,\n        0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -100, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0,\n        66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0,\n        0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0,\n        0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 122,\n        0, 0, 66, -118, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66,\n        -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0,\n        
66, -110, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0,\n        0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0,\n        66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 122, 0,\n        0, 66, -118, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0,\n        0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0,\n        0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0,\n        0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -84,\n        0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0,\n        66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84,\n        0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0,\n        66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0,\n        66, -124, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, -124, 0,\n        0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0,\n        0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0,\n        66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -108, 0,\n        0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72,\n        0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0,\n        0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0,\n        66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0,\n        0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, 
-72, 0, 0, 66, -72, 0,\n        0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0,\n        0, 66, -60, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0,\n        0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0,\n        0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0,\n        0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66,\n        -122, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0,\n        0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66,\n        112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0,\n        0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -124, 0,\n        0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108,\n        0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112,\n        0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0,\n        0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0,\n        66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0,\n        0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -68, 0, 
0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66,\n        96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88,\n        0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0,\n        0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -68,\n        0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0,\n        0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -68,\n        0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0,\n        0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -119,\n        0, 0, 66, -112, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -119, 0, 0, 66,\n        -112, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 0, 0, 0, 0, 66, 110, 0, 0, 66, 124, 0, 0, 66, 96, 0, 0,\n        0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -119, 0, 0,\n        66, -112, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -119, 0, 0, 66, -112, 0,\n        0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 0, 0, 0, 0, 66, 110, 0, 0, 66, 124, 0, 0, 66, 96, 0, 0, 0, 0, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 0, 0, 0, 0, 66, -119, 0, 0, 66, -112, 0, 0, 66, -126, 0,\n        0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0,\n        0, 66, 110, 0, 0, 66, 124, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0,\n  
      66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66,\n        92, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0,\n        0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66,\n        96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66,\n        -96, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0,\n        66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66,\n        96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66, -96, 0, 0, 66,\n        -102, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0,\n        0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66,\n        -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66, -96, 0, 0, 66, -102,\n        0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0,\n        0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -106, 0,\n        0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126,\n        0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0,\n        66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 
0, 66, -117, 0,\n        0, 66, -84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0,\n        0, 66, -114, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0,\n        0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0,\n        66, 104, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0,\n        0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0,\n        66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0,\n        66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0,\n        0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0,\n        0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0,\n        66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -97, 0, 0, 66, -94, 0,\n        0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0,\n        0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0,\n        66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0,\n        0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        66, -78, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66,\n        126, 0, 0, 66, -100, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66,\n        -100, 0, 0, 66, 68, 0, 0, 0, 
0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0,\n        0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66,\n        -100, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66, -100, 0, 0, 66,\n        68, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0,\n        0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -116,\n        0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -114, 0, 0,\n        66, 120, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66,\n        100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0,\n        66, -116, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -114, 0, 0, 66, 120, 0,\n        0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0,\n        66, -128, 0, 0, 66, -118, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -128, 0,\n        0, 66, -118, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116,\n        0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -114, 0, 0, 66,\n        120, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0,\n        0, 0, 0, 66, -128, 0, 0, 66, -118, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66,\n        -128, 0, 0, 66, -118, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0,\n        66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0,\n        66, -70, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0,\n        0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0,\n        66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0,\n        66, -76, 
0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0,\n        0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0,\n        66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0,\n        0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116,\n        0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -84, 0, 0, 66, 108, 0, 0, 0, 0, 0,\n        0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -100,\n        0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66,\n        -70, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66,\n        72, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0,\n        0, 0, 0, 66, -111, 0, 0, 66, -84, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66, -70, 0,\n        0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0,\n        0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66,\n        -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66,\n        92, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0,\n        0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -125, 0, 0, 66, -92, 0, 0,\n        66, 68, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n   
     66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, -125, 0, 0, 66, -92, 0, 0, 66, 68, 0, 0,\n        0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66,\n        -72, 0, 0, 66, -64, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -64, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0,\n        66, -120, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        0, 0, 0, 0, 66, 102, 0, 0, 66, -128, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66,\n        -93, 0, 0, 66, -66, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -64, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -64, 0, 0, 66,\n        -80, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0,\n        0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66,\n        102, 0, 0, 66, -128, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -93, 0, 0, 66,\n        -66, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0,\n        0, 66, -109, 0, 0, 66, -82, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 112, 0,\n        0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0,\n        0, 66, -58, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0,\n        66, -125, 0, 0, 66, -124, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -109, 0,\n        0, 66, -82, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0,\n        0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0,\n        0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -125, 0, 0,\n        66, -124, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -101, 0, 0, 66, -64, 0,\n        0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -101, 0, 
0, 66, -64, 0, 0, 66, 108, 0,\n        0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0,\n        66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126,\n        0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0,\n        0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116,\n        0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82,\n        0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0,\n        0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0,\n        0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0,\n        0, 0, 0, 0, 0, 66, -56, 0, 0, 66, -56, 0, 0, 66, -56, 0, 0, 0, 0, 0, 0,\n        66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0,\n        66, -122, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66,\n        -56, 0, 0, 66, -56, 0, 0, 66, -56, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66,\n        -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66,\n        -74, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -66, 0, 0, 66, -126, 0, 0, 0,\n        0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -88, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0,\n        66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        0, 0, 0, 0, 66, 72, 
0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0,\n        66, -88, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0,\n        66, -58, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66,\n        72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0,\n        66, -112, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0,\n        0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0,\n        0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 108,\n        0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124,\n        0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78,\n        0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0,\n        0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -60, 0,\n        0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100,\n        0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94,\n        0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0,\n        0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, -60,\n        0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60,\n        0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66,\n        -100, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0,\n        0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66,\n        -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66,\n        -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0,\n        66, -116, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0,\n       
 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -62, 0,\n        0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0,\n        0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116,\n        0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0,\n        0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, 74, 0, 0, 66, 80, 0, 0, 66, 68, 0,\n        0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0,\n        0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -124, 0,\n        0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 0, 0, 0, 0, 66, 74, 0, 0, 66, 80, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66,\n        -80, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 80, 0, 0, 66, 72, 0, 0, 0, 0,\n        0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, 74,\n        0, 0, 66, 80, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0,\n        0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66,\n        76, 0, 0, 66, 80, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80,\n        0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0,\n        0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0,\n        0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -68, 0,\n        0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 
66, -60, 0,\n        0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0,\n        0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0,\n        66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0,\n        0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0,\n        66, 104, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -86, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66,\n        -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0,\n        0, 0, 0, 0, 66, -108, 0, 0, 66, -86, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0,\n        0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66, -94,\n        0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74,\n        0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0,\n        66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0,\n        66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        66, -78, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66,\n        -76, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, 
-124, 0, 0, 66, -124, 0, 0,\n        0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66,\n        68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66,\n        -78, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0,\n        0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66,\n        -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0,\n        0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -104,\n        0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66,\n        104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, -110, 0, 0, 66, 68, 0, 0,\n        0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -104, 0,\n        0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104,\n        0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66,\n        -128, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, -110, 0, 0, 66, 68, 0, 0, 0,\n        0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66,\n        -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0,\n        0, 66, 112, 0, 0, 0, 0, 0, 
0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0,\n        0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0,\n        66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66,\n        104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 112,\n        0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100,\n        0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0,\n        0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -74, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66,\n        112, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0,\n        0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 80,\n        0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0,\n        0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -74, 0, 0, 66, 68, 0,\n        0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0,\n        66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0,\n        0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66,\n        -95, 0, 0, 66, -80, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 
0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80,\n        0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0,\n        66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0,\n        0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0,\n        0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0,\n        66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108,\n        0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0,\n        0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, 88,\n        0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104,\n        0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124,\n        0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0,\n        0, 66, 106, 0, 0, 66, 116, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -80, 0,\n        0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0,\n        66, 88, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0,\n        0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 106, 0, 0,\n        66, 116, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0,\n        66, -96, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0,\n        0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -94, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, 106, 0, 0, 66, 116, 0, 0,\n        66, 96, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 
0,\n        0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66,\n        76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66,\n        -94, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66,\n        -94, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0,\n        0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92,\n        0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0,\n        0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -120,\n        0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66,\n        -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0,\n        0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -124,\n        0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0,\n        66, -82, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66,\n        88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0,\n        0, 0, 0, 66, 110, 0, 0, 66, -124, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66,\n        88, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0,\n        0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 
0, 66, 110,\n        0, 0, 66, -124, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86,\n        0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0,\n        0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66,\n        -116, 0, 0, 66, -88, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66,\n        -60, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0,\n        0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0,\n        0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66,\n        80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -59, 0, 0, 66, -58, 0, 0, 66,\n        -60, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0,\n        0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66,\n        76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, 
-86, 0, 0, 66,\n        -86, 0, 0, 0, 0, 0, 0, 66, -59, 0, 0, 66, -58, 0, 0, 66, -60, 0, 0, 0,\n        0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66,\n        -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66,\n        100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66,\n        72, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0,\n        0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66,\n        92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96,\n        0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0,\n        0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0,\n        66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0,\n        66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, 120,\n        0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66,\n        -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66,\n        -95, 0, 0, 66, -94, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        66, -104, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0,\n        0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -95, 0, 0, 66,\n        -94, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 
0, 66, -100, 0, 0, 0, 0, 0, 0,\n        66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66,\n        -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66,\n        -76, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0,\n        66, -124, 0, 0, 66, -118, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -96, 0,\n        0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0,\n        0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -103, 0, 0, 66, -96, 0, 0, 66,\n        -110, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0,\n        66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 0, 0, 0, 0, 66, -103, 0, 0, 66, -96, 0, 0, 66, -110, 0, 0, 0,\n        0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66,\n        -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66,\n        80, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0,\n        0, 66, -96, 0, 0, 66, -74, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -76, 0,\n        0, 66, -76, 
0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124,\n        0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120,\n        0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0,\n        66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0,\n        0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0,\n        66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0,\n        0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0,\n        0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102,\n        0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0,\n        66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -120, 0,\n        0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0,\n        0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 92, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0,\n        66, 104, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0,\n    
    66, -70, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -74, 0, 0, 66, 100, 0, 0,\n        0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66,\n        80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72,\n        0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0,\n        0, 0, 0, 66, -108, 0, 0, 66, -74, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0,\n        66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0,\n        0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0,\n        0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -125, 0, 0,\n        66, -118, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        66, -62, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0,\n        0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0,\n        66, -58, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 124,\n        0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116,\n        0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94,\n        0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66, -122, 0, 0, 66, 112, 0, 0, 0, 0, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        66, 116, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0,\n        0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 
0, 0, 0, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66,\n        92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66,\n        -88, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0,\n        0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66,\n        -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66,\n        92, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0,\n        0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -90,\n        0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90,\n        0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90,\n        0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0,\n        0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -62,\n        0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90,\n        0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90,\n        0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0,\n        0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 68,\n        0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0,\n        0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106,\n        0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0,\n        0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 92, 0,\n        0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0,\n        0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0,\n        0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0, 66, 100, 0,\n        0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 
0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0,\n        66, 92, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0,\n        66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -86, 0,\n        0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0,\n        0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108,\n        0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0,\n        0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -114, 0,\n        0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112,\n        0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -110, 0,\n        0, 66, -86, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0,\n        66, 68, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0,\n        0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0,\n        66, -108, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0,\n        66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, 84, 0,\n        0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0,\n        0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0,\n        66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -87, 0,\n        0, 66, -80, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -87, 0, 0, 66, -80, 0,\n        0, 66, 
-94, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0,\n        0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, -70, 0,\n        0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126,\n        0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94,\n        0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0,\n        0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 84, 0,\n        0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0,\n        66, -84, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0,\n        0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -118, 0,\n        0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66,\n        -104, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0,\n        0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 96, 0,\n        0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0,\n        0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 80, 0,\n        0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        66, -88, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        0, 0, 0, 0, 66, -84, 0, 0, 66, -64, 0, 0, 66, -104, 0, 0, 0, 0, 
0, 0,\n        66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66,\n        -84, 0, 0, 66, -64, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, 110, 0, 0, 66, -122, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        110, 0, 0, 66, -122, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66,\n        -64, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        110, 0, 0, 66, -122, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 110, 0, 0, 66,\n        -122, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66,\n        80, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0,\n        0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -76, 0,\n        0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0,\n        0, 66, 116, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0,\n        0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0,\n        0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0,\n        66, -76, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -91, 0, 0,\n        66, -64, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 
66, -91, 0, 0, 66, -64, 0, 0,\n        66, -118, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0,\n        66, -102, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0,\n        0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        66, -88, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0,\n        0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0,\n        0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -66, 0,\n        0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0,\n        0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0,\n        0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0,\n        0, 66, -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -116,\n        0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, 106, 0, 0, 66, 116, 0, 0, 66,\n        96, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0,\n        0, 0, 66, -127, 0, 0, 66, -96, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 96,\n        0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100,\n        0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0,\n        0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -98,\n        0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0,\n        0, 66, 96, 0, 0, 0, 
0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0,\n        66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, -116, 0, 0, 66, 96, 0,\n        0, 0, 0, 0, 0, 66, 124, 0, 0, 66, -116, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0,\n        66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0,\n        66, -102, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0,\n        0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -58, 0,\n        0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0,\n        0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -78, 0, 0, 66, -110, 0,\n        0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0,\n        0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -128, 0,\n        0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72,\n        0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -78, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0,\n        66, -110, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0,\n        0, 0, 0, 66, -92, 0, 0, 66, -64, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66,\n        -94, 0, 0, 66, -78, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76,\n        0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0,\n   
     0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -92, 0,\n        0, 66, -64, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0,\n        0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0,\n        0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -81, 0, 0,\n        66, -64, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -73,\n        0, 0, 66, -60, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124,\n        0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66,\n        -120, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -73, 0, 0, 66,\n        -60, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0,\n        0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0,\n        0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0,\n        66, -104, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0,\n        0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100,\n        0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0,\n        0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 
0, 66, 124, 0,\n        0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60,\n        0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0,\n        0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0,\n        66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0,\n        66, -58, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0,\n        0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66,\n        80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0,\n        0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66,\n        100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0,\n        0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66,\n        68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66,\n        -92, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0,\n        0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66,\n        80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68,\n        0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0,\n        0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -78, 0,\n        0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -116, 
0, 0, 66, -116, 0, 0, 66, -116,\n        0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0,\n        0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66,\n        -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        66, -78, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0,\n        0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0,\n        0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -100,\n        0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66,\n        112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -102, 0, 0,\n        66, -110, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -102, 0, 0, 66, -110, 0,\n        0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0,\n        66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -100, 0,\n        0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112,\n        0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -102, 0, 0, 66,\n        -110, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -102, 0, 0, 66, -110, 0, 0,\n        0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66,\n        -114, 0, 0, 66, -106, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0,\n        66, -106, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0,\n        0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0,\n        0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0,\n        66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -103, 0, 0, 66, -94, 0,\n        0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120,\n        0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0,\n        0, 66, 104, 0, 0, 
66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -88, 0,\n        0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -103, 0, 0, 66, -94, 0, 0, 66,\n        -112, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0,\n        0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66,\n        -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66,\n        -98, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66,\n        -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66,\n        76, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0,\n        0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 104,\n        0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0,\n        66, -116, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0,\n        0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0,\n        66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0,\n        66, -80, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0,\n        0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66,\n        96, 0, 0, 66, 112, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66,\n        -94, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0,\n        0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66,\n        -76, 
0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66,\n        112, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -88, 0,\n        0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0,\n        66, 72, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66,\n        -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66,\n        92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -115, 0, 0, 66, -100, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0,\n        0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -88,\n        0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62,\n        0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0,\n        0, 0, 0, 0, 0, 66, -115, 0, 0, 66, -100, 0, 0, 66, 124, 0, 0, 0, 0, 0,\n        0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 0, 0, 0, 0, 66, -87, 0, 0, 66, -72, 0, 0, 66, -102, 0, 0,\n        0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66,\n        -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0,\n        0, 0, 66, -87, 0, 0, 66, -72, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -72,\n        0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66,\n        0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, 
-86,\n        0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0,\n        0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -88, 0,\n        0, 66, -58, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0,\n        0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0,\n        0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -58, 0, 0,\n        66, -118, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66,\n        -64, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66,\n        -70, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0,\n        0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66,\n        -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0,\n        66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -108, 0, 0,\n        66, -114, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0,\n        0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66,\n        -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66,\n        72, 0, 0, 0, 0, 0, 0, 66, -111, 0, 0, 66, -108, 0, 0, 66, -114, 0, 0, 0,\n        0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66,\n        -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0,\n        66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0,\n        0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108,\n        0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 
0, 0, 66, 92, 0, 0, 0, 0, 0, 0,\n        66, 120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0,\n        0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128,\n        0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0,\n        0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 120, 0,\n        0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0,\n        0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114,\n        0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0,\n        0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66,\n        112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0,\n        66, -78, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -58, 0, 0, 66, 108, 0, 0,\n        0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0,\n        66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0,\n        0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -58, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0,\n        66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -72, 0,\n        0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0,\n        0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0,\n        0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -122,\n        0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0,\n        66, -90, 0, 0, 0, 0, 
0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -122, 0,\n        0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -90, 0, 0, 66,\n        116, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -90, 0, 0, 66, 116, 0, 0, 0,\n        0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -92,\n        0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66,\n        -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -90, 0, 0, 66, 116, 0,\n        0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -90, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0,\n        66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0,\n        66, -92, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -92, 0, 0,\n        66, 112, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -90, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66,\n        -114, 0, 0, 66, -92, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0,\n        66, -92, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0,\n        66, -82, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0,\n        0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0,\n        66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 116,\n        0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        66, -92, 0, 
0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 88,\n        0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 120, 0,\n        0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 120, 0, 0, 66, 112, 0,\n        0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0,\n        66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -122, 0,\n        0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66,\n        0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0,\n        0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -124, 0,\n        0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68,\n        0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0,\n        0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0,\n        0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -112,\n        0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66,\n        -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0,\n        0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66,\n        96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0,\n    
    0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0,\n        66, -64, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0,\n        66, -90, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -64, 0,\n        0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0,\n        0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0,\n        66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        66, -88, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0,\n        0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, -124, 0, 0, 66,\n        96, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0,\n        0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, 84,\n        0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0,\n        0, 66, 92, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0,\n        0, 0, 0, 0, 0, 66, 116, 0, 0, 66, -124, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0,\n        66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 66, -78, 0,\n        0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 
0, 0, 66, -126, 0,\n        0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58,\n        0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -96, 0, 0, 66,\n        -108, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0,\n        0, 0, 0, 66, -88, 0, 0, 66, -68, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66,\n        -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0,\n        66, -58, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -96, 0, 0, 66, -108, 0,\n        0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0,\n        66, -88, 0, 0, 66, -68, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -108, 0,\n        0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0,\n        0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0,\n        0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66,\n        120, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66,\n        -92, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0,\n        0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66,\n        -102, 0, 0, 66, -102, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0,\n        66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0,\n        0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0,\n        0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -110, 0, 0, 66, -114, 0, 0, 0, 0, 0,\n        0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, 100, 0,\n        0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -56, 0, 0, 66, -56, 0,\n        0, 66, -56, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104,\n        0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0,\n        66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 
0, 0, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 0, 0, 0, 0, 66, -56, 0, 0, 66, -56, 0, 0, 66, -56, 0, 0,\n        0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0,\n        66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66,\n        100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66,\n        112, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0,\n        0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -70, 0,\n        0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -90, 0,\n        0, 66, -114, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        66, 76, 0, 0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -90, 0, 0, 66, -114, 0, 0,\n        0, 0, 0, 0, 66, -124, 0, 0, 66, -122, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0,\n        66, -124, 0, 0, 66, -122, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 120, 0,\n        0, 66, 120, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 0, 0, 0, 0, 66, -102, 0, 0, 66, -90, 0, 0, 66, -114, 0, 0, 0, 0, 0,\n        0, 66, -124, 0, 0, 66, -122, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -124,\n        0, 0, 66, -122, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 66, 120, 
0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0,\n        0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66,\n        80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        -102, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66,\n        100, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0,\n        0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 124,\n        0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0,\n        0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, -102, 0, 0, 66, 68, 0,\n        0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0,\n        66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0,\n        0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 0, 0, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        0, 0, 0, 0, 66, -100, 0, 0, 66, -92, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0,\n        66, -100, 0, 0, 66, -92, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0,\n        66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        66, -92, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        0, 
0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0,\n        66, -100, 0, 0, 66, -92, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -100, 0,\n        0, 66, -92, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0,\n        0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0,\n        0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0,\n        66, -98, 0, 0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -92, 0, 0,\n        66, -108, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66,\n        -98, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0,\n        0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0,\n        66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0,\n        66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        66, -88, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0,\n        0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66,\n        -114, 0, 0, 66, -84, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66,\n        92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66,\n        -60, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0,\n        0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66,\n        -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66,\n        -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66,\n        -66, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0,\n        0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 
0, 0, 0, 66,\n        -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0,\n        0, 66, -124, 0, 0, 66, -108, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -64,\n        0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0,\n        66, -86, 0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0,\n        0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0,\n        0, 66, -70, 0, 0, 66, -62, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -114, 0,\n        0, 66, -98, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0, 66, -116,\n        0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86,\n        0, 0, 0, 0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0,\n        0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66,\n        -70, 0, 0, 66, -62, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66,\n        -98, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66,\n        116, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0,\n        0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66,\n        124, 0, 0, 66, -106, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66,\n        -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -98, 0, 0,\n        66, 124, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        -106, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 
66, -122, 0, 0, 66, -122, 0,\n        0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0,\n        0, 66, -115, 0, 0, 66, -80, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 80, 0,\n        0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0,\n        0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112,\n        0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0,\n        0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -128,\n        0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66,\n        -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0,\n        0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66,\n        -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, 120, 0, 0, 66, 120, 0, 0, 66,\n        120, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0,\n        0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66,\n        -78, 0, 0, 66, -78, 0, 0, 66, -78, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0,\n        0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -68,\n        0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0,\n        0, 66, 80, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0,\n        0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0,\n        66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, -86, 0, 
0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66,\n        68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66, -124, 0, 0,\n        66, 116, 0, 0, 0, 0, 0, 0, 66, 126, 0, 0, 66, -124, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0,\n        66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0,\n        66, -64, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0,\n        0, 0, 0, 0, 0, 66, 126, 0, 0, 66, -124, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0,\n        66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 112, 0,\n        0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0,\n        0, 66, 112, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0,\n        0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0,\n        0, 66, -120, 0, 0, 66, -102, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -120,\n        0, 0, 66, -102, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88,\n        0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112,\n        0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0,\n        0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -120,\n        0, 0, 66, -102, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66,\n        -102, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66,\n        -88, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0,\n        0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66,\n        -111, 0, 0, 66, -96, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0,\n        66, -102, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        66, -88, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        
0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66,\n        -111, 0, 0, 66, -96, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -122, 0, 0, 66, -122, 0,\n        0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0,\n        0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0,\n        66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0,\n        66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 66, -98, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110,\n        0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0,\n        66, 124, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0,\n        66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0,\n        0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -98, 0, 0, 66, -98, 0, 0, 66, -98, 0,\n        0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0,\n        66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0,\n        0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106,\n        0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0,\n        0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, -124,\n        0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66,\n        -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66,\n        -62, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0,\n        0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66,\n        -100, 0, 0, 66, -62, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, 124, 0, 0, 66,\n        124, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 
0, 66, -96, 0, 0, 0,\n        0, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66,\n        -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -106, 0,\n        0, 66, -112, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, -76, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66,\n        -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0,\n        66, -124, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -106, 0, 0, 66, -112, 0,\n        0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66,\n        -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66,\n        -76, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66,\n        -62, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0,\n        0, 0, 0, 66, -117, 0, 0, 66, -108, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66,\n        -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66,\n        -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -76, 0, 0, 66,\n        -96, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0,\n        0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 0, 0, 0, 0, 66,\n        -117, 0, 0, 66, -108, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, -116, 0, 0,\n        66, 96, 0, 0, 0, 0, 0, 0, 66, 124, 0, 0, 66, -116, 0, 0, 66, 96, 0, 0,\n        0, 0, 0, 0, 66, 124, 0, 0, 66, -116, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66,\n        -99, 0, 0, 66, -90, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66,\n        -90, 0, 0, 66, -108, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66,\n        -66, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0,\n        0, 0, 0, 66, -58, 0, 
0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66,\n        -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66,\n        88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -99, 0, 0, 66, -90, 0, 0, 66,\n        -108, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0,\n        0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66,\n        -58, 0, 0, 66, -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66,\n        -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66,\n        88, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0,\n        0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -106,\n        0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -58, 0, 0, 66,\n        -58, 0, 0, 66, -58, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66,\n        -76, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0,\n        0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -80,\n        0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66,\n        -106, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66,\n        96, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0,\n        0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0,\n        66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0,\n        0, 66, -92, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0,\n        0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0,\n        0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -100, 0,\n        0, 66, -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68,\n        0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66,\n        -128, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0,\n        0, 0, 0, 66, 112, 0, 0, 66, 124, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66,\n        -86, 0, 0, 66, 
-86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66,\n        -100, 0, 0, 66, -100, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0,\n        0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0,\n        66, 112, 0, 0, 66, 124, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0,\n        66, -114, 0, 0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0,\n        0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118,\n        0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0,\n        66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0,\n        66, 124, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0,\n        0, 66, -114, 0, 0, 0, 0, 0, 0, 66, -114, 0, 0, 66, -114, 0, 0, 66, -114,\n        0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0,\n        0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -64, 0,\n        0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126,\n        0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66,\n        -126, 0, 0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0,\n        0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66,\n        96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0,\n        66, -126, 0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0,\n        0, 0, 0, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 66, -96, 0, 0, 0, 0, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -91, 0, 0,\n        66, -80, 0, 0, 66, -102, 0, 0, 0, 0, 0, 0, 66, -91, 0, 0, 66, -80, 0, 0,\n        66, -102, 0, 0, 0, 0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0,\n        0, 0, 0, 0, 66, -113, 0, 0, 66, -78, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66,\n  
      104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66,\n        108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -91, 0, 0, 66, -80, 0, 0, 66,\n        -102, 0, 0, 0, 0, 0, 0, 66, -91, 0, 0, 66, -80, 0, 0, 66, -102, 0, 0, 0,\n        0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        -113, 0, 0, 66, -78, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, 104, 0, 0, 66,\n        104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66,\n        68, 0, 0, 0, 0, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 66, 68, 0, 0, 0, 0, 0,\n        0, 66, -112, 0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -126,\n        0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72,\n        0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90,\n        0, 0, 0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0,\n        0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -112,\n        0, 0, 66, -112, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66,\n        -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66, -100, 0, 0,\n        66, -100, 0, 0, 0, 0, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 66, 96, 0, 0, 0,\n        0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0,\n        66, -84, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0,\n        0, 0, 0, 0, 66, -90, 0, 0, 66, -70, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0,\n        66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, -116, 0, 0,\n        66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 0, 0, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0, 66, -84, 0, 0,\n        0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0, 66,\n        -90, 0, 0, 66, -70, 0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, 
-82, 0, 0, 66,\n        -82, 0, 0, 66, -82, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66,\n        100, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0,\n        0, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66,\n        -100, 0, 0, 66, -88, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0,\n        66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0,\n        66, 88, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0,\n        0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0, 0, 0, 0, 66,\n        116, 0, 0, 66, 116, 0, 0, 66, 116, 0, 0, 0, 0, 0, 0, 66, -100, 0, 0, 66,\n        -88, 0, 0, 66, -112, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 0, 0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0,\n        0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -70,\n        0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -103, 0, 0, 66, -96,\n        0, 0, 66, -110, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66,\n        -106, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0,\n        0, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 66, 88, 0, 0, 0, 0, 0, 0, 66, -70,\n        0, 0, 66, -70, 0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -70, 0, 0, 66, -70,\n        0, 0, 66, -70, 0, 0, 0, 0, 0, 0, 66, -103, 0, 0, 66, -96, 0, 0, 66,\n        -110, 0, 0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0,\n        0, 0, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0,\n        66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -80, 0,\n        0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0,\n        0, 66, -60, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -60, 0, 0, 66, -92, 0,\n        0, 0, 0, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 66, -106, 0, 0, 0, 0, 0,\n        0, 66, -116, 0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, -116,\n        0, 0, 66, -116, 0, 0, 66, -116, 0, 0, 0, 
0, 0, 0, 66, -80, 0, 0, 66,\n        -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66,\n        -60, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -60, 0, 0, 66, -92, 0, 0, 0,\n        0, 0, 0, 66, -97, 0, 0, 66, -88, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66,\n        -97, 0, 0, 66, -88, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66,\n        -80, 0, 0, 0, 0, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 66, -60, 0, 0, 0,\n        0, 0, 0, 66, -76, 0, 0, 66, -60, 0, 0, 66, -92, 0, 0, 0, 0, 0, 0, 66,\n        -97, 0, 0, 66, -88, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, -97, 0, 0, 66,\n        -88, 0, 0, 66, -106, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66,\n        84, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0,\n        0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66,\n        84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66,\n        -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0,\n        66, -118, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0,\n        0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66,\n        -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0,\n        66, -80, 0, 0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0,\n        0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0,\n        66, -108, 0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0,\n        0, 66, 100, 0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0,\n        0, 0, 0, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 66, -118, 0, 0, 0, 0, 0,\n        0, 66, -72, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -72, 0,\n        0, 66, -72, 0, 0, 66, -72, 0, 
0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -108,\n        0, 0, 66, -116, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100,\n        0, 0, 0, 0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0,\n        0, 66, -110, 0, 0, 66, -100, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -110,\n        0, 0, 66, -100, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72,\n        0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -112, 0, 0, 66, -108, 0, 0, 66,\n        -116, 0, 0, 0, 0, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 66, 100, 0, 0, 0,\n        0, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 66, 112, 0, 0, 0, 0, 0, 0, 66,\n        -110, 0, 0, 66, -100, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0,\n        66, -100, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0,\n        66, 72, 0, 0, 0, 0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0,\n        0, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 66, 80, 0, 0, 0, 0, 0, 0, 66, 72,\n        0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0,\n        0, 66, -88, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0,\n        0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0,\n        66, 92, 0, 0, 66, 92, 0, 0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66,\n        -62, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 72, 0, 0, 66, 72, 0, 0, 66,\n        72, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0,\n        0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -68,\n        0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0, 0, 66, 92, 0, 0, 66, 92, 0,\n        0, 66, 92, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -62, 0, 0, 66, -86, 0,\n        0, 0, 0, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0,\n        66, -80, 0, 0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0,\n        66, -82, 0, 0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0,\n        0, 0, 0, 
0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0,\n        66, -128, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, -80, 0,\n        0, 66, -80, 0, 0, 66, -80, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0,\n        0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -82, 0, 0, 66, -82, 0, 0, 66, -82, 0,\n        0, 0, 0, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 66, -110, 0, 0, 0, 0, 0,\n        0, 66, -90, 0, 0, 66, -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -128, 0,\n        0, 66, -128, 0, 0, 66, -128, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0,\n        0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0,\n        0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66,\n        -105, 0, 0, 66, -80, 0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66,\n        -90, 0, 0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -128, 0, 0, 66, -128, 0, 0,\n        66, -128, 0, 0, 0, 0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0,\n        0, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, -74,\n        0, 0, 66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -105, 0, 0, 66, -80,\n        0, 0, 66, 124, 0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -86, 0, 0, 66, 120,\n        0, 0, 0, 0, 0, 0, 66, -109, 0, 0, 66, -86, 0, 0, 66, 120, 0, 0, 0, 0, 0,\n        0, 66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0,\n        66, -72, 0, 0, 66, -72, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0, 0,\n        66, -74, 0, 0, 0, 0, 0, 0, 66, -105, 0, 0, 66, -80, 0, 0, 66, 124, 0, 0,\n        0, 0, 0, 0, 66, -109, 0, 0, 66, -86, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0,\n        66, -109, 0, 0, 66, -86, 0, 0, 66, 120, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, -72, 0, 0, 66, -72, 0, 0,\n        66, -72, 0, 0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -112, 0, 0, 66, 116, 0,\n        0, 0, 0, 0, 0, 66, -123, 0, 0, 66, -112, 0, 0, 66, 116, 0, 0, 0, 0, 0,\n        0, 66, 76, 0, 0, 66, 76, 0, 0, 66, 76, 0, 0, 0, 0, 0, 0, 66, 
-122, 0, 0,\n        66, -122, 0, 0, 66, -122, 0, 0, 0, 0, 0, 0, 66, -90, 0, 0, 66, -90, 0,\n        0, 66, -90, 0, 0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0,\n        0, 0, 0, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 66, 108, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        66, -92, 0, 0, 0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0,\n        0, 0, 0, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 66, -88, 0, 0, 0, 0, 0, 0,\n        66, 104, 0, 0, 66, 104, 0, 0, 66, 104, 0, 0, 0, 0, 0, 0, 66, -94, 0, 0,\n        66, -94, 0, 0, 66, -94, 0, 0, 0, 0, 0, 0, 66, 108, 0, 0, 66, 108, 0, 0,\n        66, 108, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        0, 0, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 66, -104, 0, 0, 0, 0, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -66, 0, 0,\n        66, -66, 0, 0, 66, -66, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0,\n        66, -68, 0, 0, 0, 0, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0,\n        0, 0, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0,\n        66, -76, 0, 0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0,\n        66, -74, 0, 0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0,\n        66, -92, 0, 0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0,\n        0, 0, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0,\n        66, -126, 0, 0, 66, -126, 0, 0, 66, -126, 0, 0, 0, 0, 0, 0, 66, -76, 0,\n        0, 66, -76, 0, 0, 66, -76, 0, 0, 0, 0, 0, 0, 66, -74, 0, 0, 66, -74, 0,\n        0, 66, -74, 0, 0, 0, 0, 0, 0, 66, -92, 0, 0, 66, -92, 0, 0, 66, -92, 0,\n        0, 0, 0, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0,\n        66, -120, 0, 0, 66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -97, 0,\n        0, 66, -90, 0, 0, 66, -104, 0, 0, 
0, 0, 0, 0, 66, -97, 0, 0, 66, -90, 0,\n        0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0,\n        0, 0, 0, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0,\n        66, -64, 0, 0, 66, -64, 0, 0, 66, -64, 0, 0, 0, 0, 0, 0, 66, -120, 0, 0,\n        66, -120, 0, 0, 66, -120, 0, 0, 0, 0, 0, 0, 66, -97, 0, 0, 66, -90, 0,\n        0, 66, -104, 0, 0, 0, 0, 0, 0, 66, -97, 0, 0, 66, -90, 0, 0, 66, -104,\n        0, 0, 0, 0, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 66, -68, 0, 0, 0, 0, 0,\n        0, 66, -86, 0, 0, 66, -86, 0, 0, 66, -86, 0, 0, 0, 0, 0, 0, 66, 84, 0,\n        0, 66, 84, 0, 0, 66, 84, 0, 0, 0, 0, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0,\n        66, 84, 0, 0, 0, 0, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0, 66, -62, 0, 0,\n        0, 0, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 66, -124, 0, 0, 0, 0, 0, 0,\n        66, 84, 0, 0, 66, 84, 0, 0, 66, 84, 0, 0\n      ],\n      \"compressed\": true,\n      \"refCount\": [\n        1, 14, 1257, 890731498, 374221668, 253604528, 650321227, 793411785,\n        959695834, 259937023, 679318644, 560624350, 815504289, 437912711,\n        537835778, 656352711, 768154046, 447744646, 816151886, 237848008,\n        347168801, 349292853, 643946593, 890679179, 364428614, 687504757,\n        484396673, 674678697, 476681853, 1110239934, 235302022, 575603917,\n        566644786, 350555497, 454130325, 492737986, 446444997, 478864503,\n        558547701, 1011233667, 463355641, 568763853, 364277821, 660081620,\n        808048492, 659541431, 906453230, 560775240, 339423827, 786043959,\n        455161553, 772974277, 788075462, 341332677, 702531530, 356746157,\n        454052692, 793952200, 538535356, 334118729, 454585985, 454064120,\n        342254895, 680513489, 686521829, 678482664, 432537945, 778498523,\n        483737874, 666644502, 664463976, 364815644, 885388692, 438493209,\n        352097009, 476601308, 862366586, 688037103, 372255050, 258559718,\n        891383955, 553680254, 484635160, 448710446, 
348409645, 761313087,\n        364852355, 598146072, 657273937, 334802373, 578296323, 145926973,\n        465002236, 787083911, 673562882, 703183826, 764298562, 357319020,\n        795686981, 884469075, 778023464, 891458028, 273912611, 693964365,\n        891845519, 363120831, 326551167, 380495310, 1018352344, 454048928,\n        687441502, 348330462, 915001195, 657968130, 808534764, 371300136,\n        798293151, 853725099, 250991796, 454124785, 694085102, 656854093,\n        464425577, 437380865, 787161091, 665457119, 544981452, 673071948,\n        989228900, 664795999, 981826014, 988842809, 686001116, 470800728,\n        591365799, 439109097, 679934225, 130357591, 667728580, 762920903,\n        988255765, 776717853, 692413349, 242183545, 342224473, 251019601,\n        267538542, 658033846, 574103676, 462696983, 265889949, 546167198,\n        236321922, 462249978, 1011933754, 483203552, 566188034, 597644660,\n        846903695, 1064483747, 5\n      ],\n      \"directLocationMap\": false,\n      \"locationList\": [\n        0, 9504, 1257, 38020, 114068, 456284, 798472, 874548, 950596, 1026644,\n        1102692, 1178740, 1254788, 1330836, 1673024, 1749100, 1825148, 1901196,\n        1977244, 2053292, 2129340, 2205388, 2281436, 2357484, 2433532, 2509580,\n        2585628, 2927844, 3003892, 3079940, 3155988, 3498204, 3574252, 3650300,\n        3726348, 3802396, 3878444, 3954492, 4030540, 4106588, 4182636, 4258684,\n        4600900, 4676948, 4752996, 4829044, 5171232, 5513476, 5589524, 5665572,\n        6007788, 6083836, 6159884, 6235932, 6311980, 6388028, 6730216, 6806292,\n        6882340, 6958388, 7034436, 7376624, 7452700, 7528748, 7870964, 8213152,\n        8555396, 8631444, 8707492, 8783540, 8859588, 8935636, 9011684, 9353872,\n        9429948, 9505996, 9582044, 9658092, 9734140, 9810188, 9886236, 10228424,\n        10304500, 10646688, 10722764, 10798812, 11141028, 11217076, 11293124,\n        11369172, 11445220, 11521268, 11597316, 11673364, 11749412, 
11825460,\n        11901508, 11977556, 12319744, 12395820, 12471868, 12547916, 12623964,\n        12700012, 12776060, 12852108, 12928156, 13270344, 13346420, 13688636,\n        13764684, 13840732, 13916780, 13992828, 14068876, 14144924, 14220972,\n        14563188, 14639236, 14715284, 14791332, 15133548, 15209596, 15285644,\n        15361692, 15437740, 15513788, 15589836, 15932052, 16274268, 16882624,\n        16958700, 17300916, 17376964, 17453012, 17529060, 17605108, 17681156,\n        17757204, 17833252, 17909300, 17985348, 18061396, 18137444, 18213492,\n        18555708, 19164064, 19240140, 19316188, 19392236, 19734452, 19810500,\n        19886548, 19962596, 20038644, 20114692, 20190740, 20266788, 20875144,\n        20951220, 21027268, 21103316, 21179364, 21255412, 21331460, 21673648,\n        21749724, 21825772, 21901820, 21977868, 22053916, 22129964, 22206012,\n        22282060, 22358108, 22700296, 22776372, 22852420, 22928468, 23004516,\n        23612872, 23688948, 23764996, 23841044, 23917092, 24259280, 24335356,\n        24677544, 24753620, 25095808, 25171884, 25247932, 25323980, 25666196,\n        25742244, 26084460, 26160508, 26502696, 26578772, 26654820, 26730868,\n        27073084, 27149132, 27225180, 27567368, 27643444, 27985660, 28061708,\n        28137756, 28213804, 28289852, 28632068, 28974284, 29316500, 29658716,\n        30000932, 30076980, 30153028, 30495216, 30571292, 30913480, 30989556,\n        31065604, 31141652, 31217700, 31293748, 31369796, 31445844, 31521892,\n        31597940, 31673988, 31750036, 31826084, 32168300, 32510488, 32586564,\n        33194920, 33270996, 33347044, 33423092, 33499140, 33575188, 33651236,\n        33727284, 34069472, 34145548, 34221596, 34297644, 34373692, 34715908,\n        34791956, 35134144, 35210220, 35286268, 35362316, 35704532, 35780580,\n        35856628, 35932676, 36008724, 36084772, 36160820, 36236868, 36579056,\n        36655132, 36731180, 36807228, 36883276, 37225492, 37301540, 37377588,\n        
37453636, 37529684, 37605732, 37947948, 38023996, 38366212, 38708428,\n        39050616, 39126692, 39202740, 39278788, 39354836, 39430884, 39506932,\n        39582980, 39925168, 40001244, 40077292, 40153340, 40229388, 40305436,\n        40381484, 40723672, 40799748, 40875796, 40951844, 41027892, 41370108,\n        41446156, 41788372, 42130560, 42206636, 42548852, 42624900, 42700948,\n        42776996, 42853044, 42929092, 43005140, 43081188, 43423404, 43765620,\n        43841668, 44183884, 44259932, 44335980, 44412028, 44754216, 44830292,\n        44906340, 45248528, 45324604, 45400652, 45476700, 45552748, 45628796,\n        45704844, 45780892, 46123108, 46465324, 46541372, 46617420, 46693468,\n        47035656, 47111732, 47187780, 47263828, 47606016, 47682092, 47758140,\n        47834188, 47910236, 47986284, 48062332, 48138380, 48214428, 48556616,\n        48632692, 48708740, 48784788, 48860836, 49203024, 49279100, 49621288,\n        49697364, 49773412, 49849460, 50191648, 50267724, 50343772, 50419820,\n        50495868, 50571916, 50647964, 50724012, 50800060, 50876108, 50952156,\n        51028204, 51104252, 51180300, 51522516, 51864704, 51940780, 52016828,\n        52092876, 52435064, 52511140, 52587188, 52663236, 52739284, 52815332,\n        52891380, 52967428, 53043476, 53385664, 53461740, 53537788, 53613836,\n        53689884, 53765932, 54108120, 54184196, 54526384, 54602460, 54678508,\n        55286864, 55362940, 55438988, 55781176, 55857252, 55933300, 56009348,\n        56085396, 56161444, 56503660, 56579708, 56655756, 56731804, 56807852,\n        56883900, 57226116, 57302164, 57378212, 57454260, 57530308, 57872524,\n        57948572, 58290788, 58366836, 58442884, 58518932, 58594980, 58937168,\n        59013244, 59089292, 59165340, 59241388, 59317436, 59393484, 59469532,\n        59545580, 59621628, 59963816, 60039892, 60115940, 60458128, 60534204,\n        60610252, 60952440, 61294656, 61370732, 61446780, 61522828, 61865044,\n        61941092, 62017140, 
62359356, 62435404, 62511452, 62587500, 62663548,\n        63005764, 63081812, 63157860, 63233908, 63576096, 63652172, 63728220,\n        64070408, 64412624, 64488700, 64564748, 64640796, 64716844, 65059032,\n        65135108, 65211156, 65553344, 65629420, 65705468, 65781516, 65857564,\n        66199780, 66275828, 66618044, 67226400, 67302476, 67378524, 67720740,\n        67796788, 67872836, 67948884, 68024932, 68100980, 68177028, 68253076,\n        68329124, 68405172, 68747360, 69089576, 69165652, 69241700, 69317748,\n        69393796, 69735984, 69812060, 69888108, 69964156, 70040204, 70116252,\n        70458468, 70534516, 70610564, 70686612, 70762660, 70838708, 70914756,\n        70990804, 71066852, 71409068, 71485116, 71561164, 71637212, 71979428,\n        72321644, 72397692, 72473740, 72815956, 72892004, 72968052, 73576408,\n        73652484, 73994672, 74070748, 74146796, 74222844, 74298892, 74374940,\n        74450988, 74527036, 74603084, 74679132, 74755180, 74831228, 74907276,\n        74983324, 75059372, 75401588, 75477636, 75819852, 75895900, 75971948,\n        76047996, 76124044, 76200092, 76276140, 76352188, 76428236, 76504284,\n        76580332, 76656380, 76732428, 77074616, 77150692, 77226740, 77568928,\n        77911144, 78253360, 78329436, 78405484, 78481532, 78823748, 78899796,\n        78975844, 79051892, 79127940, 79203988, 79280036, 79356084, 79432132,\n        79508180, 79850396, 80192584, 80800968, 81143184, 81219260, 81295308,\n        81371356, 81447404, 81523452, 81865640, 81941716, 82283904, 82359980,\n        82702196, 82778244, 83120460, 83196508, 83272556, 83348604, 83424652,\n        83500700, 83576748, 83652796, 83728844, 84071060, 84147108, 84489296,\n        84565372, 84907588, 84983636, 85325852, 85668068, 85744116, 86086332,\n        86162380, 86504596, 86846812, 86922860, 86998908, 87341096, 87417172,\n        87493220, 87835408, 87911484, 88253700, 88595888, 88671964, 88748012,\n        88824060, 88900108, 89242296, 89318372, 
89394420, 89470468, 89546516,\n        89888732, 90230920, 90306996, 9504\n      ],\n      \"reverseAvailable\": false,\n      \"internalShinglingEnabled\": false,\n      \"lastTimeStamp\": 1257,\n      \"rotationEnabled\": false,\n      \"dynamicResizingEnabled\": true,\n      \"currentStoreCapacity\": 512,\n      \"indexCapacity\": 2048\n    },\n    \"compactSamplerStates\": [\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.5861195, -1.605452, -1.5982624, -1.6388674, -1.6310605, -1.6108241,\n          -1.6641783, -1.6470399, -1.662391, -1.66458, -1.6381367, -1.6604348,\n          -1.722461, -1.6904022, -1.6829594, -1.6586255, -1.8328778, -1.6663431,\n          -1.8191967, -1.7615604, -1.7148815, -1.7484502, -1.6753336,\n          -1.7183716, -1.688128, -1.7450564, -2.1241255, -1.755236, -1.716314,\n          -1.6855574, -1.8352082, -1.6686058, -1.7088614, -1.975995, -2.1331093,\n          -1.7441834, -1.7506566, -2.0483587, -2.2547653, -1.8327026,\n          -1.8121274, -1.7302328, -1.7633137, -1.8724989, -1.7519345,\n          -1.7819105, -1.9474361, -1.8973167, -1.8720074, -1.8968571,\n          -1.8915143, -1.8632329, -2.01883, -2.1812334, -2.350511, -1.8697002,\n          -1.8398154, -1.7831405, -1.7950289, -1.932773, -1.715132, -2.1989625,\n          -1.9009744, -1.7144396, -2.7446961, -1.7951992, -2.4029047,\n          -1.9952371, -2.3514855, -2.1877155, -2.3432517, -1.9697201, -2.670341,\n          -2.2128923, -1.972904, -2.251253, -2.5463715, -2.7085016, -2.3085272,\n          -1.9825817, -2.072967, -1.8706789, -1.8189924, -2.3386497, -2.0664465,\n          -3.33256, -2.0368118, -2.2420359, -2.6768208, -1.9206775, -2.5956569,\n          -3.1106741, -1.9571904, -1.9927236, -2.3674033, -1.93867, -2.7404048,\n          -2.3078642, -2.6232505, -2.0971937, -2.4987488, -2.0778205,\n          -2.7914362, -2.1456559, -2.5070734, -2.059889, -2.2235098, -2.9149077,\n          -2.3681848, -2.396997, -2.5068617, -2.5728097, -1.8709942, 
-2.1455302,\n          -1.9927701, -2.7714732, -2.5182734, -2.1565442, -1.8815223,\n          -2.0475667, -2.3105578, -2.0638764, -2.2973845, -2.3146381,\n          -2.6567106, -2.4039922, -2.6203403, -3.4856248, -5.081313, -3.220412,\n          -4.812422, -1.9047714, -3.6176436, -2.6161025, -3.0316484, -3.013686,\n          -3.5585165, -2.4007401, -2.6348982, -2.766112, -2.9318974, -5.7781105,\n          -3.107272, -2.4127505, -3.3237684, -3.803411, -4.290522, -2.4230351,\n          -3.0978012, -2.1676643, -1.9878286, -2.5379953, -2.336568, -2.6192954,\n          -6.017296, -2.8725154, -6.3045855, -3.6723707, -2.8172421, -2.1898556,\n          -2.3871565, -2.51744, -2.7863479, -2.5990326, -2.4863505, -2.1796257,\n          -1.8518976, -3.370079, -6.063179, -3.258116, -5.121849, -6.345082,\n          -3.6713245, -2.6670954, -3.3832657, -4.917982, -2.97081, -3.0248885,\n          -2.7550912, -2.8822653, -3.8920598, -2.8241985, -4.593358, -4.472623,\n          -3.2764409, -2.8249595, -2.1388478, -2.8180516, -3.9035788, -4.516092,\n          -2.3858354, -3.4240582, -1.9947791, -5.736862, -3.374941, -3.9094043,\n          -5.1914043, -4.203039, -2.6490705, -2.395437, -2.304084, -2.9620337,\n          -3.2512295, -3.9594069, -2.2474992, -2.946548, -4.6885777, -2.8522136,\n          -2.4729125, -2.7774913, -3.1150968, -3.0363977, -4.197292, -2.7846549,\n          -2.6680098, -4.2414436, -3.8353665, -4.186747, -5.6943493, -3.8587017,\n          -3.2020812, -2.5837743, -3.115129, -5.767825, -3.1498506, -4.139573,\n          -2.4234726, -2.7053547, -4.19672, -3.4625618, -2.1197746, -3.3355079,\n          -3.1506562, -2.8635252, -3.1254945, -2.259082, -3.146692, -1.9144876,\n          -4.487445, -4.18177, -2.8036504, -3.9046266, -3.1428185, -3.6163304,\n          -3.2034893, -2.6041164, -2.3078747, -3.577054, -2.5983744, -3.4380496,\n          -3.7947206, -2.8015137, -2.9079344, -5.612013, -3.2981741, -3.4895682\n        ],\n        \"pointIndex\": [\n          6, 1256, 256, 
1580125493, 1009964779, 456934816, 1616333668,\n          1152569338, 108808059, 1358432074, 502384566, 1024448265, 1708452424,\n          1129878003, 1087712861, 1944483281, 1546063584, 68288588, 1273962710,\n          1220350266, 306704637, 1103944160, 1939752041, 1617814426, 1196814165,\n          313202232, 821422964, 1232975405, 511497819, 1879840308, 1710721726,\n          57714776, 426392352, 144334141, 919039593, 1301558173, 233030408,\n          984855939, 1016643541, 710104877, 1534287009, 1312146233, 1033950820,\n          1773005522, 1528561472, 976559285, 839263811, 515207941, 88259328,\n          999862042, 278485947, 337423713, 1396659120, 403778703, 1123720273,\n          1280314890, 1229704194, 1575998767, 1600038320, 453521515, 866331345,\n          412877840, 60755345, 14196166, 1493975856, 713593, 487101666,\n          500744337, 548561977, 552017438, 1948597105, 1060517442, 782972501,\n          653096203, 31812331, 1929607279, 1627551512, 1074301150, 19053580,\n          1291784360, 333312049, 1903734870, 966448202, 1883562658, 1350440220,\n          1448644102, 1489459036, 1700831799, 1250\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 3760827122461395656\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6600187, -1.6628351, -1.6632024, -1.6885065, -1.6844633, -1.673482,\n          -1.6683668, -1.703622, -1.6935254, -1.7039138, -1.7054497, -1.6792103,\n          -1.72292, -1.7178843, -1.7129887, -1.7370491, -1.9077396, -1.7476121,\n          -1.8414545, -1.9110444, -1.801772, -1.7981725, -1.7427167, -1.7117282,\n          -1.8734525, -1.758567, -1.7399035, -1.7192584, -1.7428987, -1.766276,\n          -1.8003932, 
-1.8911016, -1.8654419, -1.9670025, -1.9240105,\n          -1.7802987, -1.8447324, -1.967483, -1.8591907, -2.2108433, -1.9602256,\n          -1.8811072, -1.8307586, -1.8283223, -1.8461, -1.8134967, -1.9581954,\n          -1.8520461, -2.2066836, -1.8763704, -1.8831326, -1.8191657, -2.065619,\n          -1.7961874, -1.8889962, -1.7972072, -1.7482319, -1.8282884,\n          -1.9215875, -1.7745942, -1.934545, -1.9956965, -1.9398265, -2.018418,\n          -1.9581808, -1.8738496, -2.2210963, -2.4836748, -2.6438246,\n          -2.8173716, -2.0052333, -1.8060782, -1.8316134, -2.104083, -2.064001,\n          -2.0468729, -2.700277, -2.055099, -2.107124, -2.8776038, -4.302666,\n          -2.7787154, -2.3238614, -2.3610747, -2.1134987, -1.8448327, -2.189839,\n          -1.8508947, -2.1103697, -2.0987751, -1.9045995, -2.364662, -1.9768771,\n          -2.3049514, -2.373978, -2.013711, -2.3665648, -3.1223068, -2.4642084,\n          -2.2965417, -2.0021834, -2.0005822, -2.0901864, -2.5504415,\n          -1.8880817, -2.134783, -2.2326784, -1.942063, -2.0569196, -1.966368,\n          -1.9174302, -2.1160188, -2.2602866, -2.4733515, -2.23828, -1.9670134,\n          -1.8519195, -2.0879674, -1.9622679, -2.7823813, -2.1206293,\n          -2.3471045, -1.9580667, -3.0643625, -2.1881764, -2.4692035,\n          -2.1028755, -2.6679616, -2.442942, -2.224716, -2.0679758, -2.283862,\n          -1.9222883, -2.846171, -2.9377835, -2.5189304, -4.815355, -3.8114474,\n          -3.1087315, -3.1940696, -2.9174712, -2.1778202, -2.2337835, -1.843772,\n          -4.1040983, -2.920102, -4.2883368, -2.5598578, -2.9980335, -2.3457928,\n          -3.4544246, -2.3266225, -2.9991736, -4.0963154, -2.8809404, -2.822911,\n          -2.7196505, -3.5385666, -3.0671763, -4.2318783, -3.1986744,\n          -5.5718246, -6.046677, -3.9159167, -3.5294237, -2.4407678, -3.2104867,\n          -3.3342721, -3.589185, -4.893906, -2.4044142, -3.7951546, -2.498565,\n          -3.4903483, -3.1365232, -1.9214913, -2.196149, 
-2.4012504, -3.4272356,\n          -2.222576, -4.959666, -2.093998, -2.2162402, -2.6007705, -3.4578567,\n          -2.2444172, -3.3098018, -3.1957138, -3.1875644, -2.9233613,\n          -3.6141636, -4.2357774, -2.3627012, -3.0946198, -2.704567, -4.9396143,\n          -3.7426734, -3.4212067, -3.576373, -4.9750557, -2.3576858, -2.2333653,\n          -2.5606627, -3.0442796, -2.2103524, -4.6263657, -2.1121235,\n          -3.3557963, -2.6780543, -2.0564954, -4.5643115, -4.3294086, -3.104085,\n          -3.434541, -2.2409294, -2.0642347, -4.495369, -2.8710287, -3.2579474,\n          -2.7349076, -5.1881814, -2.744094, -2.8721514, -5.458582, -2.5830178,\n          -3.0423794, -2.320381, -3.1947503, -6.776149, -2.886313, -2.7594192,\n          -4.1165075, -2.2551475, -2.4149053, -2.3541772, -2.18199, -2.1128635,\n          -4.0967073, -2.4572492, -3.3653438, -5.049421, -2.8168807, -4.1294785,\n          -3.91239, -2.467887, -3.4557223, -3.03364, -4.037852, -3.395431,\n          -3.4063814, -3.2614224, -3.9141514, -3.2676873, -3.3500817\n        ],\n        \"pointIndex\": [\n          7, 1251, 254, 1525571388, 1252725678, 734617135, 667315246,\n          1657761947, 45159910, 1062461257, 366110, 641470608, 1064143194,\n          646559403, 1483898601, 859588358, 53598049, 855516880, 818783012,\n          519999467, 1645049211, 992924574, 220907159, 1841205239, 1342706414,\n          1224164520, 1198234193, 1736488113, 207560065, 345929946, 1488342345,\n          1395321750, 929518491, 853333241, 578273904, 1912236575, 828120018,\n          1770853159, 204242299, 1734994724, 751271264, 1516603618, 1916659496,\n          1225596508, 1873712255, 126275624, 1592779605, 1818674086, 1048352023,\n          273865496, 918513937, 1335984142, 1503695003, 1575075756, 131181001,\n          74515481, 1741445112, 638558354, 1206870861, 1359667607, 111902919,\n          400935972, 1669091394, 682883091, 1510216409, 937889914, 226937294,\n          628214313, 1082311274, 202884689, 
509614016, 1324335895, 315225316,\n          574261824, 735561627, 1070805347, 1383128251, 1721308942, 729293802,\n          1235113953, 276931205, 1140715726, 12209130, 1532289813, 1218043062,\n          1428698071, 1424460215, 1549987\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 254,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -1114890292068028840\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.7043537, -1.7149372, -1.7166703, -1.7321012, -1.7449932,\n          -1.7227896, -1.7189212, -1.7613436, -1.7871635, -1.7572314,\n          -1.9100767, -1.8015554, -1.7829351, -1.745284, -1.7499497, -1.7995156,\n          -1.8531065, -1.8204864, -1.8014812, -1.7835534, -1.7953764,\n          -1.9277264, -2.0113738, -1.8776013, -1.8830832, -1.8430904,\n          -1.9128852, -1.7552594, -1.7954891, -1.8587546, -1.8159387,\n          -1.8180984, -2.0760508, -1.9116563, -1.8913791, -1.8495423,\n          -2.3682237, -1.85536, -1.8663213, -1.8894315, -1.8001022, -1.9510405,\n          -2.0332441, -2.300753, -1.9995476, -2.3203413, -2.022984, -2.4649758,\n          -1.9842328, -1.9025321, -2.0115871, -1.9769313, -1.9613022,\n          -2.3817139, -1.9380883, -1.9054426, -1.7931068, -1.8823017,\n          -2.0860248, -1.9546908, -1.8849564, -1.8492993, -2.1535194,\n          -1.8650378, -1.9573538, -2.232175, -2.7863712, -2.6460342, -2.3452928,\n          -2.1120064, -2.1006784, -2.188658, -1.8926909, -2.4513972, -2.63265,\n          -1.89713, -1.9279783, -2.0149734, -2.3423433, -1.9609698, -2.8362937,\n          -2.5565622, -2.5604684, -2.0057104, -2.191599, -2.223279, -3.202308,\n          -2.3193972, -2.7031338, -2.5877492, -2.016321, -2.7814374, -2.8090856,\n          -2.025436, -2.1793358, 
-2.4806178, -4.986853, -2.0887856, -1.992191,\n          -2.2180266, -2.1675045, -2.785531, -2.471143, -2.0163069, -2.1689491,\n          -2.176144, -2.142885, -2.805913, -2.7247162, -1.9667814, -3.225324,\n          -1.9381685, -2.148721, -1.8139349, -2.243579, -2.0078924, -1.9078774,\n          -2.228902, -2.2091773, -2.0057032, -2.161559, -2.1420436, -2.3776362,\n          -1.8866057, -1.9161162, -2.997531, -3.6017506, -1.8844341, -3.650594,\n          -2.0622656, -2.4955025, -6.297177, -2.4560344, -2.827355, -3.3375702,\n          -4.2595506, -3.6476152, -5.298893, -4.6368704, -2.5827596, -4.727267,\n          -2.9171727, -3.3863933, -2.998544, -3.1508188, -2.3191326, -3.1005151,\n          -2.5385091, -2.9375808, -5.3113475, -4.4594173, -2.7024257,\n          -1.9682808, -3.3948848, -5.8313107, -2.652302, -3.31744, -2.7928,\n          -2.4913106, -2.1816418, -3.0713098, -2.9353008, -3.9156651, -3.589956,\n          -3.0757117, -3.907892, -4.369662, -3.1807384, -5.566969, -2.5556684,\n          -3.356561, -4.061002, -2.6578262, -3.358592, -4.061826, -3.035102,\n          -2.3694582, -6.079366, -4.249974, -2.6013722, -2.5957072, -2.3388069,\n          -2.0346706, -3.5047348, -4.197596, -4.9675274, -3.9339316, -4.9094734,\n          -2.1441245, -3.6834774, -2.2312505, -3.477304, -5.026757, -7.4546056,\n          -5.137707, -2.6693206, -2.614613, -3.6004379, -4.7180634, -2.28078,\n          -2.42328, -2.7575917, -2.2931254, -3.3485951, -3.7159684, -3.7249277,\n          -4.1952395, -2.490811, -2.0773237, -2.708938, -2.8349159, -2.1985443,\n          -2.5952754, -3.3955815, -2.7381892, -2.8703403, -3.5668871,\n          -2.8732376, -3.704257, -1.9876751, -5.8961663, -3.6108832, -5.704544,\n          -2.116727, -2.1399906, -3.2782805, -3.2339072, -4.4758325, -2.4205503,\n          -3.610604, -3.8915148, -4.0522075, -2.0418868, -7.400977, -4.301539,\n          -2.3623009, -3.504025, -2.3166306, -3.0060487, -3.201693, -2.291483,\n          -3.7904406, -3.120234, 
-2.4891293, -2.1493428, -2.5106375, -3.7456133,\n          -2.1408231, -3.118479, -2.2031925, -2.4266295, -3.3329139, -4.2434278,\n          -4.0106225, -6.332322, -2.2949204\n        ],\n        \"pointIndex\": [\n          1, 1256, 256, 1800496310, 189774369, 1190351877, 196117206,\n          1209024955, 119934254, 1060414962, 477995845, 1078535456, 1080003549,\n          269955583, 484283630, 137292677, 1183501254, 440274210, 1267133467,\n          1592147376, 817636891, 1473156248, 1463437761, 1788508518, 1194396926,\n          1773629862, 1137494071, 989375137, 1445520569, 726923453, 985493177,\n          857505566, 1568025519, 459218697, 474222116, 510601635, 943791427,\n          1120837853, 613003481, 49457990, 1414445346, 927181008, 1087892123,\n          1239384044, 1588103553, 1031660723, 105739915, 1975683509, 280236125,\n          1693635831, 25808913, 313240311, 250470390, 337217296, 1350402607,\n          829898214, 84780018, 1321833974, 140438629, 1625445113, 1235862771,\n          31884402, 731165007, 1124183235, 306538204, 708956841, 1789299587,\n          1665028448, 1790693330, 1617910472, 543099424, 1793169650, 584141920,\n          1896596068, 1551822291, 616405006, 700958677, 1210142631, 122109340,\n          1978669206, 507042740, 1070449643, 1067804100, 1832375323, 1185628390,\n          1315264144, 1390927903, 1837634414, 1255\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -8998098257901219462\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4847924, -1.50701, -1.4874974, -1.509434, -1.5804005, -1.517309,\n          -1.4889102, -1.5924674, -1.5380169, -1.5827285, -1.5945977,\n          -1.6033465, -1.522429, 
-1.5085082, -1.5259198, -1.6608759, -1.7157124,\n          -1.6005316, -1.7098669, -1.6417782, -1.7827258, -1.6653897, -1.761043,\n          -1.6149433, -1.8048114, -1.6053822, -1.5297601, -1.5150692,\n          -1.5335847, -1.5741924, -1.5903218, -1.6950215, -1.6855102,\n          -1.7289294, -1.7847432, -1.7010726, -1.7668756, -1.9151553,\n          -1.9073898, -1.7360523, -1.8851597, -1.8604245, -1.820512, -1.7844313,\n          -1.6933676, -1.9365067, -1.8104782, -1.8791007, -1.6876923,\n          -1.8407286, -1.87232, -1.613756, -1.66301, -1.5603495, -1.7004238,\n          -1.6415001, -1.721441, -2.1622522, -1.5830246, -1.7812307, -1.5904065,\n          -1.61179, -1.7648445, -2.074227, -2.0032582, -1.6958426, -1.7092977,\n          -2.421047, -1.9955932, -2.127096, -2.2460995, -2.0733979, -2.1900704,\n          -2.218347, -1.9587811, -2.30464, -2.3489394, -2.2056513, -2.4812882,\n          -2.078726, -2.0368192, -2.1401744, -1.9548048, -2.2267509, -2.3673909,\n          -1.8815244, -2.0716653, -1.8121344, -1.9581162, -1.7133777,\n          -1.8833144, -2.7113278, -2.1894011, -2.0413165, -2.2483463,\n          -2.0375435, -2.023743, -1.7335436, -1.8081505, -1.9442836, -1.9203551,\n          -2.0609462, -1.9958264, -1.8094562, -1.7540293, -1.7631687,\n          -1.9121307, -1.7658973, -1.6152104, -2.3850179, -1.7996753,\n          -1.9506682, -2.1833467, -1.836597, -2.4442554, -2.2432363, -2.3038113,\n          -1.69391, -2.0176008, -1.9852964, -2.22754, -1.5936773, -2.037487,\n          -2.4741428, -1.724286, -2.2496772, -2.1840785, -2.8905258, -2.5314753,\n          -3.2166378, -3.863982, -1.7463908, -2.2845786, -2.1900523, -1.9322034,\n          -2.7665477, -5.5880685, -5.0508027, -2.4426787, -2.914124, -3.4584386,\n          -2.4449632, -2.3848393, -2.6335444, -4.1916275, -2.6579905, -2.668138,\n          -4.2272997, -3.0954368, -4.5089917, -2.0358038, -7.6616907,\n          -3.7811038, -2.4330842, -2.4255292, -3.343035, -4.25475, -2.5636013,\n          
-3.1577828, -5.828969, -2.755335, -2.1080794, -2.4202938, -3.3008184,\n          -3.6514235, -2.5095358, -3.8863716, -2.957376, -3.5885198, -2.5151875,\n          -2.5355968, -2.064986, -2.024324, -3.3344228, -2.124326, -2.6870174,\n          -1.8441193, -3.208695, -2.3718288, -1.7876742, -5.6086903, -2.0368898,\n          -2.1928809, -4.623443, -3.5671632, -2.7656476, -2.826614, -2.6780763,\n          -2.2869632, -2.5077014, -2.7039807, -2.983468, -2.171145, -2.48659,\n          -3.0943327, -1.8994596, -1.8423891, -2.0048766, -2.3960173,\n          -2.6402557, -2.8885286, -2.8480966, -2.2679713, -4.381718, -2.0998023,\n          -2.8364131, -3.0806136, -2.1412249, -1.8483372, -2.3394818,\n          -3.4883568, -2.0058262, -2.246459, -3.0723298, -2.3015406, -2.499738,\n          -2.8829854, -2.6171253, -2.0047848, -2.90026, -6.3821893, -1.9659716,\n          -2.7831266, -2.6806984, -5.0689373, -2.3406537, -2.19754, -2.8538144,\n          -2.9117985, -3.5243957, -2.9115815, -3.5294943, -3.2356849, -3.04845,\n          -2.9486272, -3.6131105, -1.977847, -6.3758364, -2.1673677, -2.0754735,\n          -2.849869, -3.628077, -2.2775931, -2.9878826, -1.9839334, -2.5885856,\n          -2.2625816, -4.0681295, -3.3519099, -2.0539382, -3.1593044, -4.923024,\n          -2.3891122, -4.795825, -4.269026\n        ],\n        \"pointIndex\": [\n          0, 1255, 255, 710517003, 550006573, 1460889948, 278313358, 1928382954,\n          1457868774, 375750180, 659759259, 1245010209, 1393816621, 1636103232,\n          4955704, 320083063, 7681779, 952566350, 47071417, 1922726604,\n          703184345, 1166656767, 1673490262, 1537542088, 267954450, 123703178,\n          1580307934, 1819137992, 192979481, 1164935904, 1382586570, 1219881396,\n          516977693, 1870190553, 1809325884, 584116891, 147195552, 669041452,\n          837313472, 731650393, 471068713, 1265016102, 1211335961, 1217334750,\n          1427413919, 1822094254, 769160, 265112274, 969608639, 627954958,\n          
1097449659, 974211871, 1811479358, 1339015972, 1232402352, 30077149,\n          1551276114, 1814770485, 1115304358, 762235225, 591355174, 627715801,\n          406647944, 1666268896, 1320446352, 474012195, 4550334, 1336588673,\n          916815690, 1115793964, 980593224, 637951823, 1489716944, 414502382,\n          813421999, 1643886547, 1470838323, 1925194630, 1453541655, 975575488,\n          1013479497, 923605226, 390447053, 942631434, 1204611503, 1686576751,\n          1845234451, 1981264463\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 38561163469837675\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4610527, -1.48245, -1.4639573, -1.5015463, -1.5454192, -1.4826655,\n          -1.4737648, -1.6106573, -1.525192, -1.5471185, -1.6125231, -1.505095,\n          -1.5421757, -1.5058594, -1.5234768, -1.6741594, -1.9094331,\n          -1.7995727, -1.6336598, -1.8679664, -1.595384, -1.6628966, -1.6441401,\n          -1.7069467, -1.6833416, -1.9401432, -1.588832, -1.5424707, -1.7687211,\n          -1.5431672, -1.5274942, -1.8162181, -1.7659051, -1.9308692,\n          -1.9288825, -1.9051023, -1.9985346, -1.6765146, -1.6832094,\n          -1.8760269, -1.9810698, -1.6409447, -1.6005028, -1.9358605,\n          -2.1724792, -1.6857752, -1.6740047, -1.8685497, -1.9233545,\n          -1.8496706, -1.6932981, -2.1886141, -1.9640974, -1.6953177,\n          -2.0331624, -1.9634979, -1.6556059, -1.8025886, -1.9032472,\n          -1.5521348, -1.7520154, -1.5808368, -1.5562763, -2.6935897,\n          -2.6085107, -2.0786266, -1.9147294, -1.9673487, -2.1261866,\n          -2.8225563, -3.1242068, -2.2550943, -2.5607169, -2.4968712,\n          -2.1055422, 
-2.3150427, -1.7537509, -1.737771, -2.0998735, -2.077217,\n          -2.2581348, -2.054537, -2.0298986, -1.9485894, -1.792361, -1.7761369,\n          -2.2579038, -2.0488272, -2.5935009, -2.393297, -2.56187, -3.085866,\n          -1.7331612, -1.8431895, -1.9609962, -2.9646697, -2.5008316,\n          -2.0777664, -1.9832842, -1.9663002, -2.3825483, -1.9030224,\n          -1.8568329, -2.2311988, -2.5922384, -2.0080917, -2.460374, -2.4697714,\n          -1.7773494, -2.2537482, -2.8810356, -2.011637, -2.4875133, -2.241009,\n          -2.056611, -1.8162489, -1.9599434, -2.1026204, -1.9309919, -1.6801635,\n          -2.6104155, -1.9660062, -1.8532897, -1.9000486, -2.7101104,\n          -1.6306072, -1.6173607, -3.9097295, -3.8940182, -2.6971962,\n          -3.3223963, -3.4434423, -3.4364245, -2.1258404, -2.3368773, -2.441312,\n          -2.1282659, -3.1267962, -3.5049665, -3.6809883, -3.9945335,\n          -3.6619272, -5.4106927, -2.2863889, -2.4651685, -2.6824324, -2.964626,\n          -3.6960573, -3.0068357, -2.2048318, -4.7914104, -2.3593392,\n          -2.8784318, -6.197217, -3.3857186, -3.8565915, -4.5197845, -2.6455338,\n          -2.2545123, -2.4242928, -3.8621643, -2.764206, -2.6078105, -2.3123567,\n          -2.1793225, -2.3973627, -3.571396, -2.1274083, -3.0041566, -1.8787048,\n          -1.8227599, -4.072172, -2.113721, -3.4936752, -3.411475, -4.599114,\n          -2.5682142, -2.9804878, -4.6184344, -5.995202, -2.5762093, -3.5060537,\n          -4.381844, -3.2079947, -5.117505, -3.109518, -1.779692, -2.0144317,\n          -3.0996954, -4.316557, -2.463515, -3.9305122, -3.3319016, -2.8997607,\n          -3.4705358, -2.4835873, -5.850858, -1.9866576, -3.528817, -3.0366244,\n          -3.3330505, -2.718776, -2.718735, -1.9553635, -3.5689576, -2.9435978,\n          -4.1096764, -2.6101785, -4.1002913, -2.6639524, -2.9546604,\n          -2.4940739, -2.1748304, -4.535578, -4.664909, -2.7323165, -4.75438,\n          -3.2862427, -3.2614667, -2.484071, -3.7774487, 
-3.1729262, -7.547099,\n          -2.623589, -2.038379, -3.7038631, -2.7044697, -3.5491626, -4.805202,\n          -2.4135547, -2.1463873, -2.3763375, -2.5803976, -2.7008436,\n          -5.6769996, -4.0309668, -3.6546214, -4.3860655, -3.794969, -3.0905387,\n          -3.366902, -3.4968405, -5.5036583, -2.1061406, -2.2766025, -2.2931263,\n          -2.0818744, -3.2892034, -1.9717209, -4.085529, -3.8437142, -3.004554,\n          -1.7959849, -5.085871, -1.8072813\n        ],\n        \"pointIndex\": [\n          3, 1247, 255, 1199942421, 562999165, 1070673656, 1648005124,\n          1929122355, 1827691693, 1321528356, 1421296621, 1292041204,\n          1667219473, 600183277, 274451700, 305722385, 1287568165, 524209408,\n          1579828185, 1488493102, 1440127259, 1187010258, 1115156372,\n          1611712180, 1609013166, 435872960, 963643107, 1092400314, 260870723,\n          941609578, 330439147, 1452373916, 369867335, 1413235891, 453582315,\n          1807834238, 1883437356, 560520063, 1380675025, 731172719, 822295120,\n          1176500626, 1030630770, 884990575, 1562333977, 704088563, 35590968,\n          1814568983, 825249153, 1832027883, 323386335, 842455318, 1335947106,\n          1529407566, 300958270, 979530004, 319923248, 1589275081, 758569902,\n          351543768, 144906887, 375687507, 1671274399, 1111674965, 471064683,\n          451480909, 1673206955, 1014142190, 1607091130, 1352018168, 516567802,\n          746070329, 1688411892, 1085929213, 630386208, 719834486, 202105144,\n          751110218, 793271136, 1220769635, 814291763, 213857926, 1774853900,\n          1546837229, 1143631568, 1537069672, 1597334650, 1918918486\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 
-6370827395103410126\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.5445218, -1.5465407, -1.5504966, -1.547764, -1.560619, -1.5533254,\n          -1.5759339, -1.5785238, -1.6054047, -1.5841495, -1.576058, -1.5569911,\n          -1.5982715, -1.612058, -1.5798635, -1.5861368, -1.6388417, -1.6204617,\n          -1.6824679, -1.6229888, -1.6407508, -1.6453023, -1.5819863, -1.566079,\n          -1.5928731, -1.6855135, -1.783996, -1.7316606, -1.6485487, -1.7861702,\n          -1.6092962, -1.6261423, -2.1053495, -1.6423863, -1.7105014,\n          -1.7465937, -1.6570605, -1.8360097, -1.9010681, -1.6361474, -1.77335,\n          -1.6803484, -2.0501294, -1.7019562, -1.7934632, -2.0073125, -1.768919,\n          -1.6914185, -1.7652011, -1.66249, -1.6217879, -1.8140017, -1.7681481,\n          -1.9364661, -1.8415885, -1.7473153, -1.9307748, -1.6952391,\n          -1.7963362, -2.0055091, -1.8705586, -1.8039247, -1.6238333,\n          -1.8887676, -2.1948328, -2.1852083, -2.309651, -1.8040459, -1.8527874,\n          -1.8589574, -1.801687, -2.8378289, -3.095742, -2.419655, -1.6732262,\n          -1.8548335, -2.3534513, -2.5250976, -2.485404, -1.7551335, -2.627682,\n          -1.804988, -1.9720448, -1.7464341, -2.1275232, -2.0847194, -2.2849941,\n          -1.864146, -1.8242195, -1.986032, -2.1918647, -2.9306834, -3.3716435,\n          -1.7709521, -2.3122852, -2.3475082, -2.1665761, -2.0726194,\n          -2.2897077, -1.8331085, -1.9517444, -1.7562053, -1.8337712,\n          -1.8928871, -2.0578384, -1.8316461, -1.8601738, -2.2226956,\n          -2.4449635, -2.0460827, -1.9298851, -1.9729121, -1.8871083,\n          -1.9636409, -2.0409806, -1.7165915, -2.568095, -2.0460265, -2.384036,\n          -3.0289366, -2.0829391, -3.1761105, -2.3723962, -2.9863064,\n          -1.8120085, -1.9704975, -1.7034762, -1.9443214, -3.274785, -3.090201,\n          -2.448313, -3.8890853, -2.2987845, -2.445577, -3.3676233, -2.4992247,\n          -1.8911582, -7.054657, 
-3.7401574, -2.9138978, -2.9643297, -3.8779972,\n          -1.8223864, -3.0082738, -2.850572, -3.2850523, -4.746652, -4.583561,\n          -2.705282, -3.0956216, -3.2120516, -4.7790074, -2.0452852, -2.407949,\n          -3.5124986, -3.506919, -2.8316567, -2.4972572, -4.2897453, -3.2318325,\n          -2.0848386, -2.7511337, -2.825714, -2.2840638, -2.1273985, -4.573858,\n          -3.5196013, -2.3099709, -1.9768105, -2.6367333, -3.5040345,\n          -2.2665486, -2.7077358, -2.5872097, -3.828445, -3.6953075, -4.895928,\n          -2.4355423, -2.0899951, -2.0495608, -2.7160594, -3.9311116,\n          -3.3196929, -4.747052, -3.7480927, -4.6403666, -3.5007823, -1.8442141,\n          -2.6454961, -4.8937225, -5.3806396, -4.3192015, -2.6148129, -2.677922,\n          -2.215932, -3.3537917, -2.6788485, -2.9680216, -4.078896, -1.8529885,\n          -2.0642333, -2.340028, -1.9987302, -3.3039026, -2.392308, -2.1998007,\n          -3.9700627, -2.3132365, -1.9336995, -3.8793678, -3.0573602,\n          -3.6135347, -4.379453, -1.9174479, -3.2993047, -3.3862681, -2.5543268,\n          -2.916683, -6.335731, -2.5580328, -2.3565786, -2.3169065, -2.1767182,\n          -2.2160468, -2.853828, -4.616713, -3.1134565, -3.1778045, -2.9624515,\n          -2.313375, -2.166796, -1.996399, -3.6453948, -4.732854, -3.3619401,\n          -2.051775, -2.1180842, -2.8457713, -2.5686061, -3.9750416, -3.261386,\n          -3.2534878, -2.3567991, -3.843067, -3.2058804, -3.351548, -4.033013,\n          -3.2406118, -4.746862, -1.873855, -2.789624, -2.095337, -2.0180707,\n          -5.4625974, -3.1724863, -2.760957\n        ],\n        \"pointIndex\": [\n          2, 1238, 256, 858419605, 469297476, 1490599725, 1005017321,\n          1605967623, 1016732321, 374594265, 466246996, 838168704, 252609730,\n          1175134998, 25339492, 23200487, 1619181267, 805357786, 461366673,\n          633311396, 1065483570, 787080239, 1444081752, 1298668912, 244844311,\n          1410626199, 291709064, 1305596026, 
763341988, 342862589, 629194162,\n          1611349848, 414202777, 934582831, 1219546, 168058702, 1328702083,\n          1554074388, 1766367113, 918632197, 1621630829, 886727885, 1063006415,\n          1128582551, 231273008, 1488830882, 681790444, 1700116165, 285796646,\n          1137975848, 405847511, 8924900, 1405645472, 1738960049, 679238652,\n          64002872, 666276996, 734704128, 371354107, 1484993525, 508819804,\n          1850027246, 792953121, 1580734430, 438885431, 95284342, 408412746,\n          1577909765, 861150228, 749262352, 961671547, 1291606525, 1279110026,\n          684142540, 187173978, 1579637657, 227902840, 1686808732, 1616456420,\n          872611235, 866293550, 1390806705, 1131570308, 1076710441, 1656938670,\n          1218981380, 1514339915, 1813021837, 1236\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 8772489341801893028\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.0728947, -1.5259328, -1.5271281, -1.5343946, -1.560383, -1.5384734,\n          -1.5312777, -1.5520419, -1.6264254, -1.6279063, -1.5762386, -1.54961,\n          -1.5745214, -1.670223, -1.5665997, -1.6381234, -1.7441238, -1.6393917,\n          -1.6364237, -1.6332041, -1.6408373, -1.6322473, -1.7203869,\n          -1.7163656, -1.7157981, -1.6053427, -1.6213709, -1.6732472,\n          -1.8829713, -1.5831262, -1.5804985, -1.8121722, -1.9040909, -1.855875,\n          -1.7723962, -1.756655, -1.7166331, -1.7561811, -1.7495925, -1.7460877,\n          -1.8043892, -2.0242321, -1.6566799, -1.8006837, -1.7637908,\n          -1.7983268, -1.8492807, -1.938855, -1.8046734, -1.9368708, -1.7219111,\n          -1.647257, -1.8743986, -1.9102929, -1.6376833, 
-1.8193051, -1.7348002,\n          -1.9335852, -1.8950299, -1.6522033, -1.8217105, -1.8316988, -1.583854,\n          -1.8243084, -1.9072149, -2.0794513, -1.9411082, -2.0080197,\n          -2.3706264, -3.8453596, -1.9341033, -1.7909273, -2.22557, -1.8950368,\n          -1.8222072, -2.0923498, -1.9481359, -1.9054334, -1.8847692,\n          -1.9231081, -1.8395206, -1.816015, -2.1411002, -2.6224537, -2.1703074,\n          -1.8760118, -1.7247542, -2.1039886, -1.941013, -2.1875296, -2.6584635,\n          -2.4773474, -2.0004907, -2.427941, -1.8578634, -1.993672, -1.9411371,\n          -2.4460554, -2.4973977, -2.587589, -2.143168, -1.9661105, -1.8435761,\n          -1.7624182, -1.8321925, -2.2877474, -1.8888198, -1.9872756,\n          -2.3712525, -2.053333, -2.304413, -1.8495423, -1.8473682, -1.7911122,\n          -2.255487, -1.9753639, -2.8610258, -2.6455243, -1.9160482, -2.0074358,\n          -1.7680875, -2.0996945, -2.1236439, -3.1808512, -2.3633845,\n          -2.0920115, -1.5991555, -2.0979178, -3.580935, -3.3564754, -2.1720605,\n          -3.337978, -2.8830314, -2.6280017, -2.653904, -2.2192788, -2.0428448,\n          -4.37789, -3.6127176, -3.9183333, -4.027198, -2.2029734, -2.4479206,\n          -4.0643425, -2.4019227, -2.8332531, -3.725749, -2.8588874, -2.51254,\n          -2.4987812, -3.2159305, -5.0033402, -3.5965152, -2.3439965, -5.258561,\n          -2.116983, -2.1509147, -2.1029966, -2.2929664, -2.0077374, -2.858487,\n          -2.585806, -5.1803718, -2.1305256, -2.8780572, -3.130301, -3.3613229,\n          -3.407688, -2.8195002, -3.5945868, -2.1957138, -2.7049334, -3.6004672,\n          -3.220083, -5.7187304, -2.3223257, -4.639869, -2.3436725, -2.6810954,\n          -3.5382938, -2.8885171, -5.0835733, -3.452059, -2.9016135, -4.5785546,\n          -2.4024642, -2.6227605, -5.620513, -2.71551, -1.8989049, -2.6357558,\n          -2.3210332, -2.2360513, -2.0786529, -2.1857975, -4.588789, -2.746207,\n          -2.5993383, -3.357406, -3.7500455, -4.308708, -6.10445, 
-2.2139173,\n          -3.3152285, -3.428218, -2.914128, -2.5067766, -3.2157886, -1.8737192,\n          -2.5115092, -1.9986128, -2.4794385, -4.047551, -2.1282003, -2.304178,\n          -2.4129395, -3.613385, -3.106013, -2.9582293, -4.256424, -2.186439,\n          -2.4176476, -2.6717122, -2.3630428, -2.2466805, -2.6661818,\n          -1.8820276, -2.5826948, -2.4682412, -2.6981506, -3.0258257,\n          -2.1245618, -3.3690262, -3.0876715, -4.162996, -4.5142965, -3.7879217,\n          -2.9665167, -2.791171, -3.5369656, -2.3917305, -2.2554045, -2.6315799,\n          -2.3195806, -2.5932949, -2.5324724, -2.643968, -3.478174, -3.8480647,\n          -4.838305, -4.7640142, -5.7706485, -3.7859595, -1.890822, -3.0025935,\n          -6.5981126\n        ],\n        \"pointIndex\": [\n          0, 1255, 256, 451332295, 476844765, 353449948, 560430769, 524584,\n          316601115, 394867709, 1089903807, 681991040, 1250248753, 1465322267,\n          1221666542, 1459131120, 1521923180, 158727042, 493217101, 1954551683,\n          1608806476, 816219709, 739309430, 1924093989, 586175080, 1083141184,\n          1514287099, 383171479, 335468273, 353301328, 1087214337, 1155612821,\n          422631183, 1753804395, 473325151, 699257600, 555443726, 1751772757,\n          869201381, 787347922, 1146538503, 1656581780, 596168698, 1687312381,\n          1815453422, 101975884, 95255200, 954426855, 809120055, 1809655805,\n          119992139, 1700763601, 559408775, 912305271, 932752032, 1105958944,\n          649154500, 717035920, 98100460, 310978596, 147069943, 1203687429,\n          1553807374, 1786111910, 1834199016, 1235945729, 482756131, 500599205,\n          516810663, 936616447, 1778816992, 587427863, 185535326, 1799358676,\n          1004637632, 200082407, 746739784, 1457045496, 804396910, 840863913,\n          858856371, 916721559, 1447419325, 1154031319, 1679446170, 1585138145,\n          1452617747, 1976564591, 153\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n      
  \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 5684369017740460239\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4911071, -1.4948497, -1.4967381, -1.5310053, -1.5021715,\n          -1.4987854, -1.5207069, -1.5417862, -1.6222419, -1.5158104, -1.575677,\n          -1.5464264, -1.5918624, -1.6376232, -1.5584885, -1.5438135,\n          -1.7702165, -1.7226433, -1.669732, -1.6314669, -1.5866266, -1.6306599,\n          -1.5853622, -1.6076901, -1.7002006, -1.7073035, -1.6342174, -1.733735,\n          -1.7246922, -1.5689619, -1.6232954, -2.2763486, -1.5731705,\n          -1.7965978, -1.9153309, -2.4673932, -1.9342862, -1.6788498,\n          -1.8781089, -1.7806269, -1.9041122, -1.6793627, -1.6082995,\n          -1.6951773, -1.6438974, -1.6304628, -1.8834292, -1.7857707,\n          -1.8056086, -1.7213256, -1.8343105, -1.7301867, -1.9566417,\n          -2.0860317, -1.8436301, -1.7512348, -2.2028763, -1.982251, -1.7824874,\n          -1.5767955, -2.0471509, -1.7501755, -1.810705, -2.4892054, -2.5443416,\n          -1.584471, -1.8527033, -2.0867581, -1.9452515, -2.5781212, -2.4684212,\n          -3.2122548, -2.4690588, -1.9902788, -2.1398559, -2.1078768,\n          -3.1068273, -2.477722, -2.173951, -1.9940237, -2.3296304, -2.3379073,\n          -1.9337028, -1.7934093, -1.7880102, -2.239576, -1.7530417, -1.8559335,\n          -1.7293302, -1.6868128, -1.8564905, -1.7350532, -1.7223151,\n          -2.2533605, -2.07792, -1.903341, -1.8106148, -1.9080479, -1.8621694,\n          -1.7881052, -1.7325954, -2.2205815, -2.3311322, -2.471289, -2.7758646,\n          -2.4677973, -2.3256052, -2.9003494, -2.2866623, -2.0297635,\n          -1.9258659, -1.8620938, -2.0275686, -2.7264936, -2.297328, -2.0284185,\n          -2.2965612, 
-2.2951462, -2.0015132, -2.1415493, -2.186847, -2.1374779,\n          -2.862179, -2.3183577, -1.7961106, -2.3331249, -1.8480538, -2.5384324,\n          -5.1544867, -3.026151, -5.105571, -3.2670376, -3.3749888, -3.138002,\n          -3.2801085, -2.3727238, -2.2907364, -3.2307806, -3.6300344, -2.747922,\n          -2.8748686, -3.0952058, -2.580954, -4.8073583, -3.73439, -2.5728266,\n          -3.479313, -2.0984015, -4.709388, -2.9657722, -2.2065113, -2.3346984,\n          -2.6162956, -3.309454, -3.9598234, -2.563438, -5.2532086, -3.0477846,\n          -2.2498612, -3.195835, -2.1837943, -2.565356, -2.3919415, -4.014871,\n          -2.4170105, -3.0006533, -2.1708453, -2.0211153, -4.1956906,\n          -3.5515172, -4.3578415, -2.9570305, -4.4781837, -2.6243632,\n          -2.7781944, -2.0778558, -2.5789433, -2.71087, -3.7467637, -2.1903725,\n          -2.521412, -2.07979, -1.9037058, -2.5047266, -2.299304, -2.8141892,\n          -2.1038964, -2.707932, -3.5448616, -3.4673579, -2.5393233, -2.2156534,\n          -3.419818, -1.8480599, -2.6456294, -1.9716583, -2.0067434, -2.6639864,\n          -3.4829478, -2.005168, -2.0726898, -3.549752, -2.0862799, -2.6057477,\n          -3.413592, -4.0227575, -2.3710632, -2.8646724, -5.5461245, -4.6805344,\n          -5.1303153, -2.591475, -2.4750278, -3.6639812, -2.8366256, -4.1023946,\n          -2.9231687, -2.5331335, -2.953019, -2.6595, -2.183007, -2.2965136,\n          -2.019997, -4.1200275, -3.0445871, -2.3739119, -3.5416205, -6.0304656,\n          -2.7609074, -3.1862056, -3.6426349, -2.0630581, -2.0421371, -2.590611,\n          -3.177964, -4.961881, -4.281495, -2.7978115, -7.188777, -2.5101128,\n          -2.6189158, -2.3621762, -4.326628, -3.0647418, -2.8780253, -4.0096097,\n          -5.4045615, -4.073947, -2.9886022, -2.6005065, -2.0170605, -4.1553526,\n          -2.8099904, -4.852867, -3.1955338\n        ],\n        \"pointIndex\": [\n          0, 1256, 255, 1125197114, 82857922, 1461216781, 1048294166,\n          1233241938, 
1072651067, 1396385587, 379592286, 817359583, 1161848244,\n          1080687724, 910355443, 1066392929, 947891366, 472461480, 509175965,\n          1695562823, 628293117, 205968688, 1283772976, 1904959238, 1848038017,\n          1962492536, 926798269, 417231182, 1726165423, 1306473602, 145177136,\n          1697987296, 465815333, 594818656, 1629364985, 1574606, 950373332,\n          1814719573, 629547692, 841675527, 1435863509, 798550530, 1121046315,\n          1580042310, 1723551849, 267455636, 1428579383, 464360136, 1756191336,\n          303660858, 1753359406, 1561260355, 1049502988, 325953841, 337209313,\n          356530729, 138208649, 381091212, 413443146, 1974615274, 869727989,\n          1209973784, 71686395, 1348683888, 739318597, 501851431, 1803830512,\n          102456874, 1575876331, 1069757925, 1894928579, 1064803339, 1116966482,\n          1918248381, 619871104, 1534741660, 687015658, 1495284408, 1537003602,\n          770132244, 745400391, 858751993, 546446138, 1538264577, 1184394218,\n          1581041753, 1498943238, 1986067271\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -5961011759105000572\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6237711, -1.628153, -1.6416693, -1.6389134, -1.6589046, -1.6476755,\n          -1.696056, -1.696213, -1.6725714, -1.6640321, -1.6763895, -1.6636418,\n          -1.6921222, -1.6966518, -1.7628441, -1.7269291, -1.6983651,\n          -1.6831176, -1.6787539, -1.6936954, -1.7841918, -1.74626, -1.7725044,\n          -1.6873757, -1.7203944, -1.8625002, -1.8564576, -1.73292, -1.8435144,\n          -1.855701, -1.7792147, -1.8557831, -1.8105886, -1.9192698, -1.7947007,\n          -2.216442, 
-1.8588217, -2.415632, -1.8353658, -1.7428002, -1.8348613,\n          -2.0291662, -1.9624325, -2.0259523, -1.9174504, -1.986738, -2.2747478,\n          -1.8286602, -1.8884032, -1.9634504, -1.7983656, -1.8889049,\n          -1.9644171, -1.9637636, -2.1408167, -1.7981789, -2.1016233, -2.00415,\n          -2.0105994, -2.1065261, -2.141287, -1.9218062, -1.9633807, -1.8852696,\n          -2.7141705, -1.8775206, -2.0123956, -2.3604717, -2.3413675, -2.055255,\n          -2.4611597, -2.3919573, -2.874939, -1.9921714, -1.8891987, -2.7240236,\n          -2.5091233, -1.8974257, -2.3336713, -2.04924, -1.8874867, -2.400239,\n          -2.0225604, -3.2573667, -2.5920815, -2.237946, -2.6125925, -2.4049811,\n          -2.571699, -2.3181803, -3.2518358, -2.529661, -2.347575, -2.3412488,\n          -2.302599, -1.8972478, -2.1023102, -2.2392087, -3.5990577, -2.2514434,\n          -1.9948503, -1.9095303, -1.8573929, -2.6482105, -1.9383067, -2.068555,\n          -2.6944559, -2.0890465, -2.3229322, -2.2482884, -2.4190507,\n          -1.9795253, -2.1934295, -3.05396, -2.646185, -2.3653991, -3.164765,\n          -3.1067479, -2.0248604, -2.127616, -2.3515325, -2.2254272, -2.1740985,\n          -2.2719512, -2.085722, -2.4028237, -2.0253525, -1.9308548, -2.6048572,\n          -4.0182304, -4.4086246, -1.9217116, -3.025395, -3.6621032, -2.9816504,\n          -4.321547, -2.4466884, -3.1595805, -2.6285114, -2.931134, -2.7492023,\n          -3.6441271, -3.034636, -2.4775612, -2.9013302, -4.2080584, -4.958086,\n          -2.3812592, -2.7652755, -3.1284442, -4.9177685, -4.059228, -3.0657902,\n          -3.2866814, -2.5140004, -3.395545, -3.394022, -4.1792674, -5.2798567,\n          -2.3089683, -2.8747897, -3.5250952, -4.1287413, -2.963254, -3.250532,\n          -2.1863542, -2.416417, -3.6281595, -3.6139681, -4.0049005, -3.2939267,\n          -2.3069274, -2.6169567, -3.554352, -3.1421096, -4.0918937, -2.8538375,\n          -2.9486341, -4.11142, -2.662579, -3.101394, -4.9599957, -3.5882397,\n         
 -2.598755, -5.9695992, -2.760794, -2.4963899, -2.5378249, -5.551288,\n          -2.7500703, -2.7607353, -1.9182537, -3.4631934, -2.3276725,\n          -5.1479826, -2.8500469, -4.8279233, -4.042882, -3.806317, -3.0019336,\n          -3.7066808, -2.7486873, -5.8026333, -4.1605725, -2.3630943,\n          -2.1956322, -4.7438784, -3.8152585, -3.0708437, -2.610038, -3.1883404,\n          -4.153042, -3.6384819, -4.625368, -4.6413755, -5.181517, -2.506795,\n          -2.6399064, -2.368988, -2.6294613, -2.5832407, -5.06332, -3.4438636,\n          -2.2855315, -2.087224, -2.5356748, -3.0395882, -4.248704, -4.914877,\n          -3.9012988, -3.2828252, -3.193225, -5.193056, -5.249597, -3.2919266,\n          -3.3869119, -5.7165804, -2.650207, -2.1123464, -2.431872, -2.584526,\n          -4.189752, -2.4254708, -2.8907857, -2.560145, -3.9329863, -2.6766164,\n          -4.6441774, -2.7055318, -6.3359137, -3.4322157, -2.986393, -4.379887,\n          -5.3462477, -2.6393416, -3.140426\n        ],\n        \"pointIndex\": [\n          9, 1253, 256, 961539270, 1673099742, 292864876, 93406520, 1605143282,\n          1130920593, 1312898789, 882579986, 749347743, 1075064541, 838538357,\n          272853188, 548798662, 355454806, 232841713, 1432004312, 469242221,\n          442134772, 1150795113, 995911589, 1768180843, 756759527, 261221975,\n          95213649, 996738658, 1559270182, 1209279388, 358920143, 388999972,\n          605928230, 1345802258, 517882258, 1419006519, 1559783413, 1301851733,\n          1008879669, 1786576935, 1060918451, 1374777785, 1772599360,\n          1137569967, 1585666898, 725303212, 239785903, 310201483, 6508745,\n          537940759, 1873686440, 379902131, 341355727, 1617006820, 312920987,\n          1650707723, 954643590, 911482995, 50904291, 1655019542, 1366056569,\n          1708581567, 438009943, 598454845, 211496435, 1199696213, 1754048226,\n          1684998472, 928944341, 566199032, 197470124, 878762558, 472550127,\n          681922101, 30036178, 
1808191772, 1002679164, 960291440, 1123215918,\n          908044003, 943742961, 985172922, 1522888374, 1286150540, 1527864856,\n          1244078233, 1450403722, 1796263298, 1244\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 9095889982550489397\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.5965683, -1.605474, -1.5998874, -1.6115742, -1.6243737, -1.6085265,\n          -1.6020123, -1.634418, -1.6425602, -1.6400524, -1.629464, -1.6297138,\n          -1.6337343, -1.6033086, -1.6097484, -1.6624259, -1.6742873,\n          -1.7130513, -1.7128102, -1.6477957, -1.670843, -1.6317906, -1.6782671,\n          -1.6773082, -1.8203907, -1.7286334, -1.806726, -1.6300648, -1.7287886,\n          -1.6415924, -1.6448572, -1.6650454, -1.7876563, -1.7708933,\n          -1.7046733, -1.9888271, -1.972927, -1.8556824, -1.9030229, -1.9477214,\n          -1.8496878, -1.6738344, -1.8104644, -1.6836578, -1.6649296,\n          -1.7982227, -1.7100314, -1.8672014, -1.687297, -2.4574585, -1.9116623,\n          -1.7777321, -1.7388297, -2.0904682, -2.171662, -2.0474243, -2.047354,\n          -1.8827441, -1.7973624, -1.6882379, -1.7449267, -1.6694207,\n          -1.8786082, -1.685836, -2.4038177, -3.2515216, -1.8661153, -1.903327,\n          -2.5530834, -2.1660123, -2.7789965, -2.1661847, -2.5386386, -2.577987,\n          -2.0202804, -2.6948571, -2.0975728, -2.4518397, -2.322574, -2.0369956,\n          -2.3269794, -1.980619, -1.8654743, -2.132766, -1.7511171, -2.1872008,\n          -1.899768, -1.8329855, -2.0645735, -1.793986, -1.9067699, -2.3587954,\n          -1.9459684, -1.7774417, -1.7305173, -2.106843, -2.2295704, -1.7658864,\n          -1.8917775, -2.6521816, 
-3.1080632, -2.6481214, -1.9151706,\n          -1.9945921, -1.8536875, -2.0041533, -1.8389709, -2.201991, -2.526123,\n          -2.8499606, -2.2844756, -2.0961516, -2.7727692, -2.337646, -2.2693615,\n          -2.6762223, -1.98051, -1.9966611, -2.4551325, -2.1908934, -1.6984668,\n          -2.9126124, -1.754821, -1.8608118, -1.778809, -2.1995912, -1.8802121,\n          -1.754069, -1.9750528, -3.3643022, -3.0671022, -5.499051, -6.351326,\n          -4.5714335, -2.0859892, -2.5843184, -2.7171652, -3.3848102,\n          -2.7166994, -2.4720547, -2.4142833, -4.8606153, -4.5415883,\n          -3.3840468, -2.6464822, -3.5753329, -3.2476146, -2.6213663,\n          -3.1917949, -2.6942282, -2.42582, -3.2648609, -3.1270168, -2.307064,\n          -4.374364, -3.885179, -3.0397565, -2.5333095, -2.4329717, -2.2332306,\n          -2.294948, -3.1335053, -4.5621243, -3.0722396, -2.3758736, -5.0332875,\n          -2.6144414, -3.4925108, -3.4594991, -2.6730094, -1.995602, -5.004476,\n          -2.4933708, -1.9064989, -3.145163, -2.6776807, -2.5378213, -2.7086794,\n          -3.2757256, -1.8860871, -2.3013015, -2.6716664, -3.6417656,\n          -2.7129445, -3.0108495, -2.7780106, -2.0601518, -2.9248378,\n          -2.9490836, -2.2054753, -3.5714936, -3.9003718, -2.609097, -2.988914,\n          -2.3022199, -3.5478654, -3.9104757, -2.8510497, -2.9600012,\n          -3.0319679, -5.06519, -4.875351, -4.142768, -5.731296, -3.408339,\n          -6.5622587, -2.80811, -3.0746014, -3.469099, -4.501739, -2.066536,\n          -2.7396262, -3.2183354, -2.774808, -2.3921945, -4.8227572, -2.8792684,\n          -3.2473035, -2.8974328, -4.7501125, -3.4837053, -2.9300468,\n          -2.5738401, -2.8842216, -3.1261454, -4.581744, -2.9203901, -2.6495404,\n          -3.342783, -2.4231517, -3.1346366, -3.2866194, -3.8044457, -3.9290097,\n          -2.220442, -3.2857993, -2.317145, -3.182098, -2.5939918, -2.685288,\n          -2.73855, -2.1080241, -2.4743004, -4.901883, -4.4570746, -2.675061,\n          
-2.5903609, -2.8296156, -3.7289162, -2.9241652, -3.2727346,\n          -4.1241984, -3.8632848, -2.914715, -2.1849167, -2.0690107\n        ],\n        \"pointIndex\": [\n          2, 1256, 256, 1503582068, 559482190, 1337102976, 562729286,\n          1739611809, 967291313, 425541850, 871408859, 1146672822, 1122805082,\n          395320915, 24473670, 630240406, 400611615, 148617549, 867539900,\n          810810247, 751919595, 1609394900, 1136416181, 1929048211, 612684984,\n          1527301945, 1577898435, 305795120, 1880831748, 367138434, 22172056,\n          1874272017, 1304492081, 505980363, 1231071935, 139656808, 961302118,\n          822970169, 1666978862, 173863570, 1554254010, 1236234523, 1635648513,\n          1492888839, 1732013932, 694525718, 872762421, 455204267, 1773693362,\n          781273967, 1676485434, 63093379, 174927860, 312100965, 330474076,\n          637861492, 555140684, 1812290006, 1636756093, 1445119631, 1836815181,\n          456820272, 474696245, 491235964, 502555223, 1940258051, 530706178,\n          1489639818, 523814770, 586608468, 76358020, 653663382, 1327743328,\n          1851203310, 1673754727, 1523023867, 857828327, 1941160626, 830782756,\n          1601969504, 1845615641, 1401517138, 1068653717, 1111281952,\n          1140141877, 1971318335, 1757514124, 1743241112, 1254\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -4568414323862123324\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6138266, -1.6200708, -1.614556, -1.6233939, -1.6260647, -1.6201645,\n          -1.6304435, -1.669478, -1.6389856, -1.651674, -1.6661147, -1.6698132,\n          -1.6513977, -1.6688465, -1.6691176, -1.7950977, -1.7260531,\n       
   -1.7063082, -1.6564782, -1.669986, -1.903447, -1.6774566, -1.7375205,\n          -1.9316454, -1.7064815, -1.7414997, -1.6840204, -1.9541612,\n          -1.9007772, -2.018441, -1.7255052, -1.8393124, -2.0276234, -1.8696972,\n          -1.8448665, -1.7897143, -1.7640897, -1.8238368, -1.8092318,\n          -1.8617193, -1.7547783, -2.0482876, -2.110692, -1.7365395, -1.7024146,\n          -2.3914833, -1.850182, -2.068951, -2.242601, -1.7162504, -1.78647,\n          -1.7638264, -2.090236, -1.7976038, -1.7387117, -2.1137908, -2.472179,\n          -2.1319265, -2.2775311, -2.038422, -2.1415591, -2.094432, -1.726505,\n          -2.575303, -1.9282467, -2.8848033, -2.4613793, -1.9956578, -1.9995972,\n          -2.0498257, -1.8999822, -1.968389, -2.1845083, -1.998322, -2.3053951,\n          -2.185676, -1.8633871, -2.577086, -2.3613966, -1.8704422, -2.3792217,\n          -1.9110062, -2.2505798, -2.1216521, -2.0548737, -2.2406547,\n          -2.1248424, -2.1456785, -2.1103835, -2.0191789, -3.127058, -3.0576043,\n          -2.6811328, -1.8934972, -1.8617606, -2.2808363, -2.4195814,\n          -2.9486563, -2.5926979, -1.9886698, -2.084278, -2.182579, -1.8508092,\n          -2.1939347, -2.5398748, -2.717566, -2.4137757, -1.8228891, -3.6212134,\n          -1.7930357, -2.044137, -2.839537, -2.3011444, -2.7147546, -3.0784369,\n          -2.3831677, -3.5555866, -2.469817, -2.3973162, -2.2536638, -2.0598986,\n          -2.2986326, -2.5735557, -2.1581638, -2.221038, -2.3564098, -1.9919771,\n          -5.206828, -3.0944185, -2.483788, -2.118705, -3.271962, -8.166502,\n          -5.0891895, -3.2259262, -5.996233, -7.8219643, -5.435928, -4.096331,\n          -2.0845773, -2.3608477, -2.382382, -2.571299, -2.3727942, -2.1107092,\n          -3.9825644, -5.0206003, -2.0619795, -2.4728963, -2.3282006,\n          -6.0645623, -3.7792861, -2.5139346, -2.4748445, -2.5115914,\n          -3.2220566, -4.2849445, -3.2265825, -3.7718532, -4.087799, -2.0863242,\n          -3.5737815, -2.8400545, 
-2.1448634, -2.0088718, -2.6729875,\n          -4.4135776, -2.8767314, -2.8736422, -2.6131053, -2.6826117,\n          -2.5429437, -3.638273, -2.4608417, -2.7961802, -2.2718759, -2.3121276,\n          -2.6518314, -2.3440542, -2.045319, -4.7715955, -3.9468198, -3.5987012,\n          -3.7612112, -4.406169, -3.0142753, -2.9591682, -2.205534, -1.9212265,\n          -3.6447978, -2.0212262, -3.6987958, -3.2881184, -2.9900627,\n          -3.8477633, -5.032719, -5.106605, -3.327592, -2.7105112, -2.4769652,\n          -2.9611087, -3.0738149, -2.8768263, -7.121635, -3.7444797, -2.0688655,\n          -2.374881, -2.4311557, -2.7882764, -2.6756434, -11.394501, -2.7900372,\n          -2.7363017, -2.440126, -2.477741, -4.5160103, -2.7817466, -4.2179155,\n          -4.096831, -2.6643949, -6.123933, -2.4551187, -3.8375704, -4.0790167,\n          -2.8787398, -5.083254, -3.9775193, -4.593228, -2.8289692, -3.9375958,\n          -6.2126374, -3.015584, -3.2844162, -3.608685, -4.183773, -3.0704916,\n          -4.6877394, -2.875446, -3.779652, -2.4573023, -2.2609415, -2.8825245,\n          -2.9201965, -2.360224, -3.3201356, -4.1180134, -3.2411907, -2.2871995,\n          -3.1855159, -2.308338, -4.0716915, -3.783447, -2.4058626, -2.3971012\n        ],\n        \"pointIndex\": [\n          1, 1252, 254, 274547076, 976034967, 315587402, 1649179100, 1843551661,\n          1376484141, 180538131, 980182133, 861182697, 1233326548, 250222193,\n          576939298, 331323215, 540501137, 1394344577, 1834460469, 276498685,\n          1039219316, 1437267118, 1216370238, 1904011025, 1224188161, 268514160,\n          666171480, 1477003370, 1592529095, 13868853, 1195328452, 424364948,\n          68286543, 1021973364, 1840717453, 1628669221, 1482849275, 685094737,\n          742769426, 1260848491, 1258393488, 1697048226, 1027507217, 1381884431,\n          1786484634, 1487562547, 1617412411, 266446638, 1450798852, 1932821008,\n          1644546020, 1238420991, 1122575941, 1858877151, 1752517175,\n          
1085366049, 549552616, 772172480, 1700395270, 362405413, 32350723,\n          80062926, 1507765598, 1832014160, 871120223, 1037491377, 167472094,\n          504934741, 177951359, 500670106, 971261282, 1275056365, 588879903,\n          1924589827, 768038482, 687266305, 1938893689, 227921150, 1467022746,\n          1039928366, 1280589980, 1491414349, 1019670730, 1447688018, 658286145,\n          1821877343, 1523321737, 1563638\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 254,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -7769275059528997346\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.467028, -1.4670975, -1.4674501, -1.4778205, -1.4689364, -1.4841985,\n          -1.4681206, -1.4894453, -1.5780511, -1.4848005, -1.5843028,\n          -1.4873016, -1.5212706, -1.477742, -1.4705982, -1.528797, -1.6605811,\n          -1.5975436, -1.6015737, -1.518848, -1.4887893, -1.8078047, -1.6950811,\n          -1.4955248, -1.5334237, -1.5501573, -1.653275, -1.7648107, -1.7915306,\n          -1.4838738, -2.0933008, -1.5549726, -1.5799541, -1.8778653,\n          -2.0852897, -1.598548, -1.6157726, -1.6360085, -1.6091611, -1.5994772,\n          -1.7040111, -1.5370618, -1.5624154, -1.930429, -2.0199976, -1.7245322,\n          -1.7301582, -1.7585945, -1.5455968, -1.7815795, -1.6114988, -1.741401,\n          -1.6412951, -1.7372777, -1.6953312, -1.9503698, -1.8603348,\n          -1.8601714, -1.8995856, -1.7357519, -1.6103076, -2.1440177,\n          -2.3009224, -1.7057087, -1.5696069, -1.8150089, -3.4212294, -2.101055,\n          -2.87118, -2.098325, -2.436333, -1.628527, -2.0695157, -1.7023493,\n          -1.9100912, -1.6787161, -1.6959999, -1.923348, -1.7236828, -1.7092505,\n          -2.1551113, -1.8813591, 
-2.2988782, -2.1261275, -1.6131617,\n          -1.8154691, -1.8053954, -2.2895265, -2.074795, -2.494754, -3.2932467,\n          -1.8201196, -1.7812427, -1.9736366, -2.131185, -2.472943, -2.473403,\n          -1.7512174, -1.6387186, -2.4311333, -2.6189053, -1.7664279,\n          -2.0674803, -1.8106017, -1.9222755, -1.7646991, -1.8440909,\n          -2.0587785, -2.2700589, -2.2341561, -1.8995575, -2.1170647,\n          -2.1305034, -1.9458102, -2.2541769, -1.8630131, -2.3247356, -2.011005,\n          -2.0010917, -2.0726094, -1.7903583, -1.9421533, -2.0684798,\n          -2.3165772, -2.2252998, -2.8308153, -2.3763773, -2.0261903,\n          -2.3376548, -1.6005598, -5.1900268, -3.3060582, -1.9668535,\n          -4.4343815, -5.726982, -6.5618143, -7.1521225, -3.9599025, -3.087707,\n          -3.7807233, -2.1584404, -3.3265028, -4.7556677, -2.0542948,\n          -1.7263491, -2.553137, -3.8031938, -2.0881627, -3.6006567, -1.9714303,\n          -4.027929, -2.9679847, -2.084463, -2.5373173, -1.7270668, -3.3460476,\n          -2.2720094, -1.9646698, -1.7665315, -2.1895826, -3.1057694,\n          -2.5851593, -2.3558624, -2.7720058, -4.548722, -2.794429, -2.9128704,\n          -2.2261264, -2.5785084, -2.3850858, -3.0014517, -2.3341982,\n          -2.0401964, -3.7656128, -4.918224, -2.3436055, -3.3159294, -2.8349006,\n          -2.212059, -2.8434274, -3.250644, -6.301172, -3.4591837, -2.829273,\n          -5.1736913, -1.8322675, -2.6087058, -2.7446747, -1.9850545,\n          -2.7042725, -2.2928898, -2.8400059, -7.0028443, -3.1880243,\n          -2.7262745, -2.933164, -2.522319, -5.064997, -2.2153957, -2.8580453,\n          -2.6116455, -2.7077794, -2.875213, -1.8732623, -4.516843, -3.31612,\n          -2.9638295, -2.528539, -1.9243155, -3.2457132, -3.8606894, -4.193019,\n          -1.9437845, -2.818514, -2.0880072, -4.4739275, -2.1363142, -3.224624,\n          -4.357141, -2.5012124, -2.285748, -2.908747, -2.3669345, -2.3116171,\n          -2.3229961, -2.6012707, -2.9103174, 
-2.409648, -2.921963, -2.7194922,\n          -2.717911, -2.218835, -1.9833001, -2.865098, -2.5878224, -2.833754,\n          -3.606414, -2.3370578, -3.9532104, -2.3224201, -3.205591, -2.004258,\n          -2.3421495, -3.8120599, -2.659517, -4.6359572, -3.6126306, -2.8724058,\n          -3.2156265, -3.7808232, -5.067774, -3.3008513, -4.3762145, -3.2654536,\n          -4.3478055, -3.4660358\n        ],\n        \"pointIndex\": [\n          6, 1249, 256, 795392217, 638392962, 150061942, 1614475866, 1147699865,\n          1191819344, 279537301, 551298788, 95573517, 1280571584, 800982649,\n          797086801, 1445793454, 146122949, 60691280, 1321646822, 985916881,\n          122025031, 1066252937, 1678302431, 1815055294, 1539998588, 1060685771,\n          430818523, 330705144, 449039476, 662066961, 821265261, 397155296,\n          207819166, 1853345899, 877469758, 756263593, 1024183649, 835997518,\n          1899422496, 203404796, 1368794025, 175599895, 1007365797, 1669139218,\n          1379846804, 1796674342, 1791417225, 7905622, 167346105, 1058947104,\n          1858394257, 1467292877, 298716694, 1047781028, 1629806720, 327779287,\n          294734916, 865485941, 956674799, 388742043, 142688661, 625743782,\n          438297810, 1456649084, 1603635718, 500480003, 512765450, 1883280749,\n          1915537434, 486385823, 17492566, 1499864378, 1410301699, 649940136,\n          673716751, 737319243, 928071726, 1896315503, 834726615, 55181900,\n          896101042, 1209358992, 790024189, 1033022128, 1211074512, 1246998788,\n          1531517863, 1777879970, 1243\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 4541724188348374211\n      },\n      {\n        \"version\": \"2.0\",\n        
\"weight\": [\n          -1.5965717, -1.6001675, -1.6039135, -1.6013881, -1.6059641,\n          -1.6264149, -1.6109191, -1.6168767, -1.6837949, -1.6154885,\n          -1.6385336, -1.6628644, -1.71156, -1.6524342, -1.6569929, -1.6551793,\n          -1.6284081, -1.7612597, -1.7535372, -1.6980925, -1.8142519,\n          -1.7219537, -1.6645398, -1.717013, -1.6743274, -1.7404604, -1.7513614,\n          -1.7165102, -1.8172222, -1.6647087, -1.6899586, -1.8019162,\n          -1.9604771, -1.7325345, -1.8862405, -1.763366, -1.7779802, -1.7706667,\n          -1.927861, -2.421426, -2.0788026, -1.919324, -1.9871373, -1.7431914,\n          -1.8885114, -1.6989186, -1.7150781, -1.8874427, -1.8246077,\n          -1.6761974, -1.700487, -1.8789678, -1.83192, -1.9647466, -1.9222289,\n          -1.7642535, -1.7872288, -2.122375, -1.8567419, -2.0587893, -1.8710225,\n          -1.7320795, -2.0157423, -1.8617587, -1.9388741, -2.1291158,\n          -2.3377938, -1.8759451, -1.9586152, -2.4294631, -2.0866654, -2.352787,\n          -2.368293, -2.4037426, -1.8538191, -2.3876994, -3.388962, -2.0128324,\n          -2.1810913, -2.4652147, -2.760965, -2.1185374, -2.4552193, -1.9905087,\n          -2.6480858, -2.1137474, -2.64262, -2.3192794, -1.7611603, -2.0723884,\n          -2.1697767, -1.6992674, -1.7801843, -2.0863554, -1.7544373,\n          -2.4301438, -2.3560226, -1.9150974, -2.0139015, -2.266037, -2.0130007,\n          -2.3793597, -2.4544587, -1.9949479, -1.9759467, -1.9641852,\n          -3.6359153, -2.1987228, -2.1116462, -2.1017444, -2.194392, -2.273187,\n          -1.8679112, -2.1140149, -3.5964894, -2.7098334, -3.1281495,\n          -1.8930235, -2.5202293, -2.903093, -2.2792664, -2.310557, -2.0356898,\n          -2.211646, -1.7527771, -2.7364557, -2.0347674, -1.9049851, -2.9417238,\n          -2.533384, -3.1451507, -3.901787, -2.65957, -3.1533628, -4.368806,\n          -2.3373902, -2.8215654, -3.2734756, -3.0295422, -2.7848854,\n          -2.8667374, -2.545905, -2.3083808, -2.7107656, 
-3.1513333, -7.0312195,\n          -2.4134543, -4.3557734, -2.4205604, -2.0935204, -2.3436384,\n          -3.6850507, -2.594825, -3.7675693, -4.1618886, -2.4946504, -2.3559964,\n          -5.823009, -3.4856424, -3.8307014, -3.9933295, -3.2719202, -2.831582,\n          -2.1212502, -2.7761233, -4.082748, -2.6616712, -2.6365888, -2.0699306,\n          -5.487333, -3.191381, -3.5252602, -2.469596, -3.3871434, -3.2145245,\n          -3.2415304, -2.7563546, -2.6392884, -4.5272846, -2.825336, -3.0379832,\n          -3.686642, -3.3420174, -2.0666325, -4.6929336, -2.4952083, -2.9958043,\n          -4.134448, -2.3739684, -1.9288124, -2.210782, -2.81132, -3.663362,\n          -2.3939419, -2.3698654, -3.599692, -3.1023452, -3.5171332, -3.2203267,\n          -2.2895916, -3.1158347, -2.0357246, -5.3228574, -3.8063614,\n          -4.1641517, -2.4883325, -3.3343816, -2.5615892, -3.0179958,\n          -3.1521022, -2.9317567, -2.186532, -2.6344547, -5.570923, -4.1481714,\n          -2.3711085, -2.6832128, -2.4854255, -3.639394, -4.0815396, -2.7622623,\n          -5.2310658, -2.480021, -3.1463213, -3.8098817, -3.2311997, -3.0973024,\n          -4.378947, -2.1186829, -8.498683, -3.7873533, -2.9777024, -3.6439102,\n          -3.3209803, -4.437536, -2.9741552, -1.9432625, -2.984181, -2.892447,\n          -3.4080548, -4.1191716, -3.8573587, -3.966833, -4.4431806, -2.609818,\n          -2.5466335, -4.1361165, -3.8374505, -4.8389907, -2.4686964,\n          -3.7323036, -3.4660387, -4.2187986, -2.5719957, -2.15731, -2.0255377\n        ],\n        \"pointIndex\": [\n          0, 1253, 256, 1163611739, 961304106, 517944468, 562997821, 1727370439,\n          742897309, 396628742, 511453566, 1096347742, 1721157150, 983949737,\n          430285, 168060032, 140638341, 1326071702, 472850819, 852727987,\n          1611448073, 790833056, 1332688549, 1873278899, 549179258, 1330390782,\n          899569511, 975063105, 1675110494, 51875298, 1269258147, 1692507144,\n          1619167988, 221196918, 
1071487429, 1831608236, 1639183998, 1759677201,\n          664469218, 1206540885, 1038238500, 1084208527, 1099085665, 1304517068,\n          1598745802, 1269019311, 42447411, 1505522038, 34865842, 1539704003,\n          1066873370, 233723665, 292835379, 1204790347, 806415541, 329431560,\n          89350503, 1736721386, 32635574, 592479413, 392883167, 1806938453,\n          1388860555, 433930275, 1481100752, 1934702137, 17665771, 1028187503,\n          1538389950, 1324475678, 1887458445, 1879803483, 1715261712, 184791230,\n          1878106178, 1137472729, 192408864, 1901791939, 1037148005, 486557147,\n          919595168, 950974538, 1178624483, 1125249092, 1179853800, 1285762245,\n          1768686352, 1860189661, 1247\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 2974389039810781744\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6764992, -1.6958274, -1.6906761, -1.6997926, -1.7197431,\n          -1.7105455, -1.699538, -1.7480061, -1.7183363, -1.7617513, -1.7665355,\n          -1.7205622, -1.7229751, -1.7095234, -1.712063, -1.7610425, -2.0025625,\n          -1.7190348, -1.8428125, -1.7734516, -1.8737025, -1.784302, -1.7758138,\n          -1.8650142, -1.9094932, -1.7641912, -1.7876068, -1.770861, -1.9609163,\n          -1.7481569, -1.8737556, -1.7909487, -1.8254586, -2.0344076,\n          -2.0933356, -1.8535492, -1.7756407, -1.9534725, -2.2208147,\n          -1.8998641, -1.8039731, -1.9330158, -1.8930149, -1.8418735,\n          -1.8315548, -1.8492849, -1.7767801, -2.0619454, -1.9183084,\n          -1.9914724, -2.1994247, -1.893529, -1.9167924, -1.8534486, -1.8540635,\n          -1.8360285, -1.8018483, -2.0149078, -2.156264, -1.7783345, 
-1.8414056,\n          -2.0906248, -2.3711946, -1.8415192, -2.3714786, -2.9022906,\n          -1.9636358, -2.8221817, -2.3486662, -2.4554226, -2.6083274,\n          -2.6525185, -1.9475011, -2.313022, -1.8175609, -2.8928878, -2.000999,\n          -2.89704, -2.7926826, -2.1103365, -1.9930633, -2.6354682, -2.0399613,\n          -2.033366, -2.20878, -2.487807, -2.1843228, -1.8659741, -1.881563,\n          -2.3812628, -1.9711432, -2.1501844, -2.2721488, -1.813301, -1.8368465,\n          -2.1128387, -2.3040423, -2.6563406, -1.9628875, -2.2360194,\n          -2.3045492, -2.2451193, -2.5775018, -2.047439, -2.3106627, -2.1124313,\n          -1.9980546, -2.2577305, -1.9977155, -2.0085564, -2.1718853,\n          -3.9304676, -1.8986434, -1.9047749, -1.9229028, -2.1014469, -2.537519,\n          -2.6529217, -2.31025, -1.9334949, -1.796481, -2.9063954, -2.0648232,\n          -2.3255317, -2.2250478, -2.5736194, -3.5998192, -2.1379678, -2.723008,\n          -2.939169, -3.053105, -3.9497592, -3.1534941, -3.3767736, -2.1855912,\n          -3.4670146, -6.3716393, -4.2738733, -2.434441, -4.7714667, -4.589805,\n          -4.959077, -2.7561874, -4.3232794, -2.740808, -2.8243487, -3.9605234,\n          -3.4604313, -2.7065961, -6.068086, -6.0786867, -3.5492153, -5.709071,\n          -2.1999042, -4.1939697, -3.6098812, -5.782508, -4.0327687, -6.203905,\n          -2.1922278, -2.2782393, -2.4290404, -2.4427545, -3.199056, -5.023345,\n          -2.2551022, -2.9791067, -3.2474163, -2.243982, -3.5441117, -2.7690384,\n          -3.6095276, -3.255176, -2.6776273, -2.6551788, -2.4600167, -2.2966177,\n          -1.9961336, -2.4975839, -2.5526073, -3.0071547, -2.8752391, -7.322864,\n          -3.6169648, -2.8751423, -4.7001524, -6.7427464, -3.6106853,\n          -2.4639018, -4.794964, -3.2962768, -2.7963517, -2.5175323, -3.3550315,\n          -3.2839267, -4.0063868, -2.938738, -3.2653677, -2.0275402, -2.5693622,\n          -3.3391628, -4.112069, -2.4103444, -2.5815146, -2.4570982, -2.9206357,\n      
    -2.6522524, -2.232228, -2.307984, -5.9732804, -4.825717, -3.8391047,\n          -2.2225866, -3.9098666, -4.889138, -3.6301153, -2.7953954, -2.2403986,\n          -2.09101, -3.7887583, -3.6717098, -2.5485013, -2.2754288, -4.413657,\n          -4.2619123, -3.6700647, -3.6085973, -2.2921927, -3.1419306,\n          -2.2765403, -2.431947, -2.2881124, -2.2225246, -2.843642, -2.7188275,\n          -2.9016843, -3.3257384, -2.4257905, -4.3817286, -2.0652525,\n          -2.0353162, -3.4925709, -2.8533604, -3.0184164, -2.9160662, -3.452129,\n          -3.3308847, -3.1446595, -2.7629244, -4.060753, -3.3641934, -4.5306907,\n          -2.7997391, -5.0139904, -3.9912064, -2.5082939\n        ],\n        \"pointIndex\": [\n          0, 1255, 256, 1595624174, 420418603, 1336138779, 503927129,\n          1359566407, 280802615, 150136042, 3692996, 665504981, 949475038,\n          50286260, 1125686072, 1282042985, 1872162215, 157102042, 454994246,\n          1587410745, 609665789, 1200082062, 1171214805, 1902230760, 847331506,\n          1598252240, 1445118495, 1610068947, 1194442377, 1896710392, 359547984,\n          497749265, 282571950, 531963305, 678620578, 502828948, 1303457028,\n          733044781, 824345301, 1134757455, 1757924619, 1708658538, 1508296009,\n          949874839, 1764962526, 22071435, 112983632, 1479820342, 1893770907,\n          1318621710, 284053641, 552124488, 735926860, 438794502, 1284105615,\n          326809658, 1917781586, 1860744660, 1153253229, 1497199698, 1969057161,\n          1974720042, 1486644109, 174979513, 1658510367, 440166081, 1232412144,\n          1928663421, 500575328, 648431842, 769368113, 1235260006, 573095955,\n          658807788, 34545359, 208497870, 1805281587, 1692857443, 1679577101,\n          910172434, 805831328, 1941170410, 1772364771, 1400744872, 957287994,\n          1070402280, 1306036065, 1678467741, 1255\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        
\"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -7325120448755709923\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4789896, -1.4813277, -1.4817868, -1.4927266, -1.4817274, -1.494836,\n          -1.4951668, -1.5189464, -1.4971339, -1.4891075, -1.494705, -1.5260731,\n          -1.6166315, -1.5730482, -1.5089144, -1.5422033, -1.5432531,\n          -1.6478055, -1.5075778, -1.4952298, -1.5665935, -1.583639, -1.5436229,\n          -1.6401356, -1.5861496, -1.6939223, -1.6734645, -1.7411418,\n          -1.6076918, -1.6060724, -1.5620602, -1.5942497, -1.6047797,\n          -1.9373966, -1.6813858, -1.6566732, -1.6520962, -1.7416009,\n          -1.5672114, -1.5435431, -1.5410064, -1.6136974, -1.575288, -1.8296115,\n          -1.635954, -1.6432008, -1.5914276, -1.8453808, -1.8873544, -1.9382782,\n          -1.8738217, -1.7060682, -1.8990436, -1.8289719, -1.6985285,\n          -1.7595074, -1.7536279, -1.7418392, -1.8090104, -1.9695964,\n          -1.6294744, -1.6043773, -1.6309644, -1.6044484, -3.3081942,\n          -1.6289575, -1.6165967, -2.5297084, -2.8675802, -2.765154, -1.864613,\n          -1.958626, -2.2022676, -1.6808785, -1.8817226, -1.7862926, -2.30006,\n          -1.9910297, -2.413693, -1.6648751, -1.8299508, -1.547286, -3.0589058,\n          -1.8049306, -1.7710308, -1.7064773, -1.7509496, -1.8867807,\n          -2.0005727, -1.6649243, -2.0950646, -2.0428991, -2.936637, -2.86679,\n          -3.4534729, -2.1623518, -1.9664167, -2.0203717, -1.9363478,\n          -2.8894434, -2.441115, -2.0852993, -2.1043887, -1.7624108, -1.8068472,\n          -2.2669864, -1.9438679, -2.0263252, -2.809571, -3.5156472, -2.0535638,\n          -2.7014992, -1.9866724, -2.115246, -2.4434855, -1.7675318, -2.2506127,\n          -1.8771663, -1.8373984, -1.99535, -2.0801814, -1.7359631, 
-2.1916656,\n          -1.6493181, -2.2410676, -1.6915656, -2.0914423, -1.6744939, -2.904893,\n          -4.338845, -4.0180583, -1.713513, -2.0375726, -3.845712, -1.8471489,\n          -2.6868517, -5.378029, -5.954161, -2.868096, -3.3108563, -4.0419126,\n          -5.047232, -3.3756578, -2.0724225, -2.0070157, -3.366248, -2.2862413,\n          -7.0287, -2.3656926, -1.8847853, -2.4822066, -2.2740889, -1.8520479,\n          -2.5333264, -3.8444266, -2.0047996, -2.339066, -3.0678396, -6.6533227,\n          -2.544771, -2.0255227, -2.0628326, -2.344547, -3.2596714, -1.5772738,\n          -3.762522, -3.442151, -2.8425343, -2.5368123, -3.4553957, -3.7509146,\n          -1.9215556, -2.4849117, -1.809928, -2.1047919, -2.1177597, -2.006969,\n          -2.1972163, -2.0462108, -1.9388325, -1.9577699, -2.6966898,\n          -2.8698642, -3.760625, -2.2737677, -3.137625, -3.1042125, -2.9655306,\n          -3.3402255, -8.947269, -3.5496058, -2.331477, -7.22113, -2.038032,\n          -2.2908099, -2.40332, -3.0953493, -6.5161424, -2.2471056, -3.0264566,\n          -3.6173818, -2.663741, -3.1969995, -4.6820235, -2.3960974, -3.5762146,\n          -3.4620936, -4.7139273, -3.4655447, -1.925014, -1.8825349, -2.921921,\n          -2.4039183, -2.037784, -3.0472116, -3.1644752, -2.3709059, -5.91918,\n          -3.0437999, -3.6559024, -5.0809813, -3.1907015, -2.965922, -5.205322,\n          -3.1386008, -2.574078, -2.78269, -3.4993951, -2.2372167, -4.0201955,\n          -3.0609505, -2.824737, -1.7775307, -3.563109, -2.7900605, -2.907892,\n          -2.1786776, -2.11636, -3.7923527, -2.48394, -4.18271, -3.610519,\n          -2.114958, -3.14728, -2.2880132, -3.141728, -2.349664, -3.7127583,\n          -3.242941, -2.8023453, -2.5635731, -2.2638948, -2.2723649, -2.7769027,\n          -3.9965684, -1.709681\n        ],\n        \"pointIndex\": [\n          2, 1247, 256, 927118680, 1728068399, 275525989, 542996925, 1329648133,\n          746207253, 840245279, 470796809, 736120960, 426255950, 
54291877,\n          546082091, 867037754, 997001643, 1670410354, 157329379, 571876769,\n          1448766970, 640676627, 312859989, 1820217497, 1073931095, 1711070860,\n          1409810180, 352270578, 1120091963, 443839731, 1122704792, 393898066,\n          78365861, 463014431, 1756508935, 609016580, 172879032, 1579063848,\n          824482771, 1418019519, 1904054526, 983596714, 615713350, 1407659374,\n          1592542167, 231319847, 5983785, 104215344, 815232617, 1515343063,\n          1089772401, 274186175, 740797040, 577096301, 1473655022, 1766792412,\n          1843099405, 1081775419, 1027182103, 377312116, 1860317336, 1114222603,\n          1314021632, 465250561, 1722105342, 1924589275, 498332310, 1918415625,\n          1874800468, 1760567206, 1731511184, 596688843, 176007985, 1576630510,\n          670559844, 1232878898, 1463803296, 1172106116, 854619453, 1599826603,\n          945325839, 1035374295, 1183946262, 1202664088, 1529404870, 1839767242,\n          1601917146, 9204005, 1245\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 5966322168077801063\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.359602, -1.3747983, -1.3669475, -1.3917516, -1.3749169, -1.3978804,\n          -1.3743601, -1.3946548, -1.4078739, -1.4120302, -1.4068153,\n          -1.5031053, -1.4189385, -1.4294072, -1.3918122, -1.4187685, -1.451527,\n          -1.5439616, -1.572372, -1.5431609, -1.4799433, -1.4809572, -1.5019768,\n          -1.5062917, -1.6155772, -1.4287484, -1.4429781, -1.4321111,\n          -1.4374574, -1.4256124, -1.4440918, -1.6143839, -1.4548193,\n          -2.1818051, -1.521527, -1.7077321, -1.594992, -1.6788688, -2.0228424,\n          
-1.5561807, -1.7651178, -1.4828888, -1.6438819, -1.6252949,\n          -1.5230577, -1.905284, -1.8408743, -1.5944843, -1.599917, -1.6622891,\n          -1.6484537, -1.4462303, -1.5112286, -1.6332244, -1.4575502, -1.488177,\n          -1.7010106, -1.6937976, -1.5946316, -1.4753292, -1.438668, -1.6429601,\n          -1.4513603, -2.2772639, -1.7143407, -1.5759331, -1.5740432,\n          -2.8094275, -3.3058069, -2.1396427, -1.5359617, -2.1293366,\n          -2.0036054, -1.7648398, -1.8915709, -1.770045, -1.9245359, -2.1247864,\n          -2.9160671, -1.7070603, -1.9920924, -2.032921, -1.9603047, -1.747028,\n          -1.8338137, -2.3822255, -1.9893866, -1.8081368, -2.1531112,\n          -1.7730501, -2.1450498, -3.132966, -2.2067213, -1.924489, -2.756486,\n          -1.7515236, -1.8227607, -2.4800415, -1.8187356, -2.2551625,\n          -2.1342087, -2.6814182, -2.2624876, -2.3270738, -1.9789789,\n          -2.0078046, -1.5552356, -2.0232923, -1.7029153, -1.4964641,\n          -2.9953322, -1.6763792, -2.9508388, -1.9122832, -2.121463, -1.8967429,\n          -1.9978342, -1.6310147, -1.7970418, -2.0497434, -1.4872304,\n          -1.9258558, -2.6047144, -2.3391123, -1.666762, -1.6153685, -1.4929992,\n          -2.3954306, -2.6757016, -2.0115712, -3.2982168, -1.8071309, -4.732093,\n          -2.86006, -1.6296129, -3.9723382, -3.5085964, -3.8349621, -4.332474,\n          -3.9460185, -2.1973403, -2.2070367, -3.469926, -2.7214763, -2.7626624,\n          -4.706231, -2.5175548, -2.3759713, -5.3709946, -2.9945693, -3.9092703,\n          -2.2447438, -1.8524231, -1.9837755, -3.076053, -4.0829, -4.77054,\n          -4.881027, -4.1488724, -4.4731345, -2.218555, -2.7092788, -2.4048133,\n          -3.8219318, -2.6914885, -2.0651407, -3.2814193, -2.644386, -2.9437237,\n          -1.9948936, -4.206107, -6.2308683, -2.5094018, -2.0590875, -2.1312287,\n          -4.2151265, -2.7633343, -5.1526904, -3.3049357, -2.1771789,\n          -1.9143611, -2.174454, -2.895144, -3.2811656, -3.7764373, 
-2.2945032,\n          -2.8516362, -2.284171, -2.7553618, -3.522863, -4.0175667, -4.0974483,\n          -2.0300355, -4.1277246, -2.806212, -3.3823714, -3.3424401, -2.6951935,\n          -2.4054852, -3.0757685, -4.821749, -2.8912666, -2.3331127, -3.8103104,\n          -3.4520514, -5.378011, -2.5183861, -3.231196, -2.606206, -2.0731435,\n          -2.005029, -2.679055, -2.581851, -1.9455084, -2.0369332, -4.7684417,\n          -2.5067194, -4.576556, -4.450429, -1.8643699, -2.1515625, -3.862825,\n          -5.5100384, -2.1516352, -2.974032, -4.912022, -3.0503857, -3.2343678,\n          -2.8258865, -2.246921, -5.2116704, -2.021993, -1.9658217, -3.0767105,\n          -2.3124611, -4.3563266, -2.8614416, -2.0766013, -5.007502, -3.0026526,\n          -3.7666762, -3.6385636, -2.8427145, -3.7251058, -3.909793, -3.4142978,\n          -3.6704612, -2.6715941, -3.6530285, -1.9842077, -1.8425122, -4.820701,\n          -1.7397573, -3.8442929\n        ],\n        \"pointIndex\": [\n          1, 1254, 254, 784888285, 1186575131, 789189176, 1400803504,\n          1772303693, 308336973, 67293351, 526444841, 1621772241, 1269871206,\n          899643397, 305167009, 310470336, 1651984956, 165952774, 513691375,\n          182197367, 1100310439, 978851783, 1191155089, 1861395843, 1034802101,\n          156803733, 551822515, 317361259, 1519332308, 523016884, 1462094261,\n          394615233, 1016367154, 1135725509, 3108653, 567819053, 186342505,\n          1236149782, 1837210046, 1685297421, 1297883652, 862509255, 1489238425,\n          1111998914, 1221306814, 1889841812, 1290135051, 267762769, 1818043055,\n          279002659, 25181910, 744215253, 1761708314, 485415160, 241274699,\n          340750885, 1918191838, 364652689, 755478218, 470482903, 121135392,\n          402187451, 547412642, 450747522, 1833931018, 493003335, 251741409,\n          844852725, 18667531, 1218005248, 1711986371, 720129522, 609568344,\n          1271094102, 1082130952, 680078710, 1337202443, 735316700, 244707662,\n 
         1587430410, 1913931677, 1640869653, 990635708, 986766945, 1560781711,\n          1507851713, 1526411016, 1572475\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 254,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 5863646346142641006\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.575608, -1.5814409, -1.5781481, -1.6155031, -1.5828708, -1.6040213,\n          -1.5803695, -1.6196128, -1.6891334, -1.5856794, -1.6220452,\n          -1.6704509, -1.6055237, -1.6525494, -1.5969791, -1.6239997,\n          -1.7467704, -1.8449363, -2.0470545, -1.812362, -1.6207516, -1.9919968,\n          -1.6685889, -1.7008271, -1.6975868, -1.6878153, -1.6768947,\n          -1.6593972, -1.7077638, -1.6490749, -1.7063341, -1.7464024,\n          -1.7496744, -1.8403921, -1.794143, -1.863235, -2.5615525, -2.1757193,\n          -2.2156215, -1.9555329, -1.9124904, -1.8627983, -1.6415664,\n          -2.0704143, -2.0065598, -1.7881496, -1.9410346, -1.7241747,\n          -1.9910812, -1.7615238, -2.0226061, -1.7233528, -1.734961, -1.8799304,\n          -2.1976762, -1.8057204, -1.6640658, -1.7348346, -1.7793133,\n          -1.7161245, -1.9093564, -1.7863426, -1.782226, -1.748099, -1.9819493,\n          -1.9970762, -2.3123357, -1.8498224, -2.2264159, -1.8386749, -3.345277,\n          -1.883858, -1.9660522, -2.8395278, -2.9833143, -2.3452234, -2.2418785,\n          -2.795027, -2.6312294, -2.8163066, -1.9591815, -1.9430947, -1.9477563,\n          -1.9072555, -2.464852, -1.7954881, -2.0065253, -2.4018254, -3.7181277,\n          -2.1584997, -2.0945082, -2.143602, -2.129317, -2.2141273, -2.14982,\n          -1.958308, -2.2566347, -2.0064788, -2.3119383, -1.8199244, -1.8838389,\n          -2.1038854, -2.8273778, -2.204648, 
-1.773783, -2.058429, -1.9341174,\n          -2.0440369, -2.2040834, -2.2547963, -2.5701208, -1.887871, -2.0140364,\n          -2.740925, -2.2461722, -1.9907988, -1.8508942, -2.2053757, -2.3237412,\n          -2.1764266, -1.8121494, -2.2599766, -2.0563395, -2.2131538, -2.101571,\n          -1.8369926, -2.1113522, -1.9830061, -1.8985038, -2.0979133, -5.289009,\n          -2.5313275, -2.0532134, -4.012346, -3.6139667, -2.0709507, -1.9825712,\n          -2.4745681, -2.5805273, -4.426341, -4.355678, -3.5989287, -4.8145404,\n          -2.1951988, -2.3304834, -3.2525547, -4.0202074, -3.2697027,\n          -3.6303856, -4.2800465, -3.1944683, -3.215791, -4.1126847, -2.9608352,\n          -3.155402, -3.1948025, -3.4552639, -3.7967749, -3.6374478, -3.9048827,\n          -3.5179439, -3.2579017, -3.5275407, -2.9210572, -4.9047728,\n          -3.7414958, -2.8654935, -2.240625, -2.3643675, -4.65246, -3.8078144,\n          -1.9313331, -3.3564284, -2.3961694, -3.6561205, -4.623437, -2.4091523,\n          -5.6450777, -5.337534, -2.49797, -2.4037368, -4.982056, -5.2365403,\n          -2.202554, -2.1648428, -2.956364, -3.4817119, -4.1723804, -3.3398435,\n          -3.7754629, -2.468077, -1.9774877, -3.2842422, -5.4654374, -2.7166097,\n          -2.459306, -3.7167258, -3.2403567, -3.3395982, -2.4355524, -2.1046832,\n          -2.7346094, -2.687636, -3.116037, -3.7386253, -4.0494804, -6.3595433,\n          -2.8649182, -2.4392982, -2.4244318, -2.0033848, -3.9102988,\n          -2.9147055, -3.494289, -2.4981205, -4.4003124, -5.03912, -2.496209,\n          -4.6680713, -3.4900355, -2.2696192, -4.592746, -4.233194, -3.0151408,\n          -2.5300114, -3.3627484, -2.2335417, -3.8549244, -3.4596527, -4.547544,\n          -2.9830463, -2.4512615, -6.091157, -2.3038387, -2.8788407, -2.7567303,\n          -4.153861, -3.0705404, -3.3430862, -2.7613983, -4.5446625, -2.7000647,\n          -2.2375746, -2.679622, -5.9046073, -3.7500348, -2.0826974, -3.1326358,\n          -2.3658702, -2.9701922, 
-2.2291567, -3.1797545, -3.7251885,\n          -4.6901045, -4.6262054, -1.9968588\n        ],\n        \"pointIndex\": [\n          1, 1251, 256, 1306762992, 565865793, 124121482, 1738965356,\n          1408181594, 430654305, 1411568429, 1171867843, 1047777375, 1119647973,\n          266038704, 25368794, 1841871691, 1862650395, 1274069771, 1474173702,\n          1946760321, 1708540483, 1290361016, 1546358337, 1378278783,\n          1583166718, 740915715, 1456470874, 313253948, 62598705, 60392692,\n          229433700, 246820125, 1294601145, 1692135495, 523355825, 798583808,\n          1317097459, 695367492, 1427432701, 1262025497, 1529849795, 1466036129,\n          999579821, 1866554081, 1809138432, 910825491, 373589947, 507923371,\n          1182244497, 1074960353, 1532937894, 1157218619, 1302326523, 320144715,\n          1668549230, 788611575, 585858314, 362707653, 139653123, 396733880,\n          1690075672, 466833284, 1858004361, 191645971, 351239354, 502669945,\n          6692671, 1835161702, 554683535, 592019263, 588578366, 732981881,\n          689650548, 656897546, 664088272, 713102952, 1081854367, 1528558585,\n          1324596181, 1648569542, 1612941687, 1351288215, 469215660, 1033732722,\n          1114450720, 1333201431, 1577051617, 1384545, 1250\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -3485342436052657063\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4646834, -1.4667517, -1.4848248, -1.4955488, -1.4748328,\n          -1.4996586, -1.5102067, -1.5423312, -1.5171639, -1.5108787,\n          -1.5430261, -1.5118963, -1.5083525, -1.5142311, -1.5558296, -1.58652,\n          -1.5521219, -1.5663732, -1.5962416, -1.5707482, 
-1.5287815,\n          -1.5511978, -1.5490642, -1.8354799, -1.6619667, -1.5551033,\n          -1.5756073, -1.667111, -1.5569456, -1.6726259, -1.564499, -1.6257182,\n          -1.6528468, -1.5773174, -1.6714104, -1.6499722, -1.8766035,\n          -1.6815758, -1.6395549, -1.6178705, -1.6063552, -1.9306438, -1.610218,\n          -1.64699, -1.6621773, -1.8209532, -1.5760742, -1.8630723, -2.0983984,\n          -1.982195, -1.7652756, -1.7825636, -1.6701146, -1.6431956, -1.6910659,\n          -1.7496774, -1.8248948, -1.5788943, -1.7409449, -1.7139764,\n          -1.7687455, -2.3207808, -1.5775722, -2.337894, -1.8376864, -2.0159893,\n          -1.7816584, -2.2645795, -1.8502923, -2.4336076, -1.958431, -1.9566456,\n          -1.6514288, -1.9508808, -2.0869696, -1.8643857, -1.8263228,\n          -1.9006462, -1.9109508, -2.375001, -1.6502724, -2.424987, -1.9207541,\n          -1.9790317, -2.4335063, -1.9445292, -1.767669, -1.7854769, -2.0634897,\n          -2.0707114, -1.7828038, -1.987555, -1.9973936, -1.7249879, -1.617046,\n          -1.9723246, -2.5366223, -2.6620522, -2.2095118, -2.2030852,\n          -3.3440301, -1.9094471, -2.7955794, -1.8991795, -2.0969698,\n          -1.8752227, -1.8043672, -2.1406016, -2.1403084, -2.2700257,\n          -2.9699655, -2.087489, -2.1303475, -1.8860812, -1.9416108, -1.6463115,\n          -2.4058745, -1.9469516, -1.7948481, -1.9236622, -1.968078, -2.482212,\n          -2.043224, -2.6402094, -2.669888, -1.9306325, -1.6355859, -2.4234571,\n          -4.2518926, -2.8369408, -2.070781, -2.1696022, -3.0118892, -2.9100466,\n          -1.9155637, -2.3898082, -3.611016, -2.1233342, -2.417018, -2.6857314,\n          -4.0778794, -2.3300252, -4.301172, -4.639753, -2.4882767, -1.7226529,\n          -4.915581, -2.089176, -2.3308697, -2.1675203, -2.1050093, -5.076467,\n          -2.0216632, -2.3076777, -1.8784783, -4.519786, -2.816605, -6.4273553,\n          -3.5487063, -2.7025511, -2.7042325, -2.0779943, -1.8412688, -2.822589,\n          -3.3284762, 
-3.7763994, -3.213167, -2.7693353, -1.9806886, -2.5737548,\n          -3.1934195, -2.1217012, -2.1747608, -1.9155222, -2.1172097, -2.39838,\n          -1.9219064, -2.439878, -2.5006132, -5.60645, -2.0933144, -3.2653193,\n          -3.3058648, -3.427139, -3.3744586, -2.867972, -2.6194758, -1.8476988,\n          -3.124068, -2.5218716, -1.6921393, -5.984711, -2.1221485, -2.6376462,\n          -5.501952, -2.9553864, -2.7800412, -4.575756, -2.4023063, -2.88531,\n          -3.1735258, -6.147703, -3.7064834, -2.2638023, -2.342982, -3.5818238,\n          -5.544716, -3.0300126, -5.0124764, -5.1720357, -2.489125, -2.1045215,\n          -2.5063248, -2.5694091, -2.4880934, -4.0698066, -3.3531225,\n          -3.9052632, -3.8701982, -2.6745663, -3.0083475, -4.558422, -3.5705185,\n          -2.1124594, -2.4647415, -3.4744031, -2.2037675, -2.4264262,\n          -5.6101284, -2.4054873, -2.2422447, -4.917252, -1.9251585, -2.590822,\n          -2.6595664, -2.2246978, -2.3974667, -2.8268957, -2.4082587,\n          -1.9532785, -4.3572836, -7.100543, -1.9996116, -2.8446536, -3.6927261,\n          -3.9367738, -2.9279373, -3.4739842, -4.2427716, -4.3492055, -3.573538,\n          -3.8667417, -2.2903125, -4.8911166, -3.1270065\n        ],\n        \"pointIndex\": [\n          11, 1255, 255, 1125095316, 1225162176, 266281107, 145079851,\n          1823422636, 1444177912, 1468908580, 746670828, 1326526904, 1552504204,\n          1641656912, 1809083391, 42269167, 156643563, 500660808, 1613598470,\n          514128595, 1735397476, 943131624, 810301342, 1890323476, 1737317963,\n          1744212142, 1664960398, 1836727015, 827400922, 340694445, 1681448655,\n          997949940, 390304714, 1183284360, 1616489069, 462266295, 508799902,\n          930032643, 921531117, 1011764571, 718598253, 704388491, 1570811030,\n          972584919, 1400711983, 713078717, 789564898, 1610032627, 246278113,\n          1487608946, 1729543265, 130011520, 922436406, 1608818438, 1548858618,\n          1239620460, 
322350255, 1023052825, 14181121, 349967450, 361023885,\n          369940490, 1467829099, 1132872206, 1336230914, 621519519, 449632149,\n          1814336297, 1208394719, 1073046822, 498435614, 658340617, 1352005,\n          1664189276, 1404052473, 1826399365, 616785564, 1837282565, 769356216,\n          970320150, 1367241621, 1334791867, 1112420348, 847842300, 995865010,\n          1056532534, 1374257984, 1928407706\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -566338866570349155\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6110169, -1.6257101, -1.6132237, -1.6336024, -1.6341721,\n          -1.6298529, -1.632508, -1.6436764, -1.6744068, -1.6673356, -1.6571995,\n          -1.662185, -1.6644176, -1.7309585, -1.6500595, -1.670506, -1.802056,\n          -1.7575324, -1.80587, -1.7163657, -1.7433038, -1.674397, -1.9227827,\n          -1.8037908, -1.8172027, -1.6667875, -1.7015543, -1.8367963,\n          -1.7550453, -1.6542269, -1.697557, -1.7011623, -1.9053563, -1.8065615,\n          -1.9608865, -1.789415, -2.0639627, -2.0471902, -2.0799065, -1.8248667,\n          -1.9222915, -1.8206872, -1.8429751, -1.7309147, -1.6954039,\n          -2.3076153, -1.9629672, -2.556063, -1.9714468, -2.0777094, -1.9243582,\n          -1.9282019, -1.7226651, -1.7366803, -1.7797271, -1.8666673,\n          -1.8559352, -2.0115576, -2.0343313, -2.3465438, -1.7615066,\n          -1.7925524, -1.7505562, -1.8233827, -1.7639633, -2.5499158,\n          -2.0778117, -2.393876, -2.1003664, -2.4747236, -2.0314233, -2.6079473,\n          -2.0488465, -2.0726097, -2.420869, -2.1173067, -3.612475, -2.2708921,\n          -2.2564926, -2.619866, -2.0709743, -2.1717794, -2.234867, 
-3.5616586,\n          -1.8582594, -2.2900496, -2.5066276, -1.7869142, -2.2555428, -1.739632,\n          -1.7395467, -2.7097666, -2.593255, -2.9155746, -2.1162503, -2.8907096,\n          -3.022455, -2.0814097, -1.9743068, -2.6041746, -2.7876935, -2.1046832,\n          -2.2939086, -1.9349879, -1.9833627, -2.1678498, -2.122495, -1.9412091,\n          -1.8106226, -1.8935112, -2.7455416, -2.1053557, -2.0693269,\n          -1.9052274, -1.8595452, -3.2098026, -2.1161914, -2.6492712,\n          -2.1443713, -2.5484862, -2.5748754, -1.8049053, -1.8047763, -2.359784,\n          -2.242467, -2.145636, -1.7720666, -1.8494686, -2.4786148, -3.2393236,\n          -2.545857, -2.5782166, -3.0330493, -3.2381384, -2.7825084, -4.427443,\n          -2.4144044, -2.4751902, -2.1578116, -2.694748, -4.1466045, -3.1840563,\n          -2.1466255, -2.6621218, -3.1598535, -2.0791852, -2.5672572,\n          -2.3185058, -2.7774062, -4.564393, -2.682265, -2.536275, -5.773803,\n          -3.7649782, -3.791073, -2.53297, -2.7084506, -3.979404, -2.3734815,\n          -2.8669748, -2.7112288, -2.3446472, -2.3289793, -2.5032165, -3.599362,\n          -2.8568487, -2.863185, -4.8855476, -8.4651, -2.6190147, -3.374161,\n          -3.6828594, -2.9956791, -2.5724895, -3.271289, -2.27042, -3.229629,\n          -2.7546244, -3.3204, -1.8723733, -2.225646, -6.912936, -3.2054763,\n          -4.020286, -8.263332, -3.6173582, -2.6950696, -4.516153, -4.0550213,\n          -4.3354163, -2.9266644, -3.224055, -4.048556, -4.0799303, -3.4536102,\n          -2.6236393, -2.9264743, -4.9663267, -2.129841, -2.8336856, -3.5423813,\n          -3.6727676, -3.2670007, -2.616068, -3.2305954, -3.4472659, -3.2471404,\n          -2.6500194, -2.8602796, -1.9934101, -3.1204846, -2.2264602,\n          -2.9697616, -6.267233, -3.3636794, -7.565011, -3.5965343, -1.9816792,\n          -2.0770934, -1.9365543, -2.0132627, -2.8048935, -4.533492, -2.4005785,\n          -5.5340495, -2.0961595, -2.2101908, -2.0628667, -2.1616464,\n          
-2.7200656, -2.3370745, -4.169037, -6.5144353, -2.5519133, -5.150039,\n          -3.290528, -4.880377, -2.9813533, -3.8787777, -6.4337726, -2.9560385,\n          -3.278851, -3.3013442, -3.0591831, -2.538268, -1.9257783, -2.6350656,\n          -2.376288, -4.180156, -2.671247, -6.815842, -3.3597534, -2.939458,\n          -4.6065555, -2.9247332, -1.9990978\n        ],\n        \"pointIndex\": [\n          6, 1255, 256, 856366176, 1331580477, 121162326, 1861397757,\n          1784140675, 722625927, 369512698, 999326733, 631722989, 1147316518,\n          1889250899, 676780164, 948684465, 823038541, 58269054, 495469040,\n          867805346, 849223592, 821504320, 558589563, 1557329491, 832437078,\n          29889774, 1526663079, 1698982164, 464463946, 1587517277, 69230377,\n          1550349704, 884708891, 1714424156, 1320667169, 158292080, 579023462,\n          611679432, 1030184143, 211940121, 795876162, 1703808486, 1738281610,\n          1786066150, 1465617222, 232889955, 23590000, 1347610470, 1502168760,\n          389273338, 548657299, 112569024, 1150635185, 749910194, 10760582,\n          1374598658, 928145740, 364345111, 534775007, 1345172731, 435641728,\n          1850689153, 1457779896, 577917354, 458279036, 520015476, 1256635884,\n          724898515, 1907397043, 548371635, 1238248680, 1598492494, 32791065,\n          639123069, 634188682, 1881397161, 674880170, 1385660437, 779881167,\n          1149958022, 864778067, 1885143112, 1006943369, 1209156139, 1638461945,\n          1223199723, 1902694839, 1917072287, 1249\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 4189210767075612828\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          
-1.5809543, -1.6052395, -1.5824018, -1.6341741, -1.6220462,\n          -1.6090232, -1.587091, -1.672407, -1.7943244, -1.6540173, -1.6431609,\n          -1.7253053, -1.656684, -1.7000749, -1.5897003, -1.7035328, -1.7050048,\n          -1.7974013, -1.8766737, -1.7098727, -1.8727963, -1.710227, -1.6779032,\n          -1.7963215, -1.7875885, -1.6611732, -1.6593343, -1.8127413,\n          -1.8638108, -1.6368163, -1.5975319, -1.7131653, -1.7733179, -1.754794,\n          -1.7803135, -1.8625493, -1.8076078, -2.101797, -1.9411423, -1.9826176,\n          -1.7513057, -1.915989, -2.0829697, -1.7271876, -2.0167289, -1.9224572,\n          -2.0267813, -1.8872782, -1.9540212, -1.8734661, -1.8287128,\n          -1.8533505, -1.6984493, -1.7122878, -1.767857, -1.833617, -1.8400757,\n          -1.9171269, -1.8674517, -1.67116, -1.650148, -2.213368, -1.6337502,\n          -1.9484588, -2.0833864, -1.794819, -1.8741286, -1.7609497, -1.8035247,\n          -2.120493, -1.8810593, -2.0289762, -2.8589911, -2.7678647, -1.8345987,\n          -2.4043863, -2.262942, -1.956295, -1.9800524, -2.6081913, -2.3866434,\n          -2.3147933, -1.8962301, -2.3161876, -2.0217319, -2.3371081,\n          -2.0991838, -2.1103528, -2.0742226, -2.9598522, -2.7799296,\n          -1.9263036, -2.0695686, -2.340692, -3.773643, -2.1007228, -2.529532,\n          -2.0259535, -2.5129123, -2.2907164, -1.8977658, -1.9150738,\n          -1.8403314, -2.215647, -2.2819843, -1.8791007, -2.59138, -1.7411674,\n          -2.092544, -2.0886123, -2.1413665, -2.1140273, -1.920856, -1.8703722,\n          -2.0212836, -2.1943898, -2.301725, -2.5547743, -2.0424213, -2.4508665,\n          -2.4707499, -1.8075922, -1.8725858, -3.5640264, -2.6105704, -2.165559,\n          -2.1232579, -2.6026196, -3.8884327, -2.1737967, -2.728757, -2.7798426,\n          -2.0043423, -2.439043, -2.4831007, -1.8531009, -2.0460627, -2.9669995,\n          -3.371271, -2.7603664, -5.145329, -3.5285287, -1.9487286, -2.1082284,\n          -2.2038436, -4.115193, 
-5.2521424, -3.1030037, -3.3301845, -2.8608727,\n          -2.355765, -2.9226215, -5.292759, -2.8541307, -5.8843527, -2.9329383,\n          -4.2045536, -2.4997535, -2.1682842, -3.929541, -2.8143623, -3.2451994,\n          -4.985799, -2.5752323, -5.0215697, -3.2431254, -2.9068696, -2.3326433,\n          -5.0619807, -4.032271, -2.251037, -2.4568508, -2.573857, -3.296703,\n          -3.1517873, -2.388365, -3.1821537, -2.3441288, -3.32923, -3.3475256,\n          -5.413077, -4.935657, -3.4767509, -3.1256387, -2.2770538, -2.696921,\n          -4.8854313, -3.588679, -7.3792324, -3.8347638, -4.33273, -3.9180882,\n          -2.7039533, -2.640478, -3.0467978, -2.8800092, -3.728069, -2.8766658,\n          -3.2154396, -3.0740054, -2.8467777, -3.4180677, -2.7143333, -2.076263,\n          -2.0485897, -5.0210037, -2.7091832, -2.9298787, -2.4252849,\n          -3.3718836, -2.4471207, -3.6353476, -2.2861705, -2.7411432,\n          -5.5288014, -1.7537444, -3.9634967, -2.5614748, -4.5685825,\n          -2.5693524, -2.8589904, -4.052606, -3.0085058, -8.247205, -3.4645865,\n          -2.7214427, -2.5838864, -4.3286037, -2.4010165, -3.414822, -2.885385,\n          -2.4666367, -4.323651, -3.8383358, -3.139137, -3.4275022, -3.237697,\n          -3.1840107, -2.252056, -3.050251, -3.1270928, -2.7886403, -3.3036432,\n          -3.7236898, -4.4391513, -3.3557675, -2.2811415, -4.0825143,\n          -3.5760353, -3.5455575, -2.8113878, -3.084719, -5.0754757, -6.223174,\n          -2.2824337\n        ],\n        \"pointIndex\": [\n          1, 1255, 255, 1017284302, 1089734496, 10949292, 1652093439,\n          1596861574, 1262420369, 30885259, 1899089182, 702589526, 226343027,\n          650443794, 938940964, 204420704, 449388811, 684278980, 484967400,\n          1865413300, 624197316, 792817202, 1057627947, 1911730982, 49351379,\n          269342876, 1366723898, 465991636, 317674381, 568846759, 474593987,\n          25762502, 397721334, 876847266, 945371139, 786347852, 1099848013,\n          
1463711320, 1779232933, 1218049435, 1038543087, 1886678781,\n          1338489441, 1355967911, 191822678, 73117530, 1172672466, 1929280355,\n          265143882, 276538306, 286271887, 508121392, 1622063844, 952218103,\n          130496378, 500479695, 333865593, 467046039, 1913019693, 1509216529,\n          384146715, 1617139672, 1314713586, 1846961643, 962770663, 1626111478,\n          456196157, 1804321800, 518594326, 547655169, 361125140, 92283015,\n          1293741157, 1103822982, 1531462748, 741344085, 721349132, 1695309884,\n          772484344, 435411073, 1857039100, 915276005, 979134019, 1420299320,\n          1245253122, 1506772618, 105372024, 1976398838\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 5716410955948198572\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4723679, -1.4748684, -1.4796734, -1.4989817, -1.4847839,\n          -1.5271605, -1.4858755, -1.6206623, -1.5259806, -1.5336944,\n          -1.5135266, -1.5312008, -1.544349, -1.4907415, -1.5200644, -1.8607095,\n          -1.71595, -1.6295044, -1.6614683, -1.5787373, -1.5668296, -1.6186606,\n          -1.7220145, -1.6395398, -1.637389, -1.5587585, -1.6250982, -1.574669,\n          -1.5053121, -1.5367, -1.6281161, -1.9685827, -2.0332274, -1.8441141,\n          -1.8605316, -1.6806389, -2.0616343, -1.7017778, -2.018361, -1.6003023,\n          -1.7140663, -1.5801271, -1.5731896, -1.6345347, -2.0808349,\n          -1.9523935, -1.7433275, -1.9722502, -1.6819129, -1.7351564,\n          -1.6872911, -1.6488663, -1.6682271, -1.6863096, -1.7568988,\n          -1.6824553, -1.9729172, -1.5136033, -1.5075728, -1.9928776,\n          -1.7682136, -1.9389695, -1.6388328, -2.048581, 
-2.7856648, -2.678215,\n          -2.0568538, -2.279208, -2.3106277, -2.098067, -2.046037, -2.4830816,\n          -2.1151178, -2.151959, -2.2791963, -1.7717863, -2.2011366, -2.4435546,\n          -2.215945, -1.8173331, -1.6686308, -1.7595752, -1.8025055, -1.6080472,\n          -1.6296883, -1.7254086, -3.258416, -2.5550792, -1.8049688, -2.0818071,\n          -3.0147269, -2.116284, -2.5157568, -2.0794017, -1.9321847, -2.090745,\n          -2.0223753, -1.7609588, -1.833841, -3.121823, -2.0643542, -2.269138,\n          -2.14882, -2.6094937, -2.3333528, -2.1854448, -1.7575346, -1.717381,\n          -1.8458064, -1.797531, -1.922467, -2.4076557, -2.198741, -2.005758,\n          -2.3674817, -1.5723048, -2.205375, -1.9498025, -1.6438376, -2.7989624,\n          -2.2850306, -2.0528595, -1.8120686, -2.024037, -1.9523389, -2.3633564,\n          -2.2165782, -5.296845, -2.6003857, -3.3466606, -3.048333, -2.975929,\n          -4.850967, -3.008884, -2.6241615, -3.4881067, -7.5118446, -3.2324533,\n          -2.4897316, -2.1628304, -3.6258216, -3.848587, -3.0612652, -2.6791778,\n          -3.2420094, -2.194795, -2.1248574, -2.9027042, -2.3620207, -2.8662558,\n          -2.6906807, -1.7725453, -2.0574784, -4.02859, -3.6893065, -3.6238759,\n          -2.5562239, -3.8399627, -3.0008607, -4.83231, -2.5115073, -5.7743516,\n          -2.8788157, -2.198657, -2.6013656, -1.9853501, -4.5019355, -2.4918034,\n          -6.2223206, -2.4242911, -1.8496494, -3.8475597, -4.650009, -3.535911,\n          -4.1198554, -2.7007039, -2.8500903, -2.1353986, -3.5506113, -2.997891,\n          -4.204195, -4.4949207, -3.9670541, -2.1987236, -4.757135, -5.226631,\n          -2.7338564, -3.3281708, -4.295948, -2.6056557, -2.1256652, -3.6111758,\n          -2.6671848, -3.766492, -2.9153528, -4.150468, -4.9099946, -2.6949108,\n          -1.8693603, -4.0355754, -4.0624247, -2.5225918, -2.5343404,\n          -2.8199022, -3.2567928, -2.29005, -2.2990692, -3.128523, -3.4083765,\n          -4.047287, -2.9685643, 
-2.8527641, -2.6012478, -4.909524, -2.082991,\n          -3.2168367, -2.2418272, -1.862048, -2.1094995, -2.2545705, -3.5991924,\n          -2.4222345, -3.9604158, -2.5520163, -3.1371665, -3.4637997,\n          -2.9052608, -2.9302106, -3.5320182, -3.6258898, -2.5361264, -4.311318,\n          -1.7770904, -2.2072034, -2.2768364, -3.1182168, -2.3346496, -2.716515,\n          -2.167842, -2.8909123, -3.067276, -5.121871, -2.5801742, -2.2827775,\n          -2.8184876, -2.1159203, -2.0943155, -2.2802548, -3.1016576, -2.298147,\n          -5.552916, -3.8240209, -3.769819, -4.703353, -4.1772947\n        ],\n        \"pointIndex\": [\n          2, 1251, 255, 1580393585, 937796393, 369078070, 1088167617,\n          1820918241, 107317162, 702518978, 180405041, 1542201070, 1152682873,\n          525044586, 190877682, 1447823620, 389110928, 169082333, 225169030,\n          76478458, 673272180, 1582572407, 1170513083, 1761994507, 1762524214,\n          860487169, 1138576973, 989335400, 1211982194, 374589419, 866698654,\n          1165170093, 83589305, 1044900109, 1061977604, 366177616, 1673983651,\n          649306908, 82352927, 719766844, 788108580, 909109540, 1536814447,\n          1113609650, 1595319572, 1819558734, 748175170, 570534497, 291551019,\n          1665537188, 53056636, 118843013, 1841587709, 509988965, 1681295645,\n          1837604773, 727677842, 28256415, 1720623001, 554211226, 148756275,\n          822589756, 419589759, 1527317775, 161592381, 1866867503, 1169484848,\n          967047195, 539531095, 574220620, 1360574594, 1325502980, 199192459,\n          1139894476, 1418390616, 686476847, 214632009, 1699060011, 457644879,\n          830359452, 1326960, 1094603602, 1163385703, 1692138214, 1252348902,\n          1298375863, 1502677091, 1952872299\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        
\"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -5848974963479365812\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.585127, -1.5865705, -1.5962614, -1.6085825, -1.596154, -1.630115,\n          -1.6023626, -1.6131486, -1.6124258, -1.6522865, -1.6071877,\n          -1.7168533, -1.6414491, -1.6560875, -1.6027256, -1.7860086,\n          -1.6391898, -1.7215892, -1.6661743, -1.7551733, -1.6553258,\n          -1.8516773, -1.7214888, -1.797124, -1.8066955, -1.6430796, -1.6840556,\n          -1.821842, -1.715812, -1.7092791, -1.6264325, -1.8273116, -1.8516202,\n          -1.8577257, -1.8426911, -1.8244375, -1.8282402, -1.7095118,\n          -1.8318115, -1.9583569, -1.9977958, -1.85578, -1.6765971, -2.2481804,\n          -2.2947147, -1.9133995, -1.8031982, -2.179512, -2.439674, -1.8818381,\n          -1.8582158, -1.9036092, -1.7120684, -1.8141488, -1.7251884,\n          -2.0264452, -1.8911572, -1.8964896, -2.2973378, -2.332079, -1.8439084,\n          -1.7138196, -1.7435124, -1.8923931, -2.0120213, -2.3480647,\n          -2.4271514, -2.6368625, -2.3307495, -1.9175985, -2.4014747, -1.911802,\n          -1.8705333, -2.248779, -1.97523, -1.9332904, -2.081916, -2.0845778,\n          -1.8409702, -3.1433861, -2.1671054, -2.2489593, -2.0810833,\n          -1.9405694, -1.9257721, -1.6870952, -1.7625767, -2.321876, -2.5269024,\n          -2.659979, -2.3002388, -2.2201302, -2.3069, -2.1379435, -3.4479458,\n          -2.259122, -2.186968, -2.7688255, -2.7101648, -1.8897277, -2.687137,\n          -2.4541807, -2.3081462, -2.0229635, -2.012898, -1.8474618, -2.2427373,\n          -1.9241083, -2.318026, -2.2848606, -1.7845504, -2.4813504, -2.520892,\n          -2.1248918, -1.9119974, -1.9928011, -3.4082627, -2.4585147,\n          -2.3150163, -2.4207702, -2.5795302, -2.2035196, -2.4063442,\n          -1.8016686, -2.0338738, -2.1928742, -1.7814916, -2.5912824,\n 
         -3.2466595, -2.373099, -2.3038535, -3.493652, -3.802891, -2.490586,\n          -2.7070038, -2.9038296, -3.8930063, -2.906122, -5.50004, -2.2162237,\n          -1.9646659, -2.7175615, -3.1687071, -2.3840399, -2.0239818,\n          -1.9653404, -6.275639, -4.7269425, -2.6616986, -2.9380362, -3.3301127,\n          -2.207981, -2.511488, -2.1947012, -3.2296078, -2.3584638, -2.737618,\n          -3.1801414, -4.861876, -3.4092803, -5.0118575, -2.5117936, -3.616494,\n          -3.2554538, -3.9249454, -2.7289634, -3.2955859, -5.0465126,\n          -5.5747933, -4.9433146, -2.879198, -2.9027388, -1.7449051, -2.3635547,\n          -2.967493, -4.3145137, -2.5878572, -2.690465, -3.0125632, -3.3281112,\n          -3.3097851, -2.305855, -3.4226742, -3.9484427, -4.643662, -2.438772,\n          -4.3584895, -3.1132748, -5.7082057, -3.675992, -3.7442765, -3.782986,\n          -2.5213556, -4.175735, -2.4869661, -3.764512, -3.3696184, -3.76912,\n          -2.955294, -2.455391, -3.1660938, -3.0430512, -2.9759614, -2.8468833,\n          -2.6680672, -2.6251414, -3.0337079, -2.1241593, -2.333803, -2.6122713,\n          -2.257843, -2.6162577, -2.5634978, -2.5290399, -2.7698011, -4.878759,\n          -2.4674652, -4.0861626, -2.8145604, -2.60121, -3.1464317, -2.9476178,\n          -2.1492608, -3.8819375, -3.5344517, -3.1717658, -6.760232, -2.2543252,\n          -2.871668, -2.0001733, -2.7306046, -2.9972532, -2.0358295, -3.7240984,\n          -3.764603, -4.331714, -3.5213747, -2.6330638, -2.3694458, -2.447882,\n          -2.9419396, -4.247825, -2.8564208, -3.6029963, -2.990521, -3.9543157,\n          -2.5195994, -2.9512708, -1.9221497, -2.3364508, -2.380883, -2.5922043,\n          -2.7262871, -2.2678275\n        ],\n        \"pointIndex\": [\n          3, 1256, 254, 1198457468, 1360952450, 26435319, 922891573, 1706818842,\n          291132708, 351232349, 1635555717, 1039896802, 1655975837, 1270526413,\n          284925168, 119876470, 377448719, 422047276, 689131536, 1610442304,\n        
  764310952, 1031548427, 999931524, 1834911269, 608116459, 65495470,\n          1912444118, 669922830, 1541005390, 1176102810, 1607813297, 447779532,\n          1448688105, 826406687, 1845115835, 1829745202, 1675928325, 1163469776,\n          783631286, 1021157742, 838796421, 1029355220, 1576427283, 1479123821,\n          1553168870, 1255746814, 1802114069, 60950518, 161781672, 281234833,\n          1533432341, 1634075043, 1005880675, 1354111505, 1399435003,\n          1882301970, 334464726, 1125525131, 27011306, 658430381, 1673032764,\n          693793304, 833452546, 424407824, 165459134, 621642345, 1649361131,\n          216798098, 836256009, 1440847747, 555067145, 14824669, 1565147410,\n          1091818195, 672786451, 1580259905, 814844409, 5187055, 1264954531,\n          1034275911, 248635736, 1346824182, 1293371189, 1559256902, 1343891668,\n          1543671026, 1653822480, 1572339\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 254,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -7786205468076532075\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4614998, -1.4620378, -1.4752388, -1.4637798, -1.4655621,\n          -1.4821007, -1.4969317, -1.4724, -1.4756373, -1.4745526, -1.5583972,\n          -1.4842486, -1.5277389, -1.5109748, -1.5362148, -1.5139849,\n          -1.6300763, -1.5247805, -1.5487496, -1.5125206, -1.6450458,\n          -1.7908546, -1.5639207, -1.6074495, -1.5013287, -1.6458435,\n          -1.5291392, -1.5445955, -1.6785212, -1.7640712, -1.5476627,\n          -1.5836003, -1.5833039, -1.6434343, -2.0564253, -1.5250986,\n          -1.5665632, -1.6909691, -1.5759987, -1.7525314, -1.5801365,\n          -1.9565464, -1.7089354, -1.8142347, -1.9809933, -1.5648323,\n          
-1.5719103, -1.6796517, -1.616994, -1.9254856, -1.58162, -2.0042083,\n          -1.918308, -1.7683012, -1.667526, -1.5528598, -1.7119304, -1.7606535,\n          -1.7034885, -1.8455615, -2.1602297, -1.7621607, -1.6108712,\n          -1.6713388, -1.9926528, -1.9681088, -1.595653, -2.058484, -1.7006544,\n          -2.3276198, -2.0781496, -2.2205381, -1.5307153, -1.6730161,\n          -1.6707381, -2.025199, -2.0504923, -1.7690312, -1.6877911, -1.9043152,\n          -2.062953, -1.6100355, -1.7021533, -2.6426606, -2.976421, -2.3672402,\n          -1.8924923, -1.9037335, -2.3610914, -2.0577476, -2.0118337,\n          -1.6866783, -2.6741467, -1.6573713, -1.8338802, -3.272017, -1.6896793,\n          -1.8266575, -1.6926943, -2.1101823, -2.249881, -2.7457533, -1.9911,\n          -2.1694999, -2.8206563, -1.9598428, -1.9498309, -1.8770298,\n          -2.8078392, -1.747417, -2.0108464, -1.9295949, -1.7438056, -2.2001588,\n          -2.4090858, -1.9628894, -3.2266276, -2.2227662, -1.9527277,\n          -2.3317513, -3.043289, -2.3646955, -2.2974114, -2.3026936, -2.0654562,\n          -2.101165, -2.0241485, -3.2078626, -2.1115484, -3.375203, -3.4619694,\n          -3.1447875, -3.0205579, -2.9116719, -3.0496647, -5.631469, -2.851554,\n          -1.872976, -3.534869, -2.4107833, -2.4627678, -3.9677198, -2.5221946,\n          -4.2509003, -4.347851, -1.7428657, -2.2097461, -3.1804452, -1.7241619,\n          -2.1284466, -2.7373052, -6.491934, -5.604398, -2.7174814, -2.0809023,\n          -2.070087, -3.1101959, -2.0747929, -1.7276424, -2.1270373, -2.531271,\n          -2.9381006, -2.3071568, -3.4840255, -3.1525218, -2.1313589,\n          -1.7235185, -4.1241984, -4.0011544, -4.690316, -3.1714647, -4.109093,\n          -3.5499568, -3.7116437, -2.3376155, -2.4696362, -2.3397548,\n          -3.4042199, -4.819347, -2.3679986, -4.512915, -2.5042548, -3.8902445,\n          -2.830531, -1.718399, -6.8237643, -2.9122827, -2.0612462, -4.492936,\n          -2.6069682, -2.9106145, -5.3700852, 
-4.678593, -1.8033797, -2.726209,\n          -2.9032097, -2.6872823, -2.6334465, -2.3258445, -3.0594003,\n          -2.6481984, -3.4110737, -2.656895, -4.843449, -2.7703316, -3.7980182,\n          -2.426165, -4.9782662, -2.9232721, -5.033615, -3.3354278, -2.5321763,\n          -2.607751, -2.2415302, -2.4685838, -3.2702944, -3.4449472, -5.0143695,\n          -3.24149, -2.8240068, -2.4171066, -3.502546, -2.3196597, -5.5846643,\n          -5.161543, -1.9018593, -3.8895326, -3.0670464, -2.640248, -3.7918057,\n          -4.233302, -4.47031, -2.205188, -3.6447008, -4.1468525, -3.8840435,\n          -3.281042, -2.0309126, -2.9352825, -2.4804878, -6.0481863, -3.061815,\n          -7.0423317, -3.2203004, -3.7399385, -5.456574, -2.5922499, -2.6433997,\n          -3.053929, -2.068918, -2.4161696, -2.7882383, -3.3886878, -3.43396,\n          -3.1441133, -3.9203095\n        ],\n        \"pointIndex\": [\n          0, 1226, 256, 849056237, 1330298459, 142932, 101177453, 916069392,\n          1286682669, 202402972, 950065220, 823981583, 900478237, 252922177,\n          317882587, 379601170, 1350790257, 1414449496, 675557575, 514428925,\n          629544045, 937023410, 993595804, 1643487381, 1162308875, 262947398,\n          1078715087, 716065612, 308881963, 1458484511, 31188202, 363072657,\n          395901610, 1628006938, 1133723048, 504229182, 1570891080, 833597151,\n          758041359, 1774356767, 1606186339, 818151225, 1802949039, 1044115799,\n          1482425745, 1123123331, 1421308938, 254357493, 112360527, 1735001320,\n          26132880, 287318462, 1675444151, 299646686, 1391296817, 378142025,\n          1386032200, 546779461, 343148943, 1475705046, 1842922609, 370456911,\n          1559704248, 71726850, 86227771, 782278457, 3370593, 1088871044,\n          369336061, 1304912692, 1540853432, 19260151, 559985974, 810439446,\n          940631817, 881244164, 713562624, 978060543, 724323672, 1191640444,\n          1110303134, 827206577, 1679472034, 945460625, 519113940, 
1014641438,\n          1361991718, 1723655006, 1226\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 256,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -6279845217872163458\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.5291942, -1.5421118, -1.5291986, -1.5688084, -1.5456706,\n          -1.5932994, -1.5844495, -1.6483736, -1.5697545, -1.602868, -1.5526233,\n          -1.5970722, -1.6185194, -1.5882381, -1.6166805, -1.7459667,\n          -1.6640987, -1.5835897, -1.6019938, -2.0524082, -1.6345209, -1.679469,\n          -1.5700582, -1.6075442, -1.6975076, -1.643985, -1.6761684, -1.6274849,\n          -1.6280085, -1.7651222, -1.6474775, -1.7666135, -1.915021, -1.6711018,\n          -1.9053582, -1.6919777, -1.5960433, -1.6567732, -1.7585815,\n          -2.2147882, -2.2913113, -1.6530101, -1.761255, -1.7261058, -1.7055616,\n          -1.597077, -2.0509222, -1.6162806, -1.957403, -1.8172238, -1.7253511,\n          -1.7148412, -1.661545, -1.7858291, -1.7208059, -1.8286492, -1.8421198,\n          -1.7606053, -1.6374458, -1.9475904, -1.8031894, -1.7179695, -1.653238,\n          -2.505366, -1.8491501, -1.93492, -2.1802382, -1.7455585, -2.622658,\n          -1.9209028, -1.9228863, -2.0540936, -1.7093084, -2.35584, -1.6626124,\n          -1.7910569, -1.6780435, -2.0860777, -1.9547757, -2.6014528,\n          -2.4743774, -2.6695936, -3.5200083, -2.4638808, -3.6596913, -1.772456,\n          -2.8461657, -2.3185823, -2.1135888, -1.922694, -1.7556864, -1.7284646,\n          -2.1148572, -2.646042, -2.5979016, -2.307461, -2.490812, -2.1498966,\n          -2.2652745, -1.9508317, -3.3524008, -2.6338513, -1.8379326,\n          -2.0284708, -1.9257252, -1.780004, -2.5826719, -2.2029107, -2.1302207,\n          
-1.886495, -1.8473617, -1.9395412, -1.908353, -2.0144832, -2.8429317,\n          -2.0034041, -3.0488722, -2.0849218, -1.97875, -2.5612664, -2.8554602,\n          -1.9041004, -2.3833334, -1.9720411, -2.2629402, -2.0808895,\n          -1.6719692, -2.763051, -2.5273015, -1.9659427, -4.1709867, -3.3429592,\n          -2.7491894, -2.3433127, -3.0188322, -5.337598, -2.0138423, -3.548501,\n          -3.0431871, -3.450968, -2.0977263, -3.5403953, -5.311833, -2.9877274,\n          -2.6960602, -2.1924443, -2.0940046, -3.608719, -3.313874, -1.9078962,\n          -1.7662423, -2.491588, -2.115966, -3.0886564, -3.761888, -3.0473785,\n          -2.3578377, -3.1963868, -2.0134742, -3.3740778, -3.6406064, -3.475895,\n          -2.6934884, -3.090138, -2.8647952, -4.240299, -5.058313, -3.3151815,\n          -2.7376516, -4.3686676, -4.85089, -1.941006, -3.5042632, -2.961024,\n          -3.1213286, -2.3416991, -2.4683144, -3.9237976, -2.263143, -2.2315419,\n          -3.6002877, -4.234399, -2.9994457, -1.8865396, -5.071279, -3.388649,\n          -3.2096522, -3.0233998, -3.828761, -4.304722, -4.7664814, -4.2962723,\n          -3.6441188, -3.5908008, -2.7482326, -2.2110183, -2.3329499,\n          -2.7433815, -2.954774, -2.034433, -4.5096397, -3.4059017, -3.5646608,\n          -3.219465, -2.679943, -2.9957838, -6.4067445, -3.1729321, -2.4494479,\n          -2.172322, -2.711321, -4.449049, -3.6060262, -5.8164296, -2.6889763,\n          -2.3064306, -5.6516323, -4.252806, -2.296698, -2.504121, -2.5867362,\n          -2.494824, -2.026782, -4.488038, -2.5933194, -2.6822002, -2.3643074,\n          -2.945674, -2.6898968, -3.0333946, -3.0024295, -2.045695, -2.069347,\n          -3.964504, -6.586258, -4.176414, -3.3635936, -2.273528, -2.857833,\n          -3.2752182, -3.1722167, -2.9160614, -3.245185, -5.39917, -2.0929382,\n          -2.39346, -3.625814, -4.11937, -2.0917597, -2.3497598, -2.4011214,\n          -3.5981495, -2.1429238, -1.8076006\n        ],\n        \"pointIndex\": [\n          0, 
1247, 254, 1207393395, 1282199790, 1517355764, 876092348,\n          1851235249, 1031800199, 382474110, 1568060881, 1438044522, 980294924,\n          1319389343, 300434136, 530712929, 1481478031, 413712687, 473858392,\n          544332911, 40496455, 750176439, 944546922, 1842198435, 264523950,\n          1185384991, 232310, 1422329646, 328917204, 1779388631, 674935981,\n          1746595928, 1637280667, 1198686133, 1793063872, 1330240837,\n          1433095016, 1837934942, 760884769, 690073014, 785842974, 1094420146,\n          956486752, 1191038798, 1807988695, 141741757, 1238555458, 355134539,\n          402731252, 1937625075, 777053484, 948757244, 1053514550, 155850445,\n          1903432337, 1606054283, 1619661043, 1739808340, 888819597, 1528127005,\n          1644004700, 971895458, 46555885, 1170026704, 443882870, 556088262,\n          1784744312, 1547430962, 492864216, 1693527228, 531323225, 547909999,\n          1213620485, 1055342901, 1667520910, 835400368, 1505096003, 710755318,\n          734421719, 887583508, 1880797967, 1782520631, 921168046, 1628462655,\n          1381801248, 1642730788, 1806492239, 1533567\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 254,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -6736865570805532185\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6270272, -1.6364937, -1.6316625, -1.6581007, -1.657373, -1.6732012,\n          -1.6403781, -1.666901, -1.716661, -1.7204353, -1.6693875, -1.6810037,\n          -1.6786739, -1.6509911, -1.6405945, -1.6681459, -1.7152675,\n          -1.9488813, -1.7760447, -1.9936274, -1.7521713, -1.7592715, -1.902406,\n          -1.6905272, -1.8479546, -1.751047, -1.7905741, -1.6934325, -1.6789583,\n          -1.6701736, 
-1.6564382, -1.7715598, -2.0151927, -1.9195334,\n          -1.8028382, -1.9663247, -2.1656272, -2.296082, -1.840917, -2.1362383,\n          -2.0696712, -1.8631237, -1.767524, -1.7651434, -1.8536345, -2.0365477,\n          -2.1101491, -1.7590829, -1.7541672, -1.8802052, -1.9907873, -1.883789,\n          -1.9589584, -1.937726, -1.7951763, -1.9418979, -1.9566839, -2.0244322,\n          -1.9384271, -1.6788492, -1.7451121, -1.7861428, -1.6852266,\n          -2.5215356, -1.9013051, -2.037079, -2.0577483, -3.0182664, -1.9487162,\n          -1.9190514, -1.8165746, -2.0850487, -2.0822523, -2.2335765,\n          -2.4680552, -2.363522, -2.3838506, -2.204269, -4.3974123, -3.3375227,\n          -2.7195897, -2.4727077, -2.1204584, -2.6764693, -2.1020103, -1.989323,\n          -1.9542483, -2.167528, -1.769592, -2.1437624, -2.7654886, -2.1714416,\n          -2.6895342, -2.7113855, -2.3920598, -2.157461, -1.9640378, -1.7977059,\n          -1.7815706, -1.9780982, -2.2057226, -2.116635, -2.3563638, -1.9742705,\n          -2.9931417, -2.392908, -2.28999, -1.9452024, -2.6425848, -2.6401994,\n          -1.9202209, -2.5856595, -2.1273446, -2.2240243, -2.129439, -3.3542998,\n          -2.2884893, -2.4859428, -2.2571838, -1.7519403, -1.9292167,\n          -1.8874947, -2.1244519, -1.9506923, -2.0909913, -2.3313808,\n          -1.7003025, -2.9537804, -3.5986166, -2.7357392, -2.283334, -4.7310257,\n          -2.9838123, -3.3083916, -2.284723, -4.025685, -3.6454818, -2.046851,\n          -3.136488, -4.9756856, -1.9540596, -5.2797184, -4.224498, -3.102977,\n          -3.69534, -2.6488783, -3.182275, -3.6669807, -4.6564136, -3.294526,\n          -2.7974358, -3.3393862, -3.445514, -3.427641, -3.7099073, -2.8673236,\n          -3.67386, -4.8217087, -4.5653443, -3.4503992, -3.6306133, -5.5046973,\n          -4.169761, -2.6254187, -4.3230076, -3.2810805, -2.551275, -3.7663214,\n          -2.8015034, -4.2319894, -2.425944, -2.796178, -2.7693992, -2.4642992,\n          -2.7877843, -4.520319, 
-3.8294418, -4.1778226, -2.1968145, -2.6842058,\n          -5.268678, -3.4234889, -3.6759183, -2.393645, -4.843274, -2.8643084,\n          -3.3054264, -4.0527077, -3.1848598, -2.6404333, -4.456477, -2.7478602,\n          -2.5648475, -2.6379373, -4.831928, -2.3397806, -1.9601725, -3.099352,\n          -2.1764588, -3.9158375, -2.2227128, -2.9893708, -2.8553996,\n          -2.6301746, -2.118248, -2.6538143, -5.337449, -1.9940683, -2.7780764,\n          -3.9678357, -3.0879936, -3.0710263, -3.4833937, -2.4810443,\n          -4.2350316, -4.8945765, -3.2219045, -2.769202, -3.0021436, -2.9778628,\n          -3.6210992, -2.2141361, -2.673273, -3.2167678, -3.5103796, -8.597359,\n          -3.5799994, -2.4545393, -4.5522933, -2.4809632, -4.8563533,\n          -5.3535905, -4.0583744, -3.7414627, -4.6650076, -2.496172, -5.1984487,\n          -2.5022283, -3.1943207, -2.0961633, -4.3724174, -2.932501, -2.277216,\n          -2.0648792, -2.928093, -2.7486837, -3.541168, -2.4924767, -2.1827936,\n          -2.2467203, -3.017348, -2.658529, -3.090261, -3.782347, -1.7620225\n        ],\n        \"pointIndex\": [\n          5, 1255, 255, 1956468551, 1616157286, 1609746451, 488452057,\n          930233117, 1906460260, 1725851027, 786295560, 153864773, 25689970,\n          1908503004, 281243365, 1314661265, 1468990517, 1176275018, 1622263040,\n          577357105, 1464775084, 709924167, 889810156, 1877489160, 627129837,\n          60480977, 1256707707, 522403349, 1632370727, 1704381890, 887374661,\n          442182691, 1755137153, 535785731, 1750574627, 1588771769, 551572559,\n          1056180673, 630785850, 1720527106, 699769889, 208720950, 1142427647,\n          928764231, 1489358879, 1139773552, 1028238333, 1522940943, 100212860,\n          264566646, 839567687, 308532422, 704930029, 310113401, 1787203725,\n          357426754, 1167751160, 1145759838, 1240995885, 58971194, 1280434039,\n          1530888301, 1606015708, 52961117, 968535734, 653293164, 726161111,\n          926860836, 
503605708, 1699615712, 264175794, 985695770, 554952958,\n          595565852, 629290955, 646856226, 197718828, 751350228, 1545208118,\n          1702895684, 762738313, 1039790708, 878663156, 1636895505, 1299674101,\n          1169950849, 1387813939, 1863833561\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 6873559043919805206\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.5478095, -1.5580735, -1.5586025, -1.588912, -1.5624557, -1.5931304,\n          -1.5787097, -1.6516792, -1.7199396, -1.5835112, -1.5690824,\n          -1.6260145, -1.6317056, -1.5840569, -1.6037713, -1.7980132,\n          -1.8133217, -1.7965556, -1.7716056, -1.5857275, -1.5960587,\n          -1.7305709, -1.7159228, -1.7437992, -1.6637305, -1.6374578,\n          -1.8194374, -1.6424956, -1.6265041, -1.7596669, -1.6393709,\n          -1.8227845, -1.8477689, -2.161068, -2.0182776, -1.8059037, -2.1590114,\n          -1.9517924, -1.7850544, -1.6919967, -1.6256903, -1.7500316,\n          -1.6069838, -1.7542928, -1.9830415, -1.963632, -1.8992164, -1.7973434,\n          -2.1416883, -1.6798558, -1.7398648, -1.6805445, -1.9159135,\n          -1.8302265, -1.8452566, -1.7396388, -1.724929, -1.8845452, -1.6393856,\n          -1.9198812, -1.799096, -1.7613573, -1.6581032, -1.8339019, -1.9424281,\n          -2.7757666, -2.4357708, -2.515062, -2.4722068, -2.082221, -2.2053227,\n          -2.277163, -1.8077579, -2.7880044, -2.4332013, -2.4218073, -2.0808065,\n          -1.8147348, -2.215576, -1.8046377, -1.7593033, -1.777868, -1.7045307,\n          -2.2772882, -1.9263648, -1.8515847, -1.6312764, -1.9708276,\n          -2.3266296, -2.1762302, -2.0596678, -2.0727565, -2.1242344, -1.955638,\n  
        -2.0269237, -2.80806, -4.395095, -2.7257683, -2.1824844, -2.2855716,\n          -1.9568323, -1.9669816, -2.5572267, -1.7785479, -2.039832, -2.551597,\n          -2.0521724, -2.0065117, -2.034989, -1.9030262, -2.016063, -2.2545946,\n          -2.2300994, -3.4435954, -1.9981797, -2.0935466, -2.1631384,\n          -2.1183298, -1.6487999, -1.9733895, -2.2379096, -1.8148649,\n          -1.8340958, -2.0860653, -2.1912777, -1.8835979, -1.8014612,\n          -3.1797936, -5.339011, -3.2961295, -2.7358942, -2.8153527, -3.3449876,\n          -3.0131137, -3.142744, -2.559853, -2.584647, -5.9994397, -3.5015721,\n          -3.2383192, -2.6384623, -2.6504884, -2.5027266, -3.348065, -2.329989,\n          -2.7779088, -1.8919544, -3.3424976, -3.620949, -3.2293108, -2.8661213,\n          -2.9987667, -3.2882886, -2.2421741, -2.9003553, -2.4627187,\n          -3.7794378, -2.3688264, -3.7690911, -2.6497767, -3.440573, -2.2225194,\n          -3.4665196, -3.1266437, -3.9963698, -2.394353, -2.536245, -5.682287,\n          -2.7054744, -3.5295506, -3.164668, -2.7427077, -3.303546, -1.8590448,\n          -2.207797, -2.018024, -2.0527718, -3.4058433, -4.380702, -3.5663502,\n          -2.4080958, -2.711812, -4.338985, -2.8664718, -3.122034, -2.5750837,\n          -2.2175708, -4.8892264, -3.1534848, -2.092666, -2.328548, -3.0831187,\n          -3.136449, -7.84776, -6.210704, -2.9239645, -4.766644, -2.2623096,\n          -3.40043, -2.3112707, -2.9403992, -2.5001543, -3.4736223, -5.2151012,\n          -2.6916144, -3.540305, -4.12798, -2.7393646, -2.1685767, -2.746256,\n          -2.8794262, -4.1864066, -4.8762226, -2.3647995, -3.9723904,\n          -2.4144828, -4.3872066, -2.2839453, -2.4254746, -2.037719, -2.6945152,\n          -2.326007, -2.9042437, -4.6150465, -4.993203, -2.8662229, -2.7774584,\n          -3.4765794, -4.0050497, -2.058355, -2.7188106, -2.258685, -3.3639786,\n          -4.753893, -4.302262, -2.3484764, -3.6190546, -2.398666, -5.012479,\n          -2.397243, -4.971586, 
-3.7006304, -7.620705, -2.0435278, -2.7693655,\n          -4.205434, -2.1007671, -2.8852465, -2.899561, -2.9529257, -3.2352219,\n          -2.7577133, -2.4206066, -3.002688, -1.9380794\n        ],\n        \"pointIndex\": [\n          4, 1248, 255, 797085272, 917719120, 665845471, 611172034, 1738275277,\n          1155010306, 863175673, 502781864, 733921225, 1824565080, 798338759,\n          809817756, 333197546, 386844836, 879126480, 477362820, 1241127680,\n          629887312, 3698656, 1751740488, 1724705441, 486849765, 765800921,\n          302530205, 319812420, 1537887528, 1335616466, 715588314, 409087041,\n          1014476770, 131534629, 155377144, 519665801, 979042494, 556013783,\n          1262786447, 704272033, 1808017009, 1067124924, 983471314, 1115367593,\n          1777420339, 1551781689, 792743958, 1872401644, 1369229168, 176736649,\n          7387787, 1558023078, 191486482, 762348003, 1365673732, 341034796,\n          171526140, 25179000, 1076981259, 1260361787, 521298593, 866513695,\n          502099238, 90562059, 434354796, 151897813, 1186792304, 174131077,\n          1354707555, 1337800387, 964071028, 509614955, 1548920373, 1450283480,\n          818029985, 811502628, 1472466978, 1404935992, 924462353, 1755394524,\n          1192694550, 217950647, 1081151410, 1049347660, 649725313, 1372758617,\n          1480160371, 1929596516\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": -919230996305300857\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.6287293, -1.6435285, -1.637755, -1.6691914, -1.645258, -1.6442953,\n          -1.6410506, -1.6916354, -1.7190585, -1.761682, -1.6499045, -1.6932083,\n          -1.729453, -1.649677, 
-1.6591613, -1.6945941, -1.8920058, -1.7502831,\n          -1.7589148, -1.844166, -1.8389727, -2.1142673, -1.722116, -1.7554877,\n          -1.8476304, -1.7322166, -1.7614912, -1.7297312, -1.6993097,\n          -2.0708926, -1.6952051, -1.7018081, -1.7204852, -1.9380832,\n          -1.9250269, -2.1003716, -1.810654, -1.7863406, -1.8420278, -1.923125,\n          -2.0683916, -1.8779187, -2.0993567, -2.2440252, -2.4986584,\n          -2.1349447, -1.8286045, -1.8319627, -2.963366, -2.2319872, -2.125827,\n          -1.977528, -1.797223, -1.8572384, -1.9525493, -2.1618528, -1.7674576,\n          -1.7466911, -1.8013276, -2.0905416, -2.21956, -1.8042545, -1.6955078,\n          -1.9114436, -1.7379467, -2.8870742, -2.0274715, -1.9450583, -3.025044,\n          -1.9799057, -2.2283165, -2.344057, -2.1447797, -1.8842825, -2.1390884,\n          -2.1928966, -2.1094954, -2.474272, -2.218279, -2.701682, -2.0378191,\n          -2.3359075, -2.3428192, -1.9566844, -2.5642297, -2.1202443,\n          -2.4547656, -2.5752928, -2.4486563, -2.5106273, -2.9675372, -2.722326,\n          -2.5456028, -2.1745915, -1.8693612, -1.8518169, -2.08939, -3.7104192,\n          -2.977432, -2.5043721, -2.6941476, -2.5780845, -2.8901272, -2.3614352,\n          -2.4190023, -1.9482347, -2.2259426, -2.6558018, -2.9252875, -2.087803,\n          -2.0049474, -2.4985342, -2.263475, -1.8067365, -1.8575746, -1.8400688,\n          -2.374721, -2.1351123, -2.0048347, -2.1655703, -2.787334, -2.4745865,\n          -2.7357888, -2.3446178, -1.8706708, -2.1889246, -1.7161208,\n          -2.8049548, -1.9561678, -2.6307518, -2.2830336, -3.503847, -3.7583747,\n          -2.5329049, -4.5742164, -3.3736012, -2.9679334, -3.1773055,\n          -3.1729085, -5.4354944, -4.8199077, -2.8325222, -2.62505, -2.6489146,\n          -2.9634616, -2.2020204, -2.314265, -2.578448, -2.7460446, -2.487839,\n          -2.949279, -2.9059079, -3.3980107, -2.5645654, -2.7129917, -4.727363,\n          -2.5351973, -3.9054053, -2.6798823, -2.7133641, 
-6.961175, -4.145108,\n          -2.8550694, -2.4107604, -4.073314, -2.3849046, -3.8040702, -1.9988089,\n          -2.6826816, -2.958969, -2.8348746, -2.4423566, -3.2900076, -3.5124567,\n          -3.0276008, -2.6424477, -3.222973, -2.7755873, -3.1440806, -3.4846766,\n          -3.0851703, -3.4664392, -3.2701242, -4.641508, -5.2022886, -5.167342,\n          -2.9217503, -2.6460018, -4.8421874, -3.2255902, -3.8385496,\n          -2.7864158, -3.049552, -4.733867, -4.454487, -3.9694974, -4.1708393,\n          -4.4392476, -3.2127802, -3.1510036, -2.6468732, -2.9868522,\n          -4.7008367, -3.4651322, -2.6355107, -4.7546005, -3.0883682, -2.952955,\n          -3.146033, -2.7686331, -2.429948, -2.7047617, -2.456339, -3.1010394,\n          -2.527437, -3.8453908, -3.0544543, -2.9318485, -5.533993, -2.0890853,\n          -2.7984624, -5.6635385, -4.536752, -2.9831543, -2.8688128, -2.588757,\n          -2.9604492, -2.4847069, -2.9993773, -4.0434513, -3.3012896, -2.394382,\n          -2.7405553, -4.2170815, -4.238273, -3.1518064, -4.3944273, -2.0580623,\n          -2.0345478, -4.6425953, -2.7166886, -4.0035863, -3.0050988, -6.148036,\n          -3.6872802, -5.858176, -2.7695863, -3.1716938, -2.6447742, -2.1457007,\n          -2.0553026, -2.922085, -2.994673, -2.9905138, -1.9678352\n        ],\n        \"pointIndex\": [\n          3, 1253, 255, 1468939989, 348201086, 311481418, 1733845684,\n          1686180818, 280805918, 1795342310, 1372015301, 1359497846, 1486453725,\n          1260359975, 1033476187, 361534460, 1355030892, 1272463141, 805838947,\n          199255874, 972786110, 346438616, 1722030182, 1813644722, 191076355,\n          981070544, 284836642, 310910058, 989748829, 1799011465, 1134929181,\n          1911068180, 1831200079, 1413073718, 1628009934, 1675666577,\n          1909251535, 300972608, 78762105, 704813320, 309117930, 1038878914,\n          1271574588, 94968480, 1541144338, 1719937349, 1811721669, 471265264,\n          1530779806, 1218782107, 390946267, 
296023026, 473573629, 1716169005,\n          331840464, 371289284, 644432640, 481715699, 1673291390, 394237884,\n          1112141393, 1866968764, 444447526, 1055838466, 1298556873, 526621749,\n          156912001, 163208658, 195519246, 1823671856, 1244638050, 1730451265,\n          1494404993, 634462173, 1163393280, 808705820, 1525716283, 736456317,\n          1566828723, 572959739, 1176229499, 1011709746, 1060291886, 1303034817,\n          1428442655, 1441529443, 1522581783, 1918611098\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 1235914033936469780\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.5345111, -1.5345854, -1.5352143, -1.5418011, -1.5640353,\n          -1.5873953, -1.5440509, -1.5723784, -1.5536164, -1.6138204, -1.564567,\n          -1.6089317, -1.6009803, -1.5548254, -1.5872757, -1.7854967,\n          -1.7077621, -1.618875, -1.5881951, -1.7608906, -1.74024, -1.6480869,\n          -1.5943167, -1.6137693, -1.6191088, -1.6046709, -1.637026, -1.5612868,\n          -1.5652598, -1.5936134, -1.656476, -2.0284445, -2.0513895, -1.7272295,\n          -1.8274367, -1.7010891, -1.7508259, -1.6084881, -1.7268528,\n          -1.9210428, -1.8405379, -1.9598054, -1.7992055, -1.6998166,\n          -1.7354546, -1.675926, -1.6926972, -1.8770659, -1.8663892, -1.7692633,\n          -1.6374545, -1.6231381, -1.8434621, -1.7333738, -1.948892, -1.5747668,\n          -1.7588967, -1.6761369, -2.1003804, -1.6753234, -1.6181744,\n          -1.8790652, -1.661982, -2.0956454, -2.200539, -2.0995784, -2.225362,\n          -2.2545896, -1.9625192, -1.8587768, -1.8545741, -1.7148182,\n          -1.8189396, -1.7725823, -1.8485171, -2.118984, -2.387815, 
-2.064594,\n          -2.0679607, -2.118417, -2.3495748, -2.288614, -2.195366, -2.6614716,\n          -2.2825482, -1.9241642, -3.0691502, -1.7099928, -1.9844245,\n          -1.9936858, -2.0395646, -1.6921089, -1.9774458, -3.1731887,\n          -1.8646569, -1.8896581, -2.0159445, -1.9981244, -2.1396825,\n          -1.8050421, -1.7693808, -1.7676467, -1.754945, -1.6373693, -2.9621675,\n          -2.3039734, -1.8951974, -1.9601555, -1.8143328, -2.4989946, -2.312548,\n          -1.8358022, -2.3151543, -2.063485, -1.8310093, -2.309061, -1.8352809,\n          -2.2402127, -2.1357348, -1.9181471, -2.1218538, -2.4286084, -1.619118,\n          -2.3334422, -2.0407221, -2.2614598, -1.6817223, -2.7123013,\n          -2.9958603, -3.2728026, -2.8368137, -2.5721009, -2.917388, -2.8172667,\n          -5.1267014, -2.5259824, -3.426527, -4.2924347, -4.367054, -3.8996263,\n          -2.4546254, -2.8780682, -3.369605, -1.9061285, -3.7905815, -3.2067957,\n          -2.7068586, -2.0716114, -2.2800162, -2.4191968, -3.452595, -2.120203,\n          -2.369795, -4.5425644, -2.4015663, -2.3730764, -2.5273366, -3.0901675,\n          -2.9005687, -2.5268888, -2.7519774, -2.4042478, -2.5676286,\n          -3.3872604, -2.3477516, -2.5910897, -2.490365, -2.697967, -3.0550823,\n          -3.553559, -2.7715716, -3.4370391, -3.0085537, -3.6120863, -4.689063,\n          -1.7739089, -2.0211194, -2.5067132, -2.825091, -2.0644937, -2.060346,\n          -3.5862482, -2.9754694, -2.7869902, -2.8265479, -2.8258586,\n          -2.4236476, -5.5222154, -4.011036, -5.4852524, -2.257591, -2.3536267,\n          -2.1434913, -3.0729537, -2.503225, -3.1518953, -3.8945484, -3.5541847,\n          -3.574473, -3.5645008, -1.8395015, -2.1069777, -2.280898, -1.9562199,\n          -1.8099992, -2.681795, -4.0627627, -2.3272598, -3.1536226, -3.6603744,\n          -5.376876, -4.4856424, -2.3590813, -2.0735247, -2.5921485, -3.2830138,\n          -2.987672, -5.36235, -2.7352164, -2.5040228, -3.3372958, -3.66952,\n          
-2.796431, -1.9083732, -4.185178, -3.1007457, -2.485229, -2.1849375,\n          -2.8226488, -2.0973449, -2.3647096, -3.5712779, -3.2152066,\n          -2.2329075, -2.6364183, -2.7903652, -2.5476396, -2.842387, -2.2279704,\n          -2.3730114, -2.0485094, -3.2631235, -3.3598142, -2.7192006,\n          -2.5139086, -2.722003, -2.3466024, -2.7279682, -3.6686144, -2.3619401,\n          -3.394032, -2.5690548, -3.4673102, -2.3773215\n        ],\n        \"pointIndex\": [\n          3, 1249, 254, 749619764, 515508889, 953230363, 644120088, 1783281798,\n          1438421982, 1006494365, 1449632781, 933462234, 1241790507, 263002233,\n          291621465, 1311156237, 1622877794, 593024514, 160824440, 84490796,\n          633171281, 823183843, 1353930027, 1936991999, 101048797, 501282098,\n          1527536864, 55971400, 1902732214, 1733274098, 203359947, 140136544,\n          1712388138, 1071987294, 471161193, 522293587, 621210748, 1601017186,\n          1920764524, 1363619146, 1807520155, 461573448, 983873570, 1331965006,\n          1549976524, 825674653, 525980398, 270665261, 683061037, 1187318760,\n          855226838, 581585821, 1425257349, 1051440673, 224458911, 361881947,\n          239661597, 1076350786, 1396027055, 234628851, 1629539413, 401512819,\n          1390505173, 427967097, 1304488839, 445090758, 969427552, 1095121632,\n          33234972, 1633348644, 80685532, 1329840547, 1835358407, 590560865,\n          726010070, 1284999414, 1598481684, 744808929, 757904728, 1398773041,\n          961639521, 1918522845, 1505982507, 1142351156, 588675351, 1779866454,\n          1923068132, 1554810\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 254,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 5961685213686464613\n      },\n      {\n   
     \"version\": \"2.0\",\n        \"weight\": [\n          -1.5257835, -1.532419, -1.526319, -1.5459954, -1.5403318, -1.5273062,\n          -1.5318718, -1.5854601, -1.600429, -1.5905855, -1.576832, -1.5444384,\n          -1.53936, -1.6083705, -1.5532775, -1.7179025, -1.6046104, -1.6268706,\n          -1.6039013, -1.5928894, -1.8332025, -1.6485378, -1.6818303,\n          -1.5466363, -1.5505174, -1.5745901, -1.5889266, -1.676313, -1.6876225,\n          -1.680915, -1.6252509, -1.8780425, -2.0918608, -1.6844703, -1.7588052,\n          -1.9396161, -1.6796147, -1.7520231, -1.6064339, -1.7949163,\n          -1.6063402, -1.9180142, -1.8818871, -1.6793885, -1.67244, -1.9063159,\n          -1.9686803, -1.6347135, -1.8534031, -1.6582611, -1.7758393,\n          -1.9086692, -1.6868783, -1.6774342, -1.6174681, -1.8093826, -1.827288,\n          -1.8499967, -1.7659829, -1.7565888, -1.8133397, -1.6693784, -1.648273,\n          -2.521096, -2.2045913, -2.2039254, -2.3295937, -2.1437054, -2.4531891,\n          -2.0813184, -1.8641522, -2.979186, -2.1612687, -2.1348293, -2.16697,\n          -1.8916192, -1.9236215, -1.7352973, -2.264334, -1.8202388, -2.2695205,\n          -1.9896787, -1.6497238, -2.0752726, -1.9621438, -2.7148943,\n          -1.9871827, -2.7365506, -2.356889, -1.6996434, -1.7351856, -2.150795,\n          -2.0257459, -2.240612, -2.4337668, -1.8312027, -2.4079015, -1.8643912,\n          -1.9720566, -2.5028517, -1.9584169, -2.0469992, -2.5444942, -2.001135,\n          -3.569225, -2.0345218, -1.8616279, -2.5987465, -2.119698, -2.0288913,\n          -1.7859378, -2.6630232, -1.8724684, -2.778035, -2.3339639, -2.0099356,\n          -2.07525, -1.8595737, -1.7922893, -2.3891761, -2.560148, -2.8437333,\n          -2.0655832, -1.7084434, -2.1388345, -2.6832392, -1.7777342, -2.556761,\n          -3.1471772, -2.4486399, -3.2626753, -2.743352, -2.6989517, -2.662875,\n          -4.866611, -4.188736, -2.7151356, -2.8587973, -2.47859, -3.8088737,\n          -2.5240817, -1.9957222, 
-3.138415, -3.2717984, -3.1498153, -2.743579,\n          -2.2526274, -2.50212, -3.9200556, -4.119592, -3.521935, -2.277947,\n          -2.908566, -2.2615526, -3.7282572, -3.1510122, -1.7886689, -3.250285,\n          -2.5849755, -1.9076731, -2.898602, -3.594523, -2.5473993, -2.3753328,\n          -2.273242, -5.8790035, -2.177618, -2.3663774, -2.6003609, -2.8216631,\n          -2.1137898, -2.9474978, -3.3929472, -2.0721483, -2.0494266, -4.468507,\n          -3.115191, -4.9723973, -3.4666471, -2.448805, -2.201985, -2.967275,\n          -2.3713338, -3.3682654, -4.0202723, -3.386346, -2.711816, -2.4446042,\n          -4.2688355, -3.407556, -2.7723808, -3.4934607, -2.4770794, -4.2625604,\n          -3.5663607, -2.7192361, -2.441945, -2.3298247, -2.1854405, -2.5347698,\n          -5.375053, -3.7296784, -2.2532272, -2.3139634, -2.177199, -5.497376,\n          -6.019287, -4.070514, -2.4715588, -3.6453009, -7.0692167, -2.936739,\n          -2.3720882, -4.3689184, -2.1131155, -3.1359951, -5.7391376,\n          -2.3628266, -2.1523256, -2.4543364, -2.521942, -2.490915, -3.972962,\n          -6.118891, -3.3457332, -2.1690152, -2.7532642, -3.3654513, -3.868791,\n          -3.8921962, -2.535788, -3.98272, -2.2380013, -2.293273, -3.3237154,\n          -2.5618138, -2.3382132, -3.2277465, -3.8876097, -4.109236, -4.762749,\n          -3.6733208, -2.9050758, -3.2219455, -2.8485487, -2.9965434,\n          -3.7959926, -3.4480162, -2.7039433, -3.3766913, -3.6542602, -3.55788,\n          -4.494692, -4.54066, -2.5475478\n        ],\n        \"pointIndex\": [\n          1, 1250, 255, 1614311906, 1037541425, 1422066171, 762040836,\n          1923048011, 150118905, 953173356, 76440872, 194981059, 999426963,\n          1706086193, 1594445842, 864392687, 351906466, 1636326003, 1659863674,\n          1654131598, 588612615, 1268906687, 1679779764, 1574393566, 1011133818,\n          264827597, 1867083960, 299138962, 865870158, 657862061, 1240158043,\n          1277437500, 1474414712, 158764031, 
673816356, 609030105, 173575334,\n          774861641, 1670772511, 1562991630, 1103180464, 794742985, 971023046,\n          1177154699, 1549743205, 1225277499, 251542656, 258018327, 263774789,\n          1283647599, 871791224, 291457405, 295674933, 1157160941, 102459636,\n          314173493, 1723731252, 342455699, 1839343407, 398751663, 631047258,\n          1210030052, 1710892087, 1924823826, 903475066, 456327805, 491520307,\n          506106568, 1158303350, 892079952, 536437045, 1117345195, 564512855,\n          1395768170, 1925479604, 1765493362, 1627026636, 1751327113,\n          1188066073, 1917447254, 827535050, 1642368473, 903014297, 1441126861,\n          1130810732, 1224744483, 1811933289, 1935752407\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        \"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 3094355644247862573\n      },\n      {\n        \"version\": \"2.0\",\n        \"weight\": [\n          -1.4739364, -1.4854518, -1.4760784, -1.4875935, -1.4953938,\n          -1.4984169, -1.4879522, -1.5093524, -1.4926078, -1.5171238,\n          -1.5125519, -1.5012732, -1.5166982, -1.4958954, -1.5366755,\n          -1.5484987, -1.5611908, -1.5979398, -1.5603845, -1.6203203,\n          -1.5704353, -1.6633575, -1.5579628, -1.55296, -1.795618, -1.6760824,\n          -1.5176575, -1.5144597, -1.5277035, -1.5477281, -1.7140696,\n          -1.6394001, -1.6954261, -2.1438859, -1.8238163, -1.6423, -1.6687416,\n          -1.8934213, -1.6380768, -1.9984225, -1.8914065, -1.9348294,\n          -1.8795712, -1.7462192, -1.8659137, -1.8379618, -1.5810261,\n          -1.7192446, -1.7008163, -1.8184123, -1.8110999, -2.1787684,\n          -2.2982676, -1.5314186, -1.7548642, -2.2257524, -1.5917569,\n          -1.5637295, -1.5673133, -1.6050491, 
-1.7135774, -2.049922, -2.0660481,\n          -1.7038921, -1.8500408, -1.9410862, -2.1084352, -2.2458594,\n          -2.4118128, -2.025019, -1.9305323, -2.0011356, -2.5528111, -2.0804389,\n          -1.7682977, -2.9587407, -2.0774496, -2.0376956, -1.8831936,\n          -2.3587713, -2.2914603, -2.080167, -2.398474, -2.1165843, -2.2489762,\n          -3.3198617, -2.0660028, -1.7743989, -2.0366867, -1.9493996,\n          -2.1968048, -1.9214082, -1.865101, -1.6677953, -1.7295773, -1.7557685,\n          -1.7215736, -2.1169577, -3.8750894, -1.8312486, -2.2261262,\n          -1.9203904, -2.4061456, -2.5151784, -2.3806129, -2.3084571, -2.506206,\n          -1.9816073, -2.0561907, -1.980592, -2.1949437, -2.2723653, -2.3749077,\n          -1.7950431, -1.8964046, -2.211347, -2.4007275, -1.5689955, -1.6514187,\n          -3.7187233, -1.6301489, -2.086895, -2.1163597, -2.6241238, -3.9407856,\n          -2.7351525, -2.1547961, -2.0345523, -2.2548847, -2.1943324,\n          -4.6028876, -2.1080987, -2.2166712, -2.4134626, -4.2891135,\n          -3.1646965, -3.283529, -3.0363867, -2.654098, -2.5246723, -2.9533222,\n          -2.7589624, -6.8319306, -3.45097, -2.302588, -4.1987743, -3.018338,\n          -3.9248986, -2.2258532, -3.859081, -2.3803658, -4.1095366, -3.1288927,\n          -2.689985, -4.6837077, -3.3727543, -2.7500443, -3.597516, -2.7279165,\n          -2.3633702, -2.3965745, -3.7145114, -3.6214883, -2.0868714, -2.301757,\n          -3.4451625, -4.5945196, -3.219573, -3.4956493, -2.6333444, -3.1672225,\n          -3.5964112, -4.2114186, -3.3155928, -2.5097873, -3.0633156,\n          -1.8225551, -2.144348, -2.2268782, -2.6874378, -2.5341742, -4.0607824,\n          -2.2209594, -2.2272894, -4.5724764, -2.147491, -2.140911, -1.6880952,\n          -6.806554, -2.9362721, -2.1946857, -3.549542, -1.7758566, -1.8011234,\n          -3.9622927, -3.233758, -2.7135792, -4.252686, -4.0482345, -1.9278389,\n          -1.9454994, -2.5433922, -2.2774193, -4.3522453, -2.2904599,\n          
-3.0752342, -2.6221926, -5.464108, -4.4305067, -3.8604193, -2.6281645,\n          -2.3101032, -2.92189, -2.6759644, -2.7982519, -2.4083624, -2.7625816,\n          -4.315713, -2.2733822, -5.1711693, -3.2551575, -2.2869558, -2.6063263,\n          -2.4167163, -3.0222101, -6.9500675, -3.7502477, -3.0935733,\n          -2.0001183, -3.9397662, -2.036932, -5.9469604, -3.63629, -3.1122005,\n          -2.8454168, -3.0099926, -1.9705479, -2.064685, -1.900059, -4.272635,\n          -3.928694, -1.9878477, -1.6362039, -2.645227, -2.5863435, -4.9102893,\n          -2.174294, -3.0281172, -3.4842854, -5.2957344, -5.0918097, -3.1096747,\n          -6.0454197, -3.172589, -2.2439806\n        ],\n        \"pointIndex\": [\n          4, 1256, 255, 1223797928, 1623677654, 1728694488, 1360127196,\n          1237665187, 300008325, 23900434, 533047755, 1038903454, 1296350950,\n          416240210, 954777070, 871612658, 159631247, 440691661, 552496932,\n          410244749, 1009372704, 1600468221, 1374403740, 1918402956, 695135605,\n          1803858985, 1315684846, 1679339055, 1636784866, 1699017295, 672251182,\n          369725123, 1413805818, 517289020, 1133930449, 1906422654, 856131894,\n          865341588, 447977189, 1608462439, 1478336717, 824417366, 1036955096,\n          1376703219, 1627803349, 1415150729, 1522913492, 1806293611,\n          1160439800, 78400409, 1722060119, 1655226301, 1957291000, 315667447,\n          52729117, 1006468, 340123706, 1055091144, 1208275682, 377204605,\n          1179474234, 1073440127, 2998993, 986081357, 805778975, 23156479,\n          1704113892, 854499111, 1629578411, 586232179, 1886933506, 1077656819,\n          645784931, 279521799, 1182801783, 719702406, 1683648103, 771515289,\n          1543232884, 1777928976, 308431624, 1199169854, 1663530999, 1137583284,\n          1505515416, 1819345697, 1817788662, 1965613555\n        ],\n        \"storeSequenceIndicesEnabled\": false,\n        \"size\": 255,\n        \"capacity\": 256,\n        
\"initialAcceptFraction\": 0.125,\n        \"timeDecay\": 1.0e-4,\n        \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n        \"maxSequenceIndex\": 1256,\n        \"compressed\": true,\n        \"randomSeed\": 7829655809554688956\n      }\n    ],\n    \"compactRandomCutTreeStates\": [\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 482049803, 309553323, 126599654, 183052701, 152692025,\n            810097597, 520795982, 186960441, 354890868, 654637203, 858723802,\n            771537355, 487196625, 141715079, 715197657, 744288717, 532366533,\n            700948526, 485148609, 589528758, 874417663, 862655872, 420262554,\n            126768542, 760361961, 122808912, 768002537, 756806186, 510626389,\n            378520593, 85155340, 371617678, 122624128, 270573733, 40487961,\n            543923644, 238230115, 298782203, 141902733, 518077810, 126914412,\n            523006182, 5565\n          ],\n          \"cutValueData\": [\n            66, -110, 90, -115, 66, -109, 119, -68, 66, -112, -121, 60, 66, 68,\n            45, 29, 66, 103, 4, 88, 66, -107, -60, 23, 66, -112, -105, 45, 66,\n            -120, 26, -107, 66, -106, 95, -105, 66, -101, -27, 13, 66, -99, -69,\n            -91, 66, -124, 25, -27, 66, -126, -61, -35, 66, -118, -60, 108, 66,\n            -97, 102, -78, 66, -109, -63, 28, 66, -97, 23, 105, 66, 117, 119,\n            -84, 66, 112, -6, -37, 66, -82, 31, 113, 66, -116, 56, -48, 66, 102,\n            -65, -50, 66, -128, -126, -98, 66, 110, -121, 56, 66, -96, -25, -68,\n            66, -89, -86, 49, 66, 103, 87, -128, 66, -118, -91, 75, 66, 126,\n            -72, 50, 66, -86, -88, -61, 66, -84, 119, 16, 66, -76, 
2, 50, 66,\n            -81, -100, 21, 66, 108, 49, -47, 66, -69, -12, 0, 66, -104, -91, 13,\n            66, -119, 3, 50, 66, 110, 33, -114, 66, -76, 51, -58, 66, 89, 66,\n            68, 66, -78, 59, 39, 66, -110, 70, 5, 66, -119, -55, -114, 66, 101,\n            -124, 121, 66, -72, 117, -100, 66, -68, 38, 77, 66, 94, -106, 15,\n            66, -97, -62, -75, 66, 87, 89, 33, 66, -93, -3, 122, 66, -122, 87,\n            77, 66, -118, -11, -12, 66, -95, -127, 90, 66, -67, 77, 24, 66, 72,\n            108, -65, 66, 95, 62, -64, 66, -72, 5, 119, 66, -112, -99, -53, 66,\n            118, 50, -43, 66, -80, -99, 4, 66, 98, 12, 13, 66, 73, -36, -116,\n            66, 118, -42, 32, 66, -78, -16, -59, 66, -100, 68, 10, 66, -100,\n            -119, 69, 66, 97, -34, -94, 66, 70, -54, -29, 66, 118, -62, 96, 66,\n            -122, 95, -113, 66, 125, 74, 85, 66, -104, 102, 63, 66, -108, -47,\n            26, 66, -73, -94, -123, 66, 91, 114, -124, 66, -88, 104, 16, 66,\n            -109, -34, 80, 66, -124, 78, -73, 66, -99, -49, 45, 66, 112, -127,\n            10, 66, -108, -85, 18, 66, 89, 97, 73, 66, -74, -98, -24, 66, -115,\n            -12, -19, 66, -89, -41, -63, 66, -120, -50, -53, 66, 85, 118, 116,\n            66, -90, -90, 107, 66, 112, -109, 111, 66, -115, 5, 0, 66, -84, 123,\n            14, 66, -85, -77, 29, 66, -98, 73, -58, 66, -112, 101, -38, 66,\n            -104, -68, -17, 66, -119, -76, -122, 66, -67, -81, -66, 66, 111, 86,\n            -59, 66, -123, 114, 110, 66, -107, 20, -100, 66, -88, -76, -13, 66,\n            104, 41, 28, 66, -123, -104, -119, 66, 96, 28, 36, 66, -118, 10,\n            -86, 66, -112, -31, 61, 66, 122, -92, -18, 66, -121, -5, -6, 66, 89,\n            43, -63, 66, 80, 95, 53, 66, -84, 51, -91, 66, -72, 1, 127, 66, -78,\n            -36, -70, 66, 79, -126, -2, 66, 95, 119, 127, 66, 108, 19, 60, 66,\n            114, -51, -90, 66, -118, 96, -58, 66, 108, 122, -82, 66, 119, -33,\n            -112, 66, 88, -9, 85, 66, -113, 111, 27, 66, -125, 
123, 60, 66,\n            -120, -91, -13, 66, -97, -40, -5, 66, 83, 43, -51, 66, -105, 3, 6,\n            66, -81, -13, 126, 66, -88, -27, 39, 66, -123, 23, -111, 66, -109,\n            116, -85, 66, -86, -33, -116, 66, 98, 28, 127, 66, -81, 67, 50, 66,\n            -122, -34, 30, 66, 111, 111, -11, 66, -78, 114, -37, 66, 119, 55,\n            89, 66, -85, -97, 46, 66, -100, 108, -88, 66, -97, 16, -40, 66,\n            -105, 46, 17, 66, 99, 12, -96, 66, 113, 16, -13, 66, -82, -85, -46,\n            66, 112, -13, -59, 66, -126, 25, 10, 66, -111, -4, 74, 66, -78, -86,\n            61, 66, -82, 29, 123, 66, -83, 97, -34, 66, -88, 102, 126, 66, -93,\n            126, -99, 66, -81, 51, 70, 66, -79, 89, 72, 66, -97, -97, -36, 66,\n            -57, 88, 122, 66, -102, 79, 6, 66, 82, 65, -86, 66, 97, -7, 71, 66,\n            114, 89, 77, 66, 88, 40, 114, 66, 103, -3, -1, 66, -69, 64, 94, 66,\n            119, 74, 117, 66, -93, -41, 51, 66, 120, -86, 52, 66, -117, 80,\n            -103, 66, -92, 19, -29, 66, -61, 81, 77, 66, -62, -6, 13, 66, -92,\n            75, 3, 66, -100, -109, 24, 66, -116, 16, 64, 66, 71, -85, -97, 66,\n            -84, 14, 119, 66, -103, -9, -32, 66, -103, -119, -124, 66, -86, 61,\n            122, 66, -118, 23, -64, 66, -94, -38, 85, 66, -88, -92, 35, 66, 83,\n            71, 62, 66, -109, 93, -14, 66, -114, -71, -88, 66, -83, 119, -116,\n            66, -71, -123, 102, 66, -95, -77, 6, 66, -113, 71, -12, 66, -125,\n            97, -93, 66, -126, 88, -31, 66, -99, -121, 66, 66, -106, -85, -47,\n            66, -110, -100, -48, 66, -106, -15, 67, 66, 121, 34, -43, 66, -98,\n            62, 119, 66, -120, 68, 72, 66, 110, -27, 107, 66, -109, 124, 86, 66,\n            -106, -21, -96, 66, -125, 11, 66, 66, -86, 98, -56, 66, -102, -103,\n            106, 66, -89, -3, -125, 66, 119, -14, -6, 66, -80, 115, -122, 66,\n            -79, 90, -26, 66, -107, -60, -44, 66, -96, 48, 15, 66, -74, -88,\n            -30, 66, -69, 56, -115, 66, -118, -86, -69, 66, 
-125, 51, -103, 66,\n            -92, -64, -1, 66, -119, 53, -32, 66, -72, 28, 8, 66, -121, -81, 8,\n            66, 103, -101, 122, 66, -96, -60, -73, 66, -88, 102, 15, 66, -118,\n            127, -58, 66, -123, -37, -87, 66, -60, 19, -48, 66, 117, -78, 53,\n            66, -88, -42, -114, 66, -88, 104, -128, 66, -111, -122, -62, 66,\n            -115, -20, -92, 66, -111, -112, -66, 66, 106, 75, 94, 66, 116, -125,\n            -68, 66, -77, 93, -112, 66, 106, -50, 65, 66, -128, 32, -114, 66,\n            -127, 94, -124, 66, 120, 12, -7, 66, 86, 94, -86, 66, -123, 124, 34,\n            66, 114, 93, -11, 66, 96, -82, 40, 66, 123, 59, 10, 66, 125, -57,\n            91, 66, -93, 52, 83, 66, -100, 37, 94, 66, -109, 55, 86, 66, -112,\n            -2, 97, 66, -117, -15, 43, 66, -103, 36, 79, 66, -97, 87, -90, 66,\n            -77, -117, -116, 66, -121, -29, -83, 66, 121, -16, -1, 66, 127, 113,\n            29, 66, -73, 34, 18\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 1072824191, 486529471, 132093450, 892461317, 153379350,\n            35233988, 21617329, 152299184, 16\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 1054997887, 467661783, 83623930, 637379628, 136901326,\n            58821506, 541822520, 957235200, 515\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -4423608042667571926,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n      
  \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 508926833, 376812577, 1073405887, 803272566, 383352682,\n            796178119, 752602543, 483882278, 39054066, 867271797, 34768183,\n            627817038, 462470014, 215063649, 469595442, 514132970, 40872030,\n            36873947, 581342778, 359845187, 64406823, 107591233, 131545002,\n            93441401, 748471629, 173630950, 840804685, 401046562, 380335662,\n            37067065, 749545335, 899780331, 50157867, 935015887, 737232222,\n            192223463, 484480234, 708107174, 882356085, 203460394, 216864482,\n            182243245, 23\n          ],\n          \"cutValueData\": [\n            66, -94, -88, -9, 66, 118, -7, -36, 66, 96, 48, -96, 66, -88, 64,\n            53, 66, 108, -41, -102, 66, -98, 20, -88, 66, 71, -107, -87, 66, 70,\n            3, -93, 66, -103, 98, -70, 66, 84, -66, 20, 66, -105, -114, -81, 66,\n            -121, -36, 46, 66, -85, 127, 20, 66, -127, -61, 70, 66, -95, 70,\n            109, 66, 95, 125, 71, 66, -107, 75, 89, 66, -119, 49, -18, 66, 99,\n            -65, 9, 66, -83, -3, 114, 66, -91, 4, -102, 66, 99, 30, -53, 66,\n            -120, -123, -42, 66, -119, 116, 89, 66, -99, -29, -25, 66, -116,\n            -51, 19, 66, -103, -42, 53, 66, 83, -56, -2, 66, 112, -39, 101, 66,\n            -77, -10, 111, 66, -100, -50, -108, 66, 125, -42, -22, 66, -98, -22,\n            84, 66, -125, 43, 77, 66, -98, 63, 124, 66, -84, 43, -66, 66, 115,\n            86, -120, 66, -103, -77, -27, 66, -95, 18, -23, 66, 109, -8, -111,\n            66, -113, 81, 14, 66, -111, -125, -18, 66, 82, 20, 126, 66, 124,\n            -99, -52, 66, -113, -61, 118, 66, -76, 13, 109, 66, 118, 61, 56, 66,\n            84, 91, -73, 66, -63, -94, -51, 66, -119, 29, -109, 66, 
112, -86,\n            59, 66, -121, 30, 109, 66, -99, 93, -65, 66, -105, -115, 48, 66,\n            -115, 2, -55, 66, -126, -12, 72, 66, 103, -65, 20, 66, -75, -107,\n            -58, 66, -100, 51, -18, 66, -100, 95, 88, 66, 110, -99, 38, 66, 125,\n            -42, -126, 66, -110, 97, -75, 66, -121, -2, 102, 66, -118, -20, 6,\n            66, -100, 88, 4, 66, -94, -21, 85, 66, 122, 5, 110, 66, -93, -4, 35,\n            66, 85, 84, -68, 66, -84, -105, 4, 66, -76, -49, -102, 66, -86, -13,\n            -11, 66, 86, 66, 107, 66, -98, 117, 53, 66, 98, 30, 87, 66, -94,\n            125, 90, 66, 125, 101, -29, 66, -117, -34, 124, 66, -88, 1, -26, 66,\n            117, 121, -12, 66, -116, 16, -58, 66, -119, -38, 125, 66, -123, 127,\n            -47, 66, 89, 40, -26, 66, 85, 25, -33, 66, 94, 96, 125, 66, 112, 76,\n            72, 66, 87, -81, 17, 66, -116, -2, -33, 66, -96, 76, -109, 66, 106,\n            -113, 83, 66, -128, 8, -121, 66, -120, -112, -69, 66, -89, -91, 104,\n            66, 115, -76, -77, 66, 118, 95, -73, 66, 119, 45, -89, 66, -121,\n            120, -41, 66, 110, 36, -82, 66, -113, -44, -93, 66, -116, -57, 118,\n            66, -84, -99, 67, 66, -114, 101, 58, 66, -92, -100, 58, 66, -74,\n            -61, 11, 66, -112, 107, -100, 66, -113, -81, 11, 66, -88, 68, -17,\n            66, -88, -1, -44, 66, -119, 92, -30, 66, -104, 82, 35, 66, -75, -65,\n            -52, 66, -91, 6, -47, 66, -79, -21, 0, 66, 103, -66, 119, 66, -110,\n            -34, -27, 66, -94, 10, -89, 66, 105, 75, -109, 66, -73, -128, 105,\n            66, -100, -37, -76, 66, 112, -111, 37, 66, 74, 34, 59, 66, -94,\n            -124, 77, 66, -91, -102, 109, 66, 95, -104, -42, 66, -112, -110, 18,\n            66, -128, 60, 34, 66, -103, -88, -55, 66, 85, -11, 91, 66, -128, -1,\n            25, 66, -125, -116, -125, 66, -98, 11, -113, 66, -102, 45, -51, 66,\n            -87, 37, -126, 66, -87, -60, -60, 66, -109, -112, -59, 66, -86, -31,\n            102, 66, -116, -49, 65, 66, -114, 21, 1, 
66, -97, -60, -127, 66,\n            111, -73, 64, 66, -82, 52, -21, 66, -105, -76, -125, 66, -102, 5,\n            -11, 66, -112, 19, 110, 66, -74, 104, 92, 66, -93, 26, -97, 66, 74,\n            115, 92, 66, -94, -36, -60, 66, -114, 0, 18, 66, -76, -69, 107, 66,\n            -87, -62, -33, 66, -92, 111, 121, 66, 125, -62, -33, 66, 121, -72,\n            -10, 66, 92, -65, 2, 66, -107, -22, -108, 66, -110, -92, -105, 66,\n            100, -28, 100, 66, -105, 8, -16, 66, 73, -80, 83, 66, 115, -106,\n            -86, 66, 122, -51, 42, 66, -75, -50, -42, 66, 112, 44, 52, 66, -111,\n            21, 119, 66, -113, 10, 102, 66, -86, -70, 9, 66, 105, 63, 85, 66,\n            -121, -73, 111, 66, -94, -6, -38, 66, -72, -4, 92, 66, -100, 84, 51,\n            66, -92, -110, -46, 66, -125, -94, 121, 66, -86, 24, -83, 66, -103,\n            120, 34, 66, 108, 30, 14, 66, 69, -62, -56, 66, 106, -27, 100, 66,\n            -100, 66, 103, 66, -127, 124, 48, 66, -112, 89, 103, 66, 95, 37,\n            -54, 66, 125, -29, -9, 66, 98, -57, -61, 66, 78, -48, -9, 66, -109,\n            37, -103, 66, -77, 50, -41, 66, -74, -84, 93, 66, -89, 102, 109, 66,\n            123, -38, 95, 66, -94, 97, 115, 66, -96, -51, 49, 66, 83, 69, 127,\n            66, -122, 25, -93, 66, 101, -23, -114, 66, -106, 5, -107, 66, 84,\n            100, 126, 66, -92, -95, 82, 66, 112, 77, -81, 66, -92, -108, -112,\n            66, 125, -4, -111, 66, -104, 55, 125, 66, -122, -38, -65, 66, -108,\n            112, -95, 66, 99, -22, -77, 66, -119, -66, -101, 66, -78, -73, 28,\n            66, -80, -41, -66, 66, -101, -107, 77, 66, -88, 111, 124, 66, 108,\n            69, -63, 66, -86, 97, 120, 66, -84, 63, 21, 66, -60, 123, 107, 66,\n            -87, -127, -84, 66, -61, 125, -119, 66, -96, -63, -47, 66, 127, 53,\n            3, 66, -88, 78, 101, 66, 110, -114, -91, 66, 109, 97, -74, 66, 91,\n            116, 79, 66, 104, 115, -57, 66, 87, 16, -54, 66, -121, -120, 95, 66,\n            122, -6, 17, 66, -107, 82, -44, 66, 
117, -99, -16, 66, 120, 83, -65,\n            66, -109, -123, 114, 66, -85, -124, 2, 66, -79, -80, 103, 66, -112,\n            -13, 75, 66, -121, -74, -84, 66, -117, 28, 24, 66, -105, 9, -26, 66,\n            -102, 79, 90, 66, -121, -95, 12, 66, -107, -37, -3, 66, -98, 22,\n            -122, 66, -90, 123, -22, 66, -117, -101, -92, 66, -98, 76, -114, 66,\n            -92, 61, -59, 66, -69, 3, 111, 66, 117, -63, -97, 66, -98, 87, 49,\n            66, -103, 50, -38, 66, -122, 39, 93, 66, -126, 50, 90, 0, 0, 0, 0,\n            0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 253,\n          \"leftIndex\": [\n            -1, 1, 255, 1104862922, 1160430947, 602631239, 973599817,\n            1112041301, 769349326, 601860200, 1016410184, 715592624, 1097691496,\n            1155855487, 581151415, 985085023, 446\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1162261466, 1162084076, 645166417, 1140984142,\n            725231066, 643375763, 602043649, 1159956112, 774043901, 643311625,\n            710468725, 586090876, 585972841, 364\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 5623816131219899495,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n     
     \"cutDimension\": [\n            1, 31, 255, 530380726, 858261536, 498543680, 480659814, 272727935,\n            742689430, 688554251, 656105280, 524438278, 244566623, 525113071,\n            2223965, 459501897, 806978539, 380482694, 6057741, 857166212,\n            519700115, 169625971, 156948473, 368423804, 428012101, 372187640,\n            262130689, 427730813, 745130370, 481473952, 459312062, 498523251,\n            702664939, 459256921, 865174426, 635585514, 181906227, 118811245,\n            394423262, 238339688, 538393390, 852456440, 696402172, 358755081,\n            385617401, 6349\n          ],\n          \"cutValueData\": [\n            66, 119, -60, -124, 66, -126, 73, 98, 66, 111, 86, -35, 66, -90, 51,\n            -113, 66, -81, -11, -121, 66, 90, 74, 48, 66, 99, -121, -26, 66,\n            -127, -104, 7, 66, 85, -101, -13, 66, 112, 8, -114, 66, -107, -65,\n            60, 66, 96, -18, -31, 66, 97, 97, -32, 66, 118, 57, 89, 66, -110,\n            42, 127, 66, -73, 67, 82, 66, 84, 57, 77, 66, -127, -55, -32, 66,\n            74, -110, 91, 66, -79, 116, -50, 66, -122, 0, -99, 66, -125, -77,\n            31, 66, -114, 57, 43, 66, -123, 86, 80, 66, -114, 64, 85, 66, 81,\n            42, 114, 66, -109, -45, -116, 66, -105, -23, 37, 66, -71, -104, -73,\n            66, 96, 52, -10, 66, -100, 58, 24, 66, -91, 82, 86, 66, 83, 92, -67,\n            66, -87, -98, -5, 66, -72, -7, -68, 66, -102, -59, 69, 66, -91, 14,\n            89, 66, -86, -23, 24, 66, 100, -118, 55, 66, -113, 83, 19, 66, -104,\n            117, -1, 66, -63, -14, -127, 66, -69, -34, 78, 66, -73, 97, -128,\n            66, -124, -96, 2, 66, -109, 53, -13, 66, -122, -93, 23, 66, -90, -6,\n            29, 66, 121, -76, -96, 66, -93, 88, -13, 66, -77, -6, -2, 66, -102,\n            74, 103, 66, -113, -69, 88, 66, 122, -56, -105, 66, -115, -80, 27,\n            66, 76, 52, -40, 66, 102, 103, -90, 66, 126, -47, -102, 66, 121, 39,\n            121, 66, -125, 62, -58, 66, -92, 30, 48, 66, 97, 
-90, 72, 66, -104,\n            -98, 11, 66, 104, 21, 42, 66, -106, -28, -10, 66, -109, 115, 80, 66,\n            106, -40, -14, 66, -120, -72, 26, 66, -112, -123, 74, 66, -104, 116,\n            -53, 66, -127, -41, -107, 66, 103, 90, 71, 66, -110, 59, 35, 66,\n            -115, 83, -76, 66, -112, 23, 109, 66, -95, 124, -32, 66, 99, 88,\n            -111, 66, -83, 76, 58, 66, -67, 86, -107, 66, 102, 91, -53, 66, 73,\n            -32, 55, 66, -99, -60, -82, 66, -111, 119, 102, 66, -111, 90, 71,\n            66, 72, -23, -54, 66, -121, -81, 111, 66, -72, 81, 54, 66, -95, 121,\n            -95, 66, -116, -81, 27, 66, -124, 53, -15, 66, -75, -30, -65, 66,\n            -92, -71, 68, 66, -78, 57, 114, 66, -119, -94, -71, 66, -77, 87,\n            -71, 66, -110, -123, 79, 66, 105, -72, -57, 66, -103, -72, 65, 66,\n            -68, -27, -82, 66, 77, 108, 95, 66, 105, 107, 63, 66, -75, 105, 83,\n            66, -65, 81, 26, 66, -119, -87, -52, 66, -125, 64, -44, 66, 97, -62,\n            -39, 66, -107, -37, -61, 66, 118, -2, -52, 66, 113, 79, 85, 66,\n            -111, 44, -29, 66, -105, 74, 16, 66, -84, 38, 16, 66, 105, 3, 12,\n            66, -128, 49, -110, 66, 114, -78, -7, 66, -72, 50, -25, 66, -79, 82,\n            -2, 66, -120, 10, -49, 66, -117, -94, -122, 66, 84, 86, -7, 66, 119,\n            15, 59, 66, -128, -35, 24, 66, -95, 23, 6, 66, -123, -46, -22, 66,\n            -126, 49, 98, 66, -68, 14, -11, 66, 95, 110, -39, 66, 84, -86, -127,\n            66, -103, 1, -113, 66, 117, -22, -68, 66, -83, -36, -51, 66, -66,\n            -122, -33, 66, -76, 111, 99, 66, -77, 113, 1, 66, -94, 101, 4, 66,\n            -121, -93, -102, 66, -112, -9, -13, 66, -95, -77, -110, 66, -71, 90,\n            -9, 66, 77, -93, 45, 66, -112, -31, 125, 66, -77, -106, -103, 66,\n            -121, 120, 97, 66, -63, 83, 90, 66, -106, 116, -69, 66, 79, 10, -55,\n            66, -93, 126, -78, 66, -114, -112, -54, 66, 107, 8, 16, 66, -116,\n            -94, 4, 66, -116, 27, -22, 66, -106, -106, 
-88, 66, 104, 125, -98,\n            66, -99, 124, 104, 66, -80, -86, 107, 66, -96, 57, 55, 66, -107, 71,\n            -105, 66, -113, -58, 107, 66, 122, -44, -39, 66, -101, -66, 5, 66,\n            -92, 71, 81, 66, 96, -70, -42, 66, -68, 26, 75, 66, -117, 102, 13,\n            66, -105, 31, 108, 66, -100, 109, -35, 66, -115, 51, 68, 66, -103,\n            -41, 121, 66, 81, 88, -71, 66, -102, 0, 56, 66, -80, -28, -75, 66,\n            -83, -55, -39, 66, -128, 103, -108, 66, -109, 127, -70, 66, -111,\n            74, 43, 66, -111, 78, -80, 66, -102, 15, -120, 66, 99, -114, -74,\n            66, -116, -107, -8, 66, -120, -122, -113, 66, 80, 38, -49, 66, -71,\n            124, 16, 66, -69, 85, -104, 66, -109, -62, -13, 66, -104, 54, 72,\n            66, -119, -85, -73, 66, -74, 126, 69, 66, 125, 12, 86, 66, -91, -38,\n            12, 66, -114, -14, -114, 66, -106, 28, 54, 66, -92, -36, 34, 66,\n            114, -25, 54, 66, -78, 38, 91, 66, -115, -117, 64, 66, -101, 114,\n            90, 66, -125, -66, -101, 66, -61, 114, -113, 66, -126, -59, -125,\n            66, -108, 105, -27, 66, -126, -4, 62, 66, -112, 75, -119, 66, -97,\n            119, -17, 66, -95, -8, 70, 66, 119, 42, 115, 66, 110, -95, 19, 66,\n            -80, 84, -5, 66, -117, -22, 57, 66, -108, -3, 43, 66, -97, 113, 18,\n            66, -119, 43, -27, 66, -70, 85, 39, 66, -91, 99, 5, 66, -100, -51,\n            26, 66, -65, -59, 112, 66, -106, -111, -68, 66, 116, -19, -58, 66,\n            -105, 31, -90, 66, -120, 61, 15, 66, 119, 65, -5, 66, -126, 55, -28,\n            66, -122, -105, 57, 66, -93, -46, -123, 66, -110, -22, -84, 66, -87,\n            109, 116, 66, -106, -76, -17, 66, 94, -72, 122, 66, 91, 88, -85, 66,\n            -119, -75, -26, 66, -115, 48, 12, 66, -102, 100, 87, 66, -92, -64,\n            -105, 66, 97, 24, -82, 66, -97, -115, 32, 66, -97, 91, -73, 66, -98,\n            -78, 87, 66, 104, -116, 33, 66, 121, -19, -73, 66, -89, -25, 7, 66,\n            -118, 107, 0, 66, -100, -85, 70, 
66, -101, 103, 86, 66, -87, -86,\n            59, 66, -100, 17, -6, 66, -108, 65, -15, 66, -87, -55, -45, 66, -79,\n            46, 85, 66, -93, 80, -52, 66, -126, -104, 92, 66, 110, 18, 67, 66,\n            -120, 0, -15, 66, -113, -98, 94, 66, -86, 52, -60, 66, -114, 50,\n            -65, 66, -121, -115, 113\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 1033566207, 7846973, 1067573159, 37132719, 212909427,\n            549106746, 2877018, 179473574, 8193\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 1071480575, 185058845, 999947994, 310774698, 198563097,\n            1880176, 34223184, 172131500, 561\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 3899095098967794180,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 654261202, 1005930677, 728332218, 216766382, 858449742,\n            527600474, 330466978, 526456695, 115145981, 781878902, 976599139,\n            1002015797, 572987187, 980024407, 325359835, 981135163, 918014794,\n            712759738, 86695769, 263579555, 865767219, 610786366, 379166434,\n            597656865, 623486427, 
207309618, 213166626, 127002862, 225038326,\n            749726009, 938727258, 82094013, 259106650, 666086890, 670115431,\n            51767507, 304123445, 632864178, 99801802, 1009329397, 402628166,\n            202757719, 959\n          ],\n          \"cutValueData\": [\n            66, 69, 55, 27, 66, -119, 29, -25, 66, -82, -110, -41, 66, -69, -51,\n            99, 66, -126, -50, 30, 66, -95, -123, 12, 66, -109, -108, 39, 66,\n            -82, -51, 60, 66, -114, 48, 7, 66, -85, -94, -89, 66, 89, 60, -85,\n            66, -81, -99, -55, 66, -113, -65, 2, 66, -126, -11, 125, 66, -90,\n            -104, -45, 66, 109, 67, 22, 66, -125, -112, -59, 66, -60, -14, -47,\n            66, -69, -27, 116, 66, 98, -37, 35, 66, 93, 43, -108, 66, 100, 118,\n            -106, 66, 96, 44, 90, 66, -115, 85, -11, 66, -109, -14, 121, 66,\n            -101, -126, -81, 66, -104, -64, 89, 66, -122, 31, -62, 66, -104, 85,\n            -67, 66, -111, 35, 48, 66, -95, -10, 58, 66, -88, -15, 36, 66, -99,\n            16, 116, 66, -118, -39, -71, 66, 102, 65, -55, 66, 100, 78, -32, 66,\n            -112, -53, 57, 66, 97, -65, -88, 66, -120, -71, -121, 66, -69, -100,\n            33, 66, -97, 35, 10, 66, 124, 30, 63, 66, -116, -58, -88, 66, -74,\n            -23, 56, 66, -91, 70, -120, 66, 83, 101, 15, 66, 87, -97, 21, 66,\n            -109, -124, -44, 66, -86, 95, -75, 66, -79, 84, -49, 66, -118, -65,\n            2, 66, -90, 21, 6, 66, -76, -52, 84, 66, -102, -77, 86, 66, 106,\n            -122, 23, 66, -112, 8, -43, 66, -118, 10, 124, 66, -86, 127, 102,\n            66, -69, -88, 97, 66, -91, -125, -22, 66, 94, -88, -9, 66, -127, 53,\n            -128, 66, 96, 109, -78, 66, -124, 123, 84, 66, -128, 2, -81, 66,\n            126, 96, 79, 66, -98, 76, -67, 66, -108, 112, -24, 66, -117, -61,\n            10, 66, -109, 22, -102, 66, -92, 22, -121, 66, -118, 68, -5, 66,\n            -112, -26, 86, 66, 82, 27, -7, 66, -126, 108, 91, 66, -93, -58, 29,\n            66, -84, 36, -80, 66, -119, 
-72, 31, 66, -123, 56, 3, 66, 96, -52,\n            -84, 66, -109, -109, -70, 66, -92, 3, 54, 66, -89, 92, 31, 66, -104,\n            21, -85, 66, -64, -108, -9, 66, -100, 0, 58, 66, 119, 80, -33, 66,\n            116, -125, 97, 66, -66, -70, -81, 66, 91, 65, -14, 66, -111, -12,\n            34, 66, 75, -111, 42, 66, -127, -10, 72, 66, 114, -13, 66, 66, -108,\n            45, -25, 66, -105, -122, -70, 66, -67, 54, 111, 66, -78, 46, 77, 66,\n            -68, 62, -33, 66, -100, 87, -112, 66, -87, -99, -15, 66, -63, -29,\n            -47, 66, -80, -119, -47, 66, -66, -5, -103, 66, 97, 63, 99, 66, -63,\n            -48, 118, 66, 92, -3, 75, 66, -74, 6, -36, 66, -89, 95, -76, 66,\n            -93, 21, -92, 66, -116, -16, -48, 66, -107, 39, -78, 66, -99, 2, 3,\n            66, -77, -64, -122, 66, -92, -6, 51, 66, -78, -4, 61, 66, -85, -120,\n            76, 66, -116, 120, -38, 66, -116, -14, 106, 66, -82, 41, 75, 66, 92,\n            85, -2, 66, -128, 104, -90, 66, -123, -24, 52, 66, -122, 11, -99,\n            66, -86, 99, 119, 66, -96, 52, 34, 66, -110, 80, 54, 66, 110, 50,\n            -94, 66, -101, 68, 82, 66, -87, 104, 114, 66, 93, -126, 19, 66, -91,\n            11, 104, 66, 92, -73, -18, 66, -98, -119, 101, 66, 109, -82, 94, 66,\n            -111, 76, -90, 66, -126, -6, 52, 66, -120, -29, -15, 66, -89, 79,\n            -52, 66, -117, -9, -48, 66, -128, -122, 58, 66, -81, 59, -126, 66,\n            -100, 86, -31, 66, -92, 78, 4, 66, -79, -53, -121, 66, -71, 52, -31,\n            66, 82, -89, -111, 66, -86, -8, -92, 66, -99, 121, 118, 66, -102,\n            -3, -16, 66, -102, 64, -67, 66, -111, -67, 79, 66, -118, 73, -53,\n            66, -124, -120, -79, 66, -112, 11, 73, 66, -99, -69, 74, 66, -66,\n            -21, 105, 66, -94, -32, 83, 66, -114, 89, -125, 66, -95, 95, 27, 66,\n            -102, 56, -57, 66, -64, -43, -33, 66, -106, -66, 68, 66, -62, 105,\n            69, 66, -108, 96, -64, 66, -102, 4, 77, 66, -115, -40, -97, 66,\n            -118, -29, 58, 66, 
-60, 64, -38, 66, 112, -100, -47, 66, -108, -82,\n            -116, 66, -105, -70, 9, 66, -113, 114, -77, 66, -78, 34, -10, 66,\n            -94, 127, 29, 66, -81, -34, 61, 66, -85, 24, -125, 66, -61, -15,\n            111, 66, -119, -30, -81, 66, 84, -64, 45, 66, -126, 39, -72, 66,\n            -96, -4, 5, 66, -85, 122, -118, 66, -90, 23, 122, 66, -126, -90,\n            -106, 66, 122, -43, -52, 66, 120, -127, -69, 66, -119, 68, -23, 66,\n            -74, 2, -35, 66, -108, -108, 78, 66, -116, -52, -127, 66, -98, -47,\n            53, 66, 106, 52, 72, 66, -127, -109, 127, 66, -81, -60, 11, 66, -93,\n            85, 49, 66, -105, -78, -84, 66, -110, -101, -13, 66, -108, -30, 122,\n            66, -92, 89, -72, 66, -92, 102, -47, 66, -93, 34, -49, 66, -125, 69,\n            114, 66, -69, -16, -6, 66, -85, 13, 29, 66, -81, -65, -109, 66,\n            -111, -79, -76, 66, -81, 63, -6, 66, 87, -8, -106, 66, -92, 80, 5,\n            66, -120, -10, -38, 66, -69, -50, -92, 66, -104, -9, 47, 66, 125, 4,\n            90, 66, -117, 114, 81, 66, -93, 122, 66, 66, -128, 34, 99, 66, -85,\n            -10, -89, 66, -94, 28, -126, 66, -96, -12, 2, 66, -66, 8, -59, 66,\n            -115, -75, -20, 66, -81, -26, -115, 66, 118, 72, -54, 66, 118, -119,\n            -57, 66, 115, -88, -52, 66, -97, -34, 122, 66, 89, -60, -1, 66,\n            -126, -48, -115, 66, 109, -23, 86, 66, -116, 57, -12, 66, -126, -49,\n            97, 66, 124, -97, 93, 66, -91, 125, -117, 66, -108, -54, -59, 66,\n            -119, 19, 93, 66, -77, -65, 65, 66, -89, -78, 79, 66, -89, 27, -117,\n            66, -102, -67, 12, 66, 86, -97, 103, 66, -95, -66, -65, 66, -128,\n            102, 50, 66, -90, -1, 46, 66, 109, -47, -109, 66, -102, -15, 23, 66,\n            112, 77, 106, 66, -105, -13, 94, 66, -92, -55, -62, 66, -81, 17,\n            -115, 66, -114, -103, -38, 66, -127, -29, -39, 66, 119, 90, 64, 66,\n            -109, 12, -127, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          
\"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1162260734, 1033121303, 975725665, 1099472426,\n            1097956745, 731607295, 645677852, 1011604586, 716650712, 731549203,\n            710864306, 712422943, 710349719, 1120\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1033120547, 1161730016, 601059770, 1160056957,\n            1157450056, 774221296, 630621112, 970145554, 587515571, 600459695,\n            1098281905, 581721952, 581130817, 1336\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -7971869639547712875,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 978921285, 994007269, 471914914, 452950318, 129979958,\n            83547427, 480812537, 174955119, 366916790, 379754205, 70045301,\n            593870679, 324643107, 82025451, 975680853, 112561835, 240470142,\n            765319901, 1016262463, 1024589503, 605260962, 593682359, 464855001,\n            368409769, 223430526, 489252134, 620297641, 254379710, 313257399,\n            463113138, 648330745, 468004651, 103794910, 339715569, 465107301,\n            443099255, 328904566, 849832795, 582960731, 98542957, 233622623,\n            81833694, 
189\n          ],\n          \"cutValueData\": [\n            66, -93, -26, 21, 66, -86, 71, 90, 66, -61, -103, 106, 66, 74, 63,\n            -50, 66, -115, 59, 76, 66, -81, 92, -30, 66, -67, 34, -95, 66, 96,\n            -106, 78, 66, -125, 80, 19, 66, 125, -60, 127, 66, 82, -48, 19, 66,\n            95, -109, -38, 66, -86, 96, -78, 66, 121, 80, -77, 66, 94, -64, -88,\n            66, -128, 17, -120, 66, -97, -74, 117, 66, 91, -75, 0, 66, 110, 39,\n            -50, 66, -101, -127, 18, 66, -87, 125, -29, 66, -86, 38, 64, 66,\n            -91, -73, -87, 66, -71, -36, 55, 66, 116, -113, -56, 66, 116, -17,\n            63, 66, 119, 52, -85, 66, -92, -117, -12, 66, -89, -128, -102, 66,\n            91, 27, 95, 66, -77, 110, 64, 66, -105, 113, -127, 66, -112, -104,\n            94, 66, -90, 104, -59, 66, -77, 69, 29, 66, 87, 112, -113, 66, -110,\n            84, -61, 66, -108, -55, -107, 66, -103, 66, -75, 66, -88, 112, -52,\n            66, -121, 8, -28, 66, 93, -12, -7, 66, -110, 91, 102, 66, -109, 18,\n            -79, 66, -110, -57, 47, 66, -123, 82, 58, 66, 120, -52, 34, 66,\n            -123, -80, -70, 66, 95, 38, -46, 66, 100, -88, -62, 66, 79, 127,\n            -74, 66, -105, 62, -9, 66, 110, 1, 60, 66, 114, 9, 110, 66, -79,\n            -74, -67, 66, 81, -38, 76, 66, 105, 17, -60, 66, 119, -100, 31, 66,\n            -96, 118, 35, 66, -90, 109, -108, 66, -120, -94, 17, 66, 121, 89,\n            -121, 66, -95, -62, 116, 66, -125, -81, 3, 66, -120, -23, -70, 66,\n            -114, -37, 53, 66, -121, 29, 35, 66, -114, 43, 88, 66, -116, 71, 42,\n            66, -90, -125, 105, 66, 118, 92, 104, 66, 118, -46, -121, 66, 126,\n            -102, 1, 66, 119, 34, -20, 66, -68, 78, 59, 66, 109, -56, 42, 66,\n            -103, -70, -35, 66, 115, 94, 67, 66, -82, 3, 56, 66, -92, -7, 88,\n            66, -84, 11, 51, 66, -107, 38, 1, 66, -104, -38, -89, 66, 121, 97,\n            -15, 66, 99, -71, -126, 66, 104, 76, -15, 66, -85, -96, 67, 66, -93,\n            -30, 45, 66, 
-106, 58, -90, 66, 119, 6, -29, 66, -79, -121, -14, 66,\n            -108, 5, -43, 66, -67, -39, 39, 66, -77, -97, -35, 66, -127, 59, 22,\n            66, -84, -44, 106, 66, -100, -25, -87, 66, -100, -119, 89, 66, -124,\n            10, 115, 66, -91, -72, 81, 66, -128, -16, 103, 66, 113, -106, 77,\n            66, -113, 101, 86, 66, 80, 101, 15, 66, 120, 106, -77, 66, 117, 17,\n            41, 66, 118, -1, -88, 66, 87, -46, 78, 66, 106, -72, 55, 66, 94, 12,\n            80, 66, 92, 106, -123, 66, -111, -96, 116, 66, -119, -9, 126, 66,\n            114, 94, 120, 66, -69, -29, -76, 66, -124, -122, -125, 66, -62, 107,\n            -91, 66, 98, -126, -112, 66, -98, 29, 63, 66, -72, 125, 5, 66, -71,\n            -28, 46, 66, 101, -112, -12, 66, -102, -16, -124, 66, 86, 39, 55,\n            66, -121, 82, 10, 66, -102, 81, -14, 66, -106, 30, -47, 66, -127,\n            -85, -118, 66, -117, -30, 22, 66, -107, -41, 12, 66, 109, -121, 57,\n            66, -98, -84, -39, 66, -123, 107, 6, 66, -98, 13, -38, 66, -61, 52,\n            -55, 66, -89, 11, 46, 66, -118, 85, 20, 66, 113, -62, 48, 66, -126,\n            49, 7, 66, 96, -4, 98, 66, 110, -72, -86, 66, -101, -50, 29, 66,\n            -109, 99, 83, 66, -69, 104, 110, 66, -90, 42, 122, 66, -99, 30, -73,\n            66, 117, -28, 107, 66, -86, -57, 27, 66, 116, -48, 80, 66, -114, 98,\n            -36, 66, -118, 41, -64, 66, -105, -49, -91, 66, -111, -109, 55, 66,\n            -107, -63, 56, 66, -65, -28, 20, 66, 117, 89, 25, 66, -120, -107,\n            27, 66, -112, -63, -25, 66, -69, -107, -121, 66, -119, -31, 34, 66,\n            -126, -98, 63, 66, -106, 64, -41, 66, -95, -60, 76, 66, 105, -113,\n            -36, 66, -97, -104, 64, 66, -80, -50, 86, 66, -111, -92, -107, 66,\n            -108, -93, 114, 66, -84, 80, -105, 66, 125, -41, -105, 66, 90, -76,\n            -60, 66, -72, 40, 112, 66, -101, 86, 44, 66, -103, 37, -7, 66, 122,\n            -118, -45, 66, -70, 117, 61, 66, 123, -24, 20, 66, 100, -16, -104,\n         
   66, -107, 35, -96, 66, -91, 95, 78, 66, -122, 122, 64, 66, -90, -41,\n            103, 66, -114, -38, 90, 66, -71, 58, -43, 66, -96, 17, -48, 66, -70,\n            103, 88, 66, -116, 53, -54, 66, -91, 104, -47, 66, -121, 10, -120,\n            66, 110, -97, -121, 66, -85, 74, 46, 66, 88, 122, 19, 66, -81, -108,\n            -80, 66, -108, -77, -44, 66, 96, 64, 82, 66, -102, -71, -23, 66,\n            -103, 80, 89, 66, -120, 86, -104, 66, -77, 101, -80, 66, -96, -9,\n            109, 66, -109, -71, -40, 66, -95, 21, 10, 66, -114, 13, 101, 66,\n            -101, 87, 2, 66, 122, -27, 8, 66, 118, 50, -116, 66, -117, 83, -108,\n            66, -98, 42, 24, 66, -119, -19, 35, 66, -94, 18, 25, 66, -117, 79,\n            86, 66, -103, 90, -44, 66, 101, 108, 34, 66, -91, 48, 78, 66, -67,\n            -52, -115, 66, -124, -30, -62, 66, -74, 31, 43, 66, -91, 100, 20,\n            66, -115, 0, 86, 66, -82, -12, -26, 66, -114, 26, 8, 66, 91, 26,\n            105, 66, 124, 39, 56, 66, 77, -61, -60, 66, -109, 123, -95, 66,\n            -107, 64, -95, 66, -124, -12, -82, 66, 69, 70, -69, 66, -114, 10,\n            -29, 66, -121, 89, 117, 66, -124, 101, 2, 66, -108, -3, 91, 66, 123,\n            47, -107, 66, -97, -84, -31, 66, -109, 98, 89, 66, -112, -51, 11,\n            66, 98, -72, -7, 66, -92, -80, 25, 66, -94, 33, 95, 66, -111, 78,\n            124, 66, 125, -94, -112, 66, -82, -81, -13, 66, 102, 113, -102, 66,\n            -112, 95, -113, 66, -79, 107, -119, 66, -100, 102, 124, 66, -79,\n            -73, -5, 66, -113, 96, 115, 66, 98, 57, -23, 66, -114, -56, -79, 66,\n            -109, 55, 24, 66, -84, 86, -102, 66, -112, -13, 120, 66, -83, -29,\n            87, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 768463685, 1028309171, 631350899, 1119188465, 755098891,\n            597318908, 970174987, 600796250, 
754973449, 1155678233, 754991735,\n            710529692, 970211435, 1094\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1157477768, 774832199, 631350935, 1032528356,\n            1143129229, 716893402, 970855216, 973511581, 1097759536, 968617804,\n            1097692441, 968551981, 1112217467, 1096\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -5616425619850912141,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 510656361, 600278606, 253326725, 595140644, 831584705,\n            585219198, 728696459, 41477397, 870711578, 44988593, 54787746,\n            14824058, 750086845, 380106249, 599335497, 180759439, 806212701,\n            310562823, 403506273, 421878371, 653367040, 357206430, 79900404,\n            380801954, 404657871, 601496835, 847113032, 878399556, 401445895,\n            154883848, 26336039, 614143973, 269541196, 369730541, 384029582,\n            488060360, 7562094, 36883374, 234619561, 832217179, 172982005,\n            531182728, 2082\n          ],\n          \"cutValueData\": [\n            66, -116, -46, 3, 66, -120, -122, -64, 66, -59, 109, 80, 66, 69, 4,\n            -20, 66, -72, 29, -42, 66, 125, -26, -90, 66, -104, -102, 56, 66,\n            110, -19, -125, 66, 
-125, 28, -21, 66, -94, -94, -57, 66, -91, -110,\n            9, 66, -104, 16, 53, 66, -89, -41, 61, 66, 108, 110, 29, 66, -80,\n            -56, -81, 66, -104, -104, 42, 66, 124, -56, 101, 66, 85, 96, -64,\n            66, -70, -36, 110, 66, -67, -36, 27, 66, -64, 124, 75, 66, -72, -85,\n            -58, 66, -69, 30, 77, 66, -117, -100, -79, 66, -98, 53, -70, 66,\n            113, 96, -64, 66, 88, -7, 67, 66, -71, -119, 53, 66, 91, -115, -23,\n            66, 127, -59, 86, 66, -72, -15, -35, 66, -105, 93, -77, 66, -71, 66,\n            99, 66, -83, 93, -28, 66, 100, 92, 8, 66, -102, 15, 74, 66, 78, 90,\n            -98, 66, -124, 65, 3, 66, 83, 3, 23, 66, -124, -53, -26, 66, -66,\n            -38, -52, 66, -109, -120, 109, 66, -87, 27, 37, 66, -105, -54, -42,\n            66, -81, 91, -112, 66, -61, 48, -121, 66, 98, -85, -64, 66, -127,\n            15, 109, 66, -92, 117, -65, 66, -94, -47, -19, 66, -110, -67, -20,\n            66, -111, -12, -13, 66, 107, 95, -67, 66, -113, -108, -86, 66, 78,\n            -127, -89, 66, -108, 25, -26, 66, 116, -42, -25, 66, 92, 119, 45,\n            66, -83, -44, -24, 66, 81, -60, -47, 66, -74, 81, -119, 66, 95, 37,\n            65, 66, -73, 29, 95, 66, 86, -30, 44, 66, -116, 96, -70, 66, -110,\n            109, 109, 66, 93, 33, -48, 66, 108, 1, 73, 66, 115, 98, -107, 66,\n            104, -31, 26, 66, -121, 46, -126, 66, -118, -50, -6, 66, 94, 76, 62,\n            66, -85, -51, -26, 66, -79, -64, -51, 66, -127, 51, -93, 66, -127,\n            57, -35, 66, 91, 12, -21, 66, -118, 113, 75, 66, 99, 56, 106, 66,\n            -108, -124, 77, 66, 126, 123, -75, 66, 124, -41, 17, 66, -86, -99,\n            -123, 66, 115, 76, 125, 66, -77, -31, -7, 66, -117, -14, -50, 66,\n            -114, 94, -26, 66, -101, -109, -12, 66, -119, 36, 80, 66, -99, -34,\n            -65, 66, -108, -98, -97, 66, -76, 62, 31, 66, -75, -81, -4, 66, -61,\n            -81, 6, 66, 119, 122, 23, 66, -123, 99, -60, 66, -107, 55, 23, 66,\n            -75, 28, -26, 
66, 110, 22, 5, 66, 87, -120, -24, 66, -115, 83, 112,\n            66, 105, -28, -108, 66, -123, 77, -2, 66, 111, -122, 66, 66, -93,\n            -65, 116, 66, 88, -57, 60, 66, -86, 27, 64, 66, -115, 93, -100, 66,\n            -105, 21, -51, 66, 104, -68, 46, 66, -64, 116, 3, 66, -117, 74, -58,\n            66, -81, 8, 15, 66, -94, 55, -78, 66, 121, -108, -86, 66, -88, 106,\n            -45, 66, -81, -2, 44, 66, -113, 86, -123, 66, -91, 65, 40, 66, -115,\n            13, -3, 66, -83, 54, -42, 66, -75, -68, 40, 66, -114, 69, 118, 66,\n            -77, 126, 109, 66, -120, -24, -105, 66, 112, 64, -73, 66, -123, 78,\n            59, 66, -126, 56, -56, 66, -121, -120, -61, 66, -122, -59, -39, 66,\n            -86, 94, -118, 66, 107, -123, -50, 66, -119, -29, -122, 66, -109,\n            -104, -40, 66, 79, 127, 115, 66, 86, -24, 3, 66, -113, -88, -21, 66,\n            -74, -117, 60, 66, 98, 109, -22, 66, -75, 22, 90, 66, -77, -18, -68,\n            66, 91, -115, 70, 66, -98, 99, -24, 66, 94, 45, 55, 66, -117, 107,\n            -45, 66, -84, 95, 82, 66, -109, 112, 27, 66, -93, 65, -124, 66, 105,\n            -51, 112, 66, -93, -67, -117, 66, -127, -31, 64, 66, -66, -128, -70,\n            66, -125, -47, -9, 66, -121, 59, -3, 66, -110, -70, 29, 66, 85, 58,\n            84, 66, -127, 98, 57, 66, 106, -120, -47, 66, 125, -104, -63, 66,\n            -87, 51, -34, 66, -88, 5, -121, 66, -116, -116, 108, 66, 96, -109,\n            -102, 66, -122, -54, 29, 66, -69, 90, 39, 66, -102, -71, 106, 66,\n            101, -108, -6, 66, 93, -68, -48, 66, -72, 107, 52, 66, 100, -81,\n            -42, 66, -123, -70, -29, 66, -110, 9, -82, 66, -65, 12, -119, 66,\n            119, 55, -21, 66, 115, 18, 24, 66, -125, 34, 28, 66, -101, -96,\n            -108, 66, -126, -91, -92, 66, -91, 41, 97, 66, -117, 93, 70, 66,\n            106, -67, 53, 66, -127, 77, 29, 66, -128, -10, -55, 66, 85, -6, -14,\n            66, 115, -88, -116, 66, 89, -110, -95, 66, -85, -71, -27, 66, -122,\n            -85, 
-92, 66, -84, -86, -55, 66, 102, 126, -46, 66, -125, -39, 17,\n            66, -118, 63, 116, 66, 120, -36, -123, 66, 122, -92, 96, 66, -87,\n            -126, -22, 66, -98, 61, 68, 66, -102, 115, -15, 66, -81, 53, -70,\n            66, -127, 93, -42, 66, -83, 55, -42, 66, -125, -21, 47, 66, -75,\n            110, 73, 66, 111, 33, -66, 66, -124, -72, -55, 66, -101, 12, 72, 66,\n            76, -24, -89, 66, 116, -81, -60, 66, 104, -115, -102, 66, 89, 110,\n            -6, 66, 116, 88, -28, 66, -93, 44, -64, 66, -81, -128, -76, 66, -68,\n            -75, 117, 66, -97, 75, -1, 66, -79, 15, 29, 66, -83, 51, 0, 66, 121,\n            7, 66, 66, -80, -49, 9, 66, 105, -66, -96, 66, -102, 103, -87, 66,\n            -95, 83, 83, 66, -107, -14, 123, 66, 95, 13, -63, 66, 76, -48, 74,\n            66, -63, -41, -2, 66, -65, -67, -112, 66, -112, 52, 123, 66, -96,\n            -70, -16, 66, -75, -70, -55, 66, -71, -126, -122, 66, -90, 26, -49,\n            66, -126, 38, 32, 66, -102, -29, -77, 66, 112, 79, 29, 66, 120, 5,\n            14, 66, -127, -35, -13, 66, 107, -29, -92, 66, -127, -94, 51, 66,\n            121, -88, -120, 66, -103, 76, -72, 66, -126, -44, -62, 66, -121, -2,\n            118, 66, -120, 65, -41, 66, -100, -97, 33, 66, -120, -82, 36, 66,\n            -67, 32, -50, 66, -94, 122, -48, 66, -101, -12, 50, 66, -102, -15,\n            38, 66, -117, -37, 14, 66, -86, -126, 71, 66, -81, 7, -126, 66,\n            -116, 69, -63, 66, -78, -41, -94\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 790626295, 679477087, 412506246, 950297478, 705039134,\n            736271007, 176986631, 138021382, 2418\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 1041231855, 961487323, 419388414, 203088578, 563888667,\n            712115865, 449557004, 685772805, 289\n          ],\n          \"nodeFreeIndexes\": [],\n    
      \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -7146792820767120252,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 233499737, 158994418, 628395225, 752668636, 285752461,\n            857045498, 516575438, 605536352, 399910403, 134305809, 406637502,\n            120699630, 764732399, 9847357, 369440076, 274108631, 115797896,\n            417072363, 258526687, 67175843, 650792399, 234580157, 267144489,\n            392867647, 38134234, 523793265, 230238218, 859788879, 761518583,\n            34225241, 392417407, 377993091, 593385661, 314056312, 746276621,\n            498382796, 402814384, 857345902, 33761211, 13348931, 32970897,\n            65301105, 7747\n          ],\n          \"cutValueData\": [\n            66, -74, 81, 69, 66, -106, 91, 98, 66, -119, 125, 113, 66, -81, -73,\n            116, 66, -118, 54, -23, 66, 94, -117, 25, 66, -88, 4, 80, 66, 123,\n            112, 11, 66, 84, -79, -116, 66, -123, 61, -42, 66, -99, 25, -1, 66,\n            -97, -41, -98, 66, -123, -46, 56, 66, -100, -74, -90, 66, -75, -93,\n            120, 66, -77, -83, -70, 66, 100, 99, 90, 66, -70, 95, 84, 66, 90,\n            106, -94, 66, 81, 1, 26, 66, -106, -41, 65, 66, 126, -124, 65, 66,\n            118, -75, -97, 66, -92, -54, 30, 66, -92, -106, 93, 66, 94, -81, 71,\n      
      66, 108, -66, 97, 66, -104, 39, -106, 66, -60, -62, -112, 66, -59,\n            -17, -23, 66, -119, -74, 86, 66, -93, -61, -43, 66, 127, -106, -108,\n            66, -87, 124, 120, 66, -123, 60, -106, 66, -63, 121, 94, 66, -115,\n            98, 47, 66, -71, 112, 80, 66, -71, 54, -27, 66, -91, -127, 113, 66,\n            -116, 39, -73, 66, 120, 87, -96, 66, -72, -106, -104, 66, -123, -51,\n            105, 66, 100, -36, 110, 66, 84, -83, 60, 66, -91, -128, 108, 66,\n            -106, -16, 118, 66, -107, -28, -6, 66, -92, 23, -124, 66, -77, 114,\n            -46, 66, -106, 58, 109, 66, 100, -11, -29, 66, -86, 111, 84, 66,\n            -115, -77, -39, 66, -123, 54, 70, 66, -81, 113, 60, 66, -105, -96,\n            117, 66, -80, -3, -105, 66, -107, 64, 59, 66, -127, 27, -67, 66,\n            -87, 24, -13, 66, 115, -54, -125, 66, -81, 62, 21, 66, -124, -84,\n            94, 66, -98, -117, -93, 66, -113, -34, 29, 66, -100, -48, -6, 66,\n            -120, -104, 20, 66, -118, -90, 69, 66, 127, 119, 64, 66, -112, 18,\n            42, 66, -121, 44, 90, 66, 120, 34, -15, 66, -91, 51, -92, 66, -71,\n            -52, 18, 66, -128, -100, 22, 66, -77, -31, 65, 66, -122, -10, 16,\n            66, -106, 62, 19, 66, 121, 104, 112, 66, -118, 76, -102, 66, -84,\n            -42, 44, 66, -103, 114, 37, 66, -82, -105, -54, 66, -90, -24, 119,\n            66, -127, 107, -88, 66, -116, 127, -67, 66, -95, 80, 112, 66, -107,\n            70, 27, 66, 96, 25, 93, 66, -98, -100, 78, 66, -108, 54, -64, 66,\n            -108, 49, 121, 66, -126, -78, -87, 66, 121, -95, 92, 66, -123, -56,\n            -46, 66, -77, -123, -22, 66, -121, -86, 31, 66, 96, 101, 48, 66,\n            -73, 122, 46, 66, 97, 1, 0, 66, -109, -87, -19, 66, -72, 3, -86, 66,\n            -108, -64, 85, 66, 107, -84, 3, 66, -126, -13, -22, 66, -124, 85,\n            42, 66, 117, -2, 90, 66, -100, -47, -25, 66, -94, -21, 108, 66,\n            -100, 28, 8, 66, -84, -84, -19, 66, -101, -55, 65, 66, -89, -117,\n            
120, 66, -72, 72, -6, 66, -94, -120, 26, 66, 109, 24, 61, 66, -114,\n            29, 10, 66, 121, 65, -43, 66, -109, 62, 27, 66, -105, -89, -31, 66,\n            125, 114, 116, 66, -107, 60, -7, 66, -81, 118, -89, 66, -68, 10,\n            -123, 66, 97, -63, 104, 66, -126, 1, 66, 66, 97, 103, 95, 66, -63,\n            -119, -4, 66, -89, -90, -54, 66, -108, -112, -41, 66, -109, 72, 80,\n            66, -91, -6, 2, 66, -74, -103, 58, 66, -92, 25, 19, 66, 87, -78, 8,\n            66, 90, -41, -109, 66, -114, -67, -98, 66, -126, 116, -71, 66, -86,\n            16, 117, 66, -80, -12, -67, 66, -114, -79, -93, 66, -111, 5, -104,\n            66, -96, 44, 24, 66, -65, -1, -75, 66, 119, 81, -57, 66, -84, 53,\n            100, 66, -70, -84, 58, 66, 119, 106, 123, 66, 85, -29, -26, 66, -94,\n            17, 68, 66, -97, -61, -108, 66, -72, 8, 7, 66, -113, -38, 78, 66,\n            -100, -119, 40, 66, -108, -15, -117, 66, -95, -14, -48, 66, 83, -32,\n            -64, 66, 120, 50, -122, 66, -115, 48, -46, 66, -127, -69, 37, 66,\n            -83, -72, -56, 66, -73, 99, -58, 66, -90, -85, 105, 66, -116, -90,\n            -12, 66, 87, -123, -81, 66, -120, -80, 35, 66, -80, -61, 28, 66,\n            123, 36, -128, 66, -97, -47, 47, 66, 92, -65, 105, 66, -94, -95,\n            -23, 66, -102, -56, 114, 66, -108, -21, 23, 66, -128, 61, 127, 66,\n            -78, 20, 14, 66, -102, -68, -27, 66, 112, -97, -3, 66, -110, 5, -99,\n            66, -115, 78, -74, 66, -62, -61, 95, 66, -118, 113, -95, 66, -101,\n            -32, 105, 66, 86, -115, 81, 66, -126, 59, 101, 66, 81, 72, -97, 66,\n            -110, -79, -88, 66, -70, -38, 123, 66, -113, 37, 6, 66, -105, 48,\n            -51, 66, -106, 22, 102, 66, -124, -48, -25, 66, -109, -100, 53, 66,\n            -100, 19, 53, 66, -106, 97, -107, 66, -118, 126, -80, 66, -110,\n            -127, -23, 66, -117, 107, -34, 66, -99, 96, -106, 66, 126, 121, -1,\n            66, -91, -38, -1, 66, -85, 86, -39, 66, -103, 10, -115, 66, 100,\n            
-53, -2, 66, -111, -113, -70, 66, -112, 112, -59, 66, -118, -25, 75,\n            66, -105, 120, -28, 66, 76, -97, 107, 66, -108, -68, -47, 66, -113,\n            -88, 25, 66, 118, 46, -3, 66, -68, 97, -94, 66, 118, 31, 77, 66,\n            115, 98, 35, 66, -101, -87, 81, 66, -83, -125, 95, 66, -75, 17, -42,\n            66, 112, 66, -79, 66, 122, -51, -95, 66, -71, 100, 17, 66, 120, 127,\n            -31, 66, -106, 96, 100, 66, -89, 54, 56, 66, -119, 42, -111, 66,\n            -107, 57, 79, 66, 108, 120, -101, 66, 121, -4, 56, 66, -87, -100,\n            -29, 66, -127, 9, -18, 66, -106, -61, 122, 66, -109, -33, 127, 66,\n            -99, 46, -117, 66, -125, -76, 75, 66, 126, 73, -26, 66, -127, -63,\n            87, 66, -120, -16, -90, 66, 111, 82, 60, 66, -116, 102, -56, 66,\n            106, 75, -80, 66, -66, 15, 64, 66, 98, 38, 73, 66, 109, 37, -108,\n            66, -126, 74, -40, 66, 98, 99, 74, 66, -98, -67, -1, 66, -81, 17,\n            -34, 66, -128, 84, -15, 66, -92, -33, -119, 66, 118, -26, 115, 66,\n            -116, -115, -5, 66, -125, -5, -122, 66, -120, 29, 99, 66, -82, 64,\n            -30\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 1048307455, 905133887, 557349561, 356481149, 997778536,\n            38684738, 359091074, 25198865, 4248\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 243250943, 98823801, 825786037, 3135615, 808137098,\n            312601984, 93592132, 545771575, 4124\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 6408488327328865688,\n        \"id\": 0,\n        \"dimensions\": 32,\n        
\"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 1031133617, 338291887, 1066871715, 201039099, 45476909,\n            366836685, 896216501, 1029793774, 896185561, 175111619, 590053042,\n            251432098, 761771375, 111523962, 912841061, 393793337, 1068963422,\n            182285743, 303728743, 69990621, 919905963, 912324141, 1033631475,\n            314153517, 977255359, 127203582, 863567834, 747682889, 366464319,\n            652791394, 498297426, 576788985, 919709037, 256986938, 370448213,\n            783362081, 268396485, 714987482, 789636803, 585188839, 207440957,\n            884976566, 1017\n          ],\n          \"cutValueData\": [\n            66, -77, 15, -76, 66, 125, 86, 92, 66, -108, -92, -20, 66, -72, -78,\n            -29, 66, 108, -83, -35, 66, -112, -84, -28, 66, -80, 61, -97, 66,\n            -115, -38, 101, 66, 76, -107, -108, 66, 105, 108, -4, 66, -120, -31,\n            -65, 66, -68, -59, -115, 66, -91, 44, -66, 66, 90, 4, -56, 66, -97,\n            104, 122, 66, -117, 19, -91, 66, 101, -5, -102, 66, -117, 5, 33, 66,\n            125, 76, 81, 66, -89, -128, 11, 66, -106, 35, -56, 66, 116, 123,\n            -10, 66, 115, -79, -79, 66, 78, 72, -32, 66, -116, -49, -37, 66,\n            -93, -30, -68, 66, -118, 127, -107, 66, -127, -121, -15, 66, -99,\n            87, 89, 66, -118, 4, -104, 66, 84, 71, 42, 66, -90, 13, 3, 66, 96,\n            110, 93, 66, 111, -117, -120, 66, -118, 4, 75, 66, 114, 127, 114,\n            66, 91, 81, 4, 66, -119, -20, 5, 66, -109, -102, -6, 66, -88, -85,\n            19, 66, -89, 
112, -27, 66, -83, 75, 12, 66, 80, -47, 16, 66, -125,\n            5, 114, 66, -114, -96, -35, 66, -99, -86, 7, 66, -85, 99, -67, 66,\n            -91, 126, -21, 66, -76, -57, -45, 66, -121, 19, -13, 66, 99, 21,\n            -61, 66, 94, 27, -62, 66, 82, 48, 105, 66, -121, 42, 120, 66, -116,\n            -38, 48, 66, 90, 95, -19, 66, -116, 84, -98, 66, -71, 102, -76, 66,\n            -118, -127, 86, 66, 101, 89, -48, 66, -124, 19, 106, 66, -128, 69,\n            -104, 66, -74, 12, 7, 66, 101, -95, 30, 66, -121, 59, 11, 66, 124,\n            95, 91, 66, -127, -53, -56, 66, -67, -58, 65, 66, -93, -1, 24, 66,\n            -78, -128, 106, 66, -78, 109, -100, 66, 125, 11, -68, 66, -126,\n            -114, 63, 66, -102, -99, -98, 66, -112, 85, 72, 66, -86, -78, -127,\n            66, -70, -18, 47, 66, -105, -91, -31, 66, -81, 97, -37, 66, -91,\n            -125, -106, 66, 124, -75, -26, 66, -107, 92, -120, 66, -102, 19,\n            127, 66, -85, 112, 15, 66, -111, 37, 45, 66, -96, -21, 77, 66, -122,\n            -23, 90, 66, -107, -23, 28, 66, -111, 65, -49, 66, -103, 21, -9, 66,\n            -87, -65, -48, 66, -101, -76, 41, 66, -80, -54, 125, 66, -96, -61,\n            -24, 66, -102, 99, 45, 66, -95, -102, 29, 66, -95, -4, -33, 66,\n            -123, 52, 123, 66, -94, -18, 52, 66, -78, 59, -37, 66, -86, 9, -125,\n            66, -91, 41, -77, 66, -115, -103, -62, 66, -84, 43, -4, 66, 86,\n            -125, -127, 66, -105, 93, -74, 66, -112, -30, 71, 66, -122, -24,\n            -95, 66, -90, -4, -112, 66, -120, -77, -104, 66, -103, 89, -39, 66,\n            -75, -113, 113, 66, 98, -50, 29, 66, -123, -117, -103, 66, -100, 96,\n            -5, 66, -114, 99, -25, 66, -96, 76, -34, 66, -84, 19, -11, 66, 102,\n            -102, -1, 66, -91, 78, 102, 66, -113, -89, -87, 66, 98, 39, 114, 66,\n            -100, 36, -86, 66, 97, -53, 18, 66, 109, 18, -75, 66, -116, -20, 45,\n            66, -113, -51, 52, 66, 113, -81, -105, 66, 127, 41, -39, 66, -87,\n            -124, 74, 66, 
-117, -68, -18, 66, -116, 36, 65, 66, -116, 98, -58,\n            66, -83, 126, 29, 66, 101, 81, -77, 66, 119, 69, -7, 66, 100, 72,\n            -99, 66, 88, 44, -4, 66, -97, -93, -36, 66, -117, -111, -71, 66,\n            111, 79, 100, 66, 96, 39, 1, 66, -90, -115, 23, 66, 121, 19, -64,\n            66, -114, 63, 53, 66, -110, 52, -78, 66, -119, -17, 109, 66, -110,\n            -59, 88, 66, 120, -42, 90, 66, -110, 76, -26, 66, -65, -97, -50, 66,\n            88, 50, -100, 66, 101, 18, -107, 66, -113, -56, 125, 66, -96, -17,\n            -123, 66, -88, -109, 51, 66, 106, 38, -101, 66, -101, -72, 98, 66,\n            -83, 65, 16, 66, -100, 63, -40, 66, 90, 114, 122, 66, -96, -32, 67,\n            66, -105, -14, 11, 66, 116, 19, 8, 66, -112, -77, 36, 66, 127, 123,\n            31, 66, 96, 75, 106, 66, -115, 71, -15, 66, -70, 47, -38, 66, 114,\n            -125, -1, 66, -125, -24, -117, 66, -117, -53, 89, 66, -97, -39, -97,\n            66, -91, 125, -9, 66, -87, -57, -10, 66, -112, 87, -113, 66, -113,\n            -41, 25, 66, -96, -69, -1, 66, -97, 31, -118, 66, -88, -56, 54, 66,\n            -114, -101, 112, 66, -98, 72, 25, 66, -98, -18, 28, 66, 106, 39, 73,\n            66, -113, -76, 17, 66, 103, -90, -79, 66, -124, 17, 34, 66, 68, 75,\n            -15, 66, 122, -92, -122, 66, -67, 98, 30, 66, -91, 69, -19, 66, 102,\n            -49, 42, 66, -75, -116, -124, 66, -125, -90, 19, 66, -100, 97, 79,\n            66, 109, -67, -59, 66, -108, -46, 9, 66, -96, -40, 83, 66, -105, 81,\n            123, 66, -110, -94, 96, 66, -127, -109, -94, 66, -113, -110, -65,\n            66, -121, 127, 49, 66, 111, -24, 55, 66, -76, -98, 50, 66, 112, -4,\n            29, 66, -74, 110, -127, 66, -115, 59, 14, 66, 116, 56, 3, 66, -95,\n            36, -59, 66, 81, -73, 48, 66, 90, -12, 125, 66, 97, -48, -17, 66,\n            -62, -111, 76, 66, 84, -115, 91, 66, -89, 112, -19, 66, -102, 87,\n            -114, 66, -123, -25, 121, 66, 89, 120, 86, 66, 125, -35, -113, 66,\n            110, 
-123, -105, 66, 118, -38, -18, 66, -73, 61, -63, 66, -76, -45,\n            39, 66, 125, -103, -114, 66, -113, -74, 124, 66, -111, -34, 58, 66,\n            -69, -36, -61, 66, -83, -89, 67, 66, 118, -89, -8, 66, -91, 26, -34,\n            66, -89, -13, 103, 66, -120, 61, -56, 66, -65, 89, -57, 66, 115, 88,\n            31, 66, -112, 94, -69, 66, 102, -77, -64, 66, -114, -91, -106, 66,\n            -69, -5, 21, 66, -98, -79, -83, 66, -66, 85, -5, 66, -107, -90, 69,\n            66, 93, -68, 58, 66, -94, -118, -42, 66, 122, -56, -58, 66, -71,\n            -100, 94, 66, -70, -117, -34, 66, -86, -120, -83, 66, -114, -115,\n            65, 66, -122, 39, -37, 66, -118, 127, -43, 66, -104, -36, 18, 66,\n            122, 62, 88, 66, -105, -38, -123, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1162254905, 1160646650, 1026664226, 1013988659,\n            645103511, 975548491, 1026012500, 710883986, 767669444, 724649296,\n            581197193, 639058531, 731003657, 1174\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1018595249, 1162253933, 645619136, 760429858, 595567880,\n            1011678901, 1012188524, 582970849, 724797059, 630575401, 753554141,\n            1155154837, 625975168, 1120\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 8378244044999309591,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 
32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 628773447, 523117802, 370389849, 517791936, 26743225,\n            857227540, 253775774, 411967183, 614660847, 543272654, 119639168,\n            600774267, 490466084, 257461899, 865254068, 266169102, 194334916,\n            758179408, 114518120, 628919913, 605158240, 36659517, 310705840,\n            846445514, 188402781, 816839279, 249904951, 356210919, 76512078,\n            427778890, 123316813, 266176825, 509054253, 491918261, 828330153,\n            276432827, 710019274, 127807641, 147226708, 13057635, 842276530,\n            295266369, 1939\n          ],\n          \"cutValueData\": [\n            66, -77, 87, -48, 66, -109, -66, 37, 66, -117, 27, 72, 66, 122, 46,\n            -123, 66, -59, 98, -22, 66, -93, -64, 100, 66, -111, 24, -71, 66,\n            -121, -64, 123, 66, -116, 22, 34, 66, -106, -46, 2, 66, -119, -100,\n            64, 66, -102, -71, -28, 66, -63, -40, -23, 66, -114, 42, -83, 66,\n            -71, -27, 97, 66, 122, -78, 77, 66, -105, -31, 22, 66, -124, -116,\n            -29, 66, -101, 123, -59, 66, -119, 17, 82, 66, -74, -72, -109, 66,\n            -110, 121, -126, 66, 121, 56, -115, 66, -93, -19, -88, 66, -78, -95,\n            -108, 66, -61, -31, 14, 66, -116, -103, -34, 66, -68, 101, 90, 66,\n            -96, 61, -32, 66, -84, -53, 59, 66, 81, 25, -46, 66, -115, -87,\n            -123, 66, -91, 21, -105, 66, -106, -127, 51, 66, -73, -4, -91, 66,\n            114, -64, 28, 66, 109, 21, -104, 66, -121, -36, -74, 66, -88, -79,\n            106, 66, -107, -54, 110, 66, 86, 87, -80, 66, -81, 102, -35, 66,\n            -83, -123, -39, 66, -100, 8, 109, 66, 103, -5, -117, 66, -95, -68,\n            -61, 66, -101, 5, 43, 66, 112, -29, 54, 66, 100, 19, -66, 66, 78,\n       
     71, -114, 66, -93, 35, -21, 66, -80, -21, 77, 66, -114, -96, -7, 66,\n            -94, -22, -30, 66, 108, 41, -85, 66, 116, 26, -112, 66, 105, -68,\n            -15, 66, -81, 60, -13, 66, -103, -112, 48, 66, 80, -5, -106, 66,\n            -118, 29, -111, 66, 73, 56, -65, 66, -126, 56, 50, 66, -103, -96, 5,\n            66, -104, -15, 38, 66, -122, 109, 13, 66, 96, 110, -91, 66, -71,\n            -88, 58, 66, -107, -46, -88, 66, -89, -111, 88, 66, -120, -110, -11,\n            66, -106, -79, -27, 66, -124, -115, -48, 66, -96, 82, -90, 66, -107,\n            1, -118, 66, 111, 108, -107, 66, -106, 47, -94, 66, -75, -96, -46,\n            66, -109, 41, 84, 66, -101, 29, -46, 66, -99, 82, 37, 66, 115, -9,\n            -29, 66, -128, 63, -72, 66, -68, -22, -103, 66, 120, -94, -3, 66,\n            108, -125, 74, 66, -93, 79, -107, 66, -102, -77, -21, 66, -120, 107,\n            13, 66, -89, -10, 83, 66, 98, -26, -86, 66, -118, -110, 113, 66,\n            -72, 62, 65, 66, -100, 33, -121, 66, -127, -119, -57, 66, -127, 12,\n            -103, 66, -103, -88, -99, 66, 74, -60, 79, 66, -102, -6, -12, 66,\n            78, -113, 68, 66, -95, 69, -128, 66, -120, 15, -2, 66, 125, -125,\n            -18, 66, -99, -40, -6, 66, 91, 105, -85, 66, -74, 41, 67, 66, -122,\n            87, 107, 66, 109, -79, 117, 66, -99, 18, -56, 66, 105, 126, 67, 66,\n            -86, -109, 13, 66, -75, 76, 94, 66, -117, 47, -59, 66, 121, 44, 113,\n            66, -109, 21, 57, 66, -103, -73, -128, 66, 107, 57, 81, 66, -74,\n            -10, 59, 66, -111, 56, 77, 66, -99, 37, -102, 66, -85, -43, -101,\n            66, 110, -49, 63, 66, 84, 41, -48, 66, -81, -69, -67, 66, -88, 95,\n            -89, 66, 84, -95, 100, 66, -123, 92, -99, 66, -119, -75, 5, 66,\n            -101, 86, 97, 66, -70, 13, -112, 66, -94, -10, -6, 66, -115, 60, 43,\n            66, -114, 12, 87, 66, 84, 1, 75, 66, -104, -62, -64, 66, 125, -18,\n            46, 66, -93, 65, 79, 66, -100, -8, 92, 66, -90, -65, -66, 66, -123,\n     
       91, -87, 66, -64, -114, 51, 66, 110, -18, -48, 66, 115, 32, 122, 66,\n            -92, 92, -58, 66, -98, 88, 14, 66, -86, 113, 84, 66, -97, -75, 38,\n            66, -94, -48, -44, 66, -101, 24, 83, 66, -94, -114, -57, 66, -85,\n            126, 39, 66, -126, -97, 99, 66, -95, -28, 46, 66, -58, -35, 41, 66,\n            -118, 106, 25, 66, -91, 5, 64, 66, -94, -3, 9, 66, 87, -17, -93, 66,\n            -109, -56, 57, 66, 111, 37, -34, 66, -108, 24, 43, 66, -67, 6, 80,\n            66, 96, 45, -94, 66, 108, 51, 59, 66, 99, -51, 15, 66, -103, -84,\n            -69, 66, -127, 93, 35, 66, -95, 112, 95, 66, -88, 84, 124, 66, -68,\n            -99, 100, 66, -86, 110, -38, 66, -83, 0, -17, 66, -115, -13, 38, 66,\n            102, 94, 2, 66, 99, 102, 7, 66, -65, 74, -5, 66, 88, -22, 47, 66,\n            -74, 2, 75, 66, -62, 95, 103, 66, -113, -90, -5, 66, 101, -85, 117,\n            66, -104, -107, -26, 66, -81, 13, -99, 66, -94, -38, 75, 66, 93,\n            -126, -33, 66, -84, -63, -84, 66, -81, -8, 47, 66, -93, 108, -11,\n            66, -94, -113, 116, 66, -75, -121, 44, 66, -125, 37, 104, 66, 122,\n            -97, 47, 66, 110, 94, 97, 66, -124, 96, 97, 66, 93, 49, 87, 66,\n            -124, -75, 27, 66, -125, -4, -120, 66, -122, -102, 35, 66, 70, 44,\n            -19, 66, -128, 47, -89, 66, -65, 7, 76, 66, -122, -48, -55, 66, -92,\n            -53, 96, 66, -81, -36, -89, 66, -122, 120, -72, 66, -113, -120,\n            -102, 66, -118, -128, -65, 66, 96, 104, -92, 66, -120, -14, 127, 66,\n            -107, -44, -74, 66, -107, 96, 9, 66, -124, -5, 104, 66, -108, 81,\n            -86, 66, -101, -102, 0, 66, -113, -94, -97, 66, -94, -25, -113, 66,\n            -95, -108, 120, 66, -79, -35, 35, 66, -69, 43, 33, 66, -89, 44, -80,\n            66, 121, -22, -64, 66, 107, 107, 109, 66, -127, -32, 113, 66, -96,\n            -61, -92, 66, -90, 34, -105, 66, -118, 33, 112, 66, 110, 48, -42,\n            66, -95, 57, 122, 66, 76, 90, -48, 66, -76, -62, -46, 66, 126, -90,\n  
          -99, 66, 82, 98, -79, 66, 96, -24, -52, 66, 82, -2, 34, 66, -106,\n            -86, 77, 66, -128, 107, 54, 66, -118, 111, -94, 66, -120, 34, 27,\n            66, 87, -35, -65, 66, -91, 55, 120, 66, -80, 124, 119, 66, -101,\n            104, 55, 66, -95, -115, -61, 66, -98, 7, -32, 66, 109, -19, -86, 66,\n            -95, -16, -85, 66, -113, -122, 28, 66, 93, 118, -118, 66, 90, -20,\n            78, 66, 90, 65, 84, 66, -72, -15, -28, 66, 117, -110, -20, 66, -106,\n            53, -9, 66, 118, -19, -5, 66, -108, 113, -75\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 935327743, 199668476, 818660543, 976326301, 34955939,\n            316812057, 612405004, 738726841, 36\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 904916975, 1072630527, 309256703, 576437929, 538206497,\n            25306769, 296010768, 364118708, 6444\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -4725511223573268555,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 723406712, 463420568, 836352260, 142147135, 391259158,\n            742252631, 262160029, 528972865, 
114759685, 174013496, 769258,\n            184775505, 47955563, 48818671, 803157143, 854078044, 169185932,\n            126833083, 169765394, 177023887, 382033702, 534393311, 426718093,\n            655907919, 603617051, 423890693, 187606437, 396843701, 154402857,\n            285894412, 853007638, 814257351, 164096517, 115263279, 652047690,\n            241228250, 634709117, 849127301, 291115295, 59755515, 506144251,\n            734488252, 13296\n          ],\n          \"cutValueData\": [\n            66, -71, -54, -8, 66, -83, -29, 30, 66, -75, -45, -2, 66, -124,\n            -110, -108, 66, -66, 4, 6, 66, -66, -103, 7, 66, -115, -105, -115,\n            66, 125, 117, -9, 66, -65, -57, 123, 66, -114, 113, -18, 66, -103,\n            -11, 54, 66, -65, 86, -110, 66, -115, -50, -22, 66, 97, 17, 26, 66,\n            -75, 53, 49, 66, 124, -111, -58, 66, -107, 127, -60, 66, 127, 48,\n            42, 66, 107, 101, 127, 66, -106, 123, -91, 66, -90, -70, -109, 66,\n            75, -62, 99, 66, 115, 87, -100, 66, -121, 115, 100, 66, -92, -128,\n            -56, 66, -122, -64, 38, 66, -85, 121, -28, 66, -112, -35, 117, 66,\n            114, 55, -93, 66, -106, -97, 16, 66, 122, 81, 7, 66, 99, -94, 5, 66,\n            91, 77, -124, 66, 104, -80, -112, 66, 104, -114, 84, 66, -115, 74,\n            25, 66, -61, -57, -118, 66, -117, -90, 89, 66, 113, 23, -73, 66,\n            -88, 121, -125, 66, 84, 54, 24, 66, -110, -36, 78, 66, -94, 18,\n            -106, 66, -96, -99, 9, 66, -108, -41, -126, 66, 114, 16, 89, 66,\n            103, -5, -128, 66, -78, 38, 16, 66, -128, -119, 18, 66, -101, -61,\n            13, 66, -87, 28, 89, 66, 88, 114, -81, 66, -120, -60, 50, 66, -94,\n            -66, 47, 66, -94, 47, -58, 66, 95, -8, -90, 66, -104, -11, -55, 66,\n            121, -11, 51, 66, -109, -92, -126, 66, -105, 79, -27, 66, -61, -69,\n            115, 66, 71, 22, 56, 66, -75, 48, 93, 66, -99, 36, 30, 66, -78, -78,\n            114, 66, 117, -26, -78, 66, -117, -60, -123, 66, -98, 
105, 7, 66,\n            -83, -96, 33, 66, 108, 21, 23, 66, 95, 49, 104, 66, 109, 69, -79,\n            66, -108, -40, 123, 66, 115, 51, 114, 66, -128, 8, -101, 66, -99,\n            -52, -5, 66, -121, 94, -115, 66, -91, -53, -71, 66, -105, 47, -25,\n            66, -88, 46, -116, 66, -124, 67, -30, 66, 106, 116, -87, 66, 88, 96,\n            34, 66, -121, -117, 122, 66, -102, -101, 6, 66, 80, -127, -93, 66,\n            -82, -94, 22, 66, -61, 116, 0, 66, -101, 88, -56, 66, 116, 11, -104,\n            66, -87, 96, -32, 66, -124, 1, 47, 66, 107, -11, 87, 66, -97, 15,\n            20, 66, -101, -45, -128, 66, -96, -11, 4, 66, -122, 81, 99, 66,\n            -122, -21, -97, 66, -71, 52, -77, 66, -88, 108, -95, 66, -109, 57,\n            -7, 66, -110, -16, 97, 66, 126, -86, 70, 66, -77, -80, -18, 66,\n            -114, 112, -120, 66, -92, -120, 96, 66, -122, -122, 67, 66, 127, 64,\n            86, 66, -90, 91, 76, 66, -125, 7, -109, 66, -73, 30, -110, 66, -63,\n            2, 105, 66, -86, 80, 71, 66, -102, -19, -57, 66, -98, -39, 28, 66,\n            125, -1, 34, 66, 104, 15, -42, 66, -103, -76, -117, 66, -92, -103,\n            -32, 66, 92, -108, 81, 66, -110, 17, -88, 66, 104, 73, 5, 66, -90,\n            13, 57, 66, 105, 3, 35, 66, 124, 49, -13, 66, -101, -81, 106, 66,\n            -111, 99, 58, 66, -120, 20, -62, 66, -107, 83, -114, 66, -88, -30,\n            -98, 66, -104, 120, 109, 66, 102, 9, -9, 66, -87, -125, 80, 66, -92,\n            -117, 20, 66, 108, 36, 123, 66, -67, -44, -3, 66, -95, -30, -106,\n            66, -111, -87, -126, 66, 94, -99, 45, 66, -91, -116, 127, 66, -114,\n            -8, 127, 66, -102, -83, -46, 66, -101, -93, -57, 66, -97, -17, -64,\n            66, 110, 112, -44, 66, -98, 113, -103, 66, -105, -76, 18, 66, -63,\n            108, -97, 66, -102, 38, 73, 66, -87, 108, -53, 66, -103, 52, -128,\n            66, -92, 32, 111, 66, -103, -98, -45, 66, -115, 14, -3, 66, 123,\n            -109, -82, 66, 71, 104, -36, 66, 110, -57, -59, 66, 
-103, -63, -99,\n            66, -83, -60, -82, 66, -92, -76, 88, 66, -94, 57, 113, 66, -122,\n            -85, 106, 66, -68, 30, 95, 66, 126, 37, 88, 66, 125, -47, 29, 66,\n            -116, 89, 96, 66, -88, -111, 68, 66, 121, 119, 124, 66, -92, 85, 27,\n            66, -97, 77, -75, 66, -115, -107, -34, 66, -106, 59, 74, 66, -118,\n            -80, 80, 66, -127, -95, 70, 66, -127, -26, 86, 66, -70, -114, -15,\n            66, -93, -11, 11, 66, -99, -30, 0, 66, -101, -80, 25, 66, -96, 8,\n            57, 66, 86, -100, -126, 66, 116, -76, -48, 66, -100, 35, 19, 66,\n            -124, -23, 64, 66, -98, 68, 46, 66, 115, -126, -92, 66, -79, -110,\n            44, 66, -120, 102, -26, 66, -89, 69, 44, 66, -65, 38, 72, 66, -125,\n            75, 34, 66, -104, 53, 36, 66, -111, -53, -78, 66, -95, -11, 114, 66,\n            118, 20, -102, 66, -101, 3, 72, 66, -97, 114, -79, 66, 84, -120,\n            125, 66, 80, 29, -116, 66, -125, -68, 0, 66, -125, -3, 82, 66, 112,\n            -83, -85, 66, 114, 53, 81, 66, 111, -100, -118, 66, -99, -61, 0, 66,\n            -127, 85, 50, 66, -125, 65, 75, 66, -106, 45, 8, 66, -118, 120, 104,\n            66, 111, -83, -26, 66, -120, -79, -11, 66, -107, -11, 29, 66, -104,\n            78, -43, 66, -85, -101, 92, 66, -89, 93, -19, 66, -104, -6, -25, 66,\n            -111, -113, -67, 66, -102, 120, 0, 66, -116, -2, 102, 66, 110, 39,\n            83, 66, -121, 84, 28, 66, -105, 52, -72, 66, -123, 50, -10, 66,\n            -120, 83, -67, 66, -93, 3, 110, 66, 97, -43, 62, 66, 118, -74, 42,\n            66, 111, -113, 63, 66, -79, -32, -49, 66, 94, 1, -47, 66, 88, 23,\n            -53, 66, -112, -127, -13, 66, -105, -5, -89, 66, -83, 19, -62, 66,\n            -64, -74, 85, 66, -118, 54, 45, 66, -86, -88, -11, 66, 69, -86, 104,\n            66, 114, 20, 52, 66, -85, 15, 63, 66, -71, -83, 8, 66, -84, -110,\n            102, 66, -116, -125, -77, 66, -91, -35, -6, 66, -110, 2, -83, 66,\n            -116, 78, 39, 66, -102, -31, -19, 66, 95, 99, 
121, 66, -88, -104,\n            -67, 66, -103, -80, 90, 66, -124, 23, -71, 66, -106, 102, 86, 66,\n            -99, -84, -102, 66, -109, 93, 9, 66, -83, -47, -94\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 461832191, 418676726, 162175476, 384891898, 675263187,\n            680187224, 123221008, 604112001, 2049\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 1000859551, 682065855, 552189678, 373257974, 548345042,\n            59130747, 350560777, 542249088, 1408\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -8285893777058293760,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 81583742, 385386586, 363936474, 878815858, 242833911,\n            80808377, 346646609, 116776262, 464958554, 106614201, 441779250,\n            201190265, 652076871, 338423675, 313904821, 391588830, 669161562,\n            330627649, 596832195, 372463395, 374411119, 452578515, 511408254,\n            523175133, 716953662, 448388817, 311499591, 1033932471, 65773007,\n            236796129, 35726673, 120187742, 1006204994, 500991723, 573783925,\n            
712718303, 225549543, 590130523, 257993027, 65874550, 469166766,\n            246363898, 19\n          ],\n          \"cutValueData\": [\n            66, -73, 80, 45, 66, 72, 77, 120, 66, -127, 121, 127, 66, -86, 43,\n            -31, 66, 84, 1, 117, 66, 91, -15, -116, 66, -97, -32, 124, 66, 103,\n            -122, 6, 66, 69, -17, 84, 66, -94, -14, -100, 66, -123, -99, -41,\n            66, 103, -33, 97, 66, -119, 7, 60, 66, -105, -105, 22, 66, 107, -41,\n            -70, 66, -78, -42, -94, 66, -88, -126, 108, 66, 109, 65, -128, 66,\n            -90, 61, 70, 66, -63, 93, -127, 66, -96, -33, -128, 66, -113, 97,\n            -17, 66, -81, 107, 5, 66, -69, 55, 75, 66, -74, -94, 6, 66, -87, 32,\n            -106, 66, 99, 79, -7, 66, -96, -7, 110, 66, 96, 0, -85, 66, 81, 51,\n            14, 66, 100, -59, 120, 66, 120, -86, 93, 66, -67, -119, -28, 66,\n            -87, 52, -25, 66, -94, 96, 41, 66, 89, -30, -45, 66, -82, -76, 25,\n            66, 127, -112, -107, 66, -110, 89, 15, 66, -109, -37, 109, 66, -80,\n            -65, 100, 66, -65, -58, -112, 66, -103, -65, -30, 66, -122, 46,\n            -108, 66, -79, 32, -107, 66, -118, -29, -120, 66, -97, 34, -63, 66,\n            94, 61, 114, 66, -104, -12, 88, 66, 89, -64, 57, 66, -111, -56, 110,\n            66, 126, 28, 6, 66, -84, -42, 106, 66, -77, -10, 54, 66, -77, -43,\n            -94, 66, -97, 125, 69, 66, -104, -122, -77, 66, 123, 56, 90, 66,\n            101, 9, -7, 66, -120, 5, -16, 66, 99, -65, 63, 66, 79, -128, 78, 66,\n            -121, 124, -14, 66, -75, 49, -109, 66, -124, -35, -79, 66, 124, -94,\n            -41, 66, -120, 38, -69, 66, -108, -98, 121, 66, 77, -3, 65, 66,\n            -120, -74, 49, 66, 93, 86, -66, 66, -116, -63, 52, 66, -118, -107,\n            71, 66, 122, -97, 114, 66, -111, -11, -124, 66, -114, -10, -87, 66,\n            -102, 28, 29, 66, -108, -105, -111, 66, 113, 101, -18, 66, -107,\n            114, 103, 66, -116, -11, 101, 66, -80, -32, -13, 66, -85, -78, -87,\n            66, 
-110, -112, 13, 66, 122, -16, 74, 66, 125, -76, 16, 66, -111,\n            72, -118, 66, -90, -18, -4, 66, -126, -77, -119, 66, 126, 5, -38,\n            66, -95, -63, 81, 66, -115, 72, -32, 66, -111, -3, -92, 66, -90,\n            100, 51, 66, -107, 57, 59, 66, 98, 20, -32, 66, 68, -102, -60, 66,\n            -102, -38, 95, 66, -83, -97, 73, 66, 94, -24, 53, 66, -117, -67, 47,\n            66, -69, 114, -17, 66, -115, 117, -92, 66, -116, 3, -35, 66, 109,\n            -97, -43, 66, -119, -59, -119, 66, 101, -18, 28, 66, -109, -115, 61,\n            66, -101, -95, -95, 66, -112, 127, 104, 66, -112, -2, -85, 66, -78,\n            -54, -78, 66, -99, -30, -81, 66, -106, -85, 92, 66, -68, -109, 72,\n            66, -107, 125, 68, 66, -102, -92, -64, 66, -109, 87, -99, 66, -94,\n            -7, -10, 66, -80, 110, -109, 66, 122, -33, 109, 66, -77, 0, -108,\n            66, -114, 124, 68, 66, -72, -51, 46, 66, 81, -63, -50, 66, 127, -68,\n            34, 66, -109, -100, 0, 66, 115, 66, -77, 66, -100, 87, -113, 66,\n            -71, -114, -107, 66, -88, -24, 119, 66, -111, 41, 94, 66, -116, 48,\n            115, 66, -106, 57, -55, 66, 124, 122, -31, 66, -111, -115, 70, 66,\n            76, -76, -42, 66, -83, 85, 36, 66, -98, 82, 117, 66, -85, 69, -25,\n            66, 103, -70, -17, 66, 124, -51, 46, 66, -125, -46, -117, 66, 126,\n            -25, 95, 66, -102, 70, 9, 66, -120, -76, 118, 66, -116, -36, 0, 66,\n            -98, 55, 71, 66, -109, 4, -64, 66, 107, 8, 23, 66, -117, -85, 22,\n            66, -85, -38, -91, 66, -124, -79, 62, 66, -91, -30, -30, 66, -93,\n            58, 6, 66, -100, 9, 38, 66, 122, 94, -86, 66, -91, 47, -15, 66, -79,\n            -42, 78, 66, -115, -115, 78, 66, 110, -40, -62, 66, -108, -4, 87,\n            66, -117, -61, 14, 66, 96, 54, 87, 66, -83, -23, -111, 66, 77, 35,\n            -121, 66, -102, -67, 21, 66, -80, -6, -25, 66, -118, -22, 122, 66,\n            -64, -74, 4, 66, -97, -33, -107, 66, -106, -15, -22, 66, -81, -29,\n            
122, 66, 107, -89, -98, 66, -121, -85, -58, 66, -109, 3, -44, 66,\n            -115, -13, -52, 66, -73, -91, -76, 66, -121, -31, 10, 66, 71, 112,\n            66, 66, -64, 51, 100, 66, -101, 51, -76, 66, -72, -85, 44, 66, 126,\n            33, -127, 66, 82, 98, -87, 66, 88, -82, 107, 66, -112, -29, -94, 66,\n            -92, 47, -29, 66, 113, 75, -86, 66, -109, 119, -90, 66, 118, 26,\n            -24, 66, 111, 81, -37, 66, -108, -92, 18, 66, -96, 75, 18, 66, 121,\n            48, 73, 66, -98, 70, -12, 66, 105, -90, 73, 66, -97, -20, -81, 66,\n            -116, -100, 34, 66, -123, 20, 32, 66, -89, 12, -36, 66, -122, -26,\n            48, 66, -90, 118, 86, 66, 112, 122, 9, 66, -127, -5, -3, 66, 95,\n            -128, 71, 66, 99, 0, -88, 66, -121, 99, -91, 66, 123, 31, 11, 66,\n            -110, 92, 46, 66, 112, -41, -72, 66, 117, 43, 76, 66, -66, 13, -122,\n            66, -126, 4, 72, 66, -123, 101, 95, 66, 122, -99, -122, 66, -128,\n            -59, -67, 66, 99, -83, 103, 66, -120, 32, -39, 66, -89, -103, -58,\n            66, -92, 22, 98, 66, -86, 24, -69, 66, -79, -94, 49, 66, -100, 114,\n            -19, 66, -117, -44, -92, 66, -119, 15, -63, 66, -98, 96, -19, 66,\n            107, -47, 35, 66, 127, 11, 20, 66, -112, 103, -20, 66, -64, -121,\n            78, 66, 111, -110, -118, 66, -86, -5, -37, 66, 110, -60, 1, 66,\n            -112, -109, -120, 66, -121, -7, -10, 66, -95, -53, -125, 66, 121,\n            44, 6, 66, -104, -70, 100, 66, -123, -82, -22, 66, 121, 10, -128,\n            66, -102, -123, -96, 66, 93, -9, -13, 66, -104, 25, 125, 66, -122,\n            -46, -128, 66, -92, 32, 89, 66, -99, 32, -104, 66, -74, 111, 56, 66,\n            -78, -107, 25, 66, 126, -88, -76, 66, -78, 69, -91, 66, -79, 34,\n            -58, 66, 105, -61, 79, 0, 0, 0, 0, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 253,\n          \"leftIndex\": [\n            -1, 1, 255, 
770049260, 731794148, 1033121272, 583495964, 985223492,\n            769498052, 767688749, 710470975, 768443963, 1142569354, 581750635,\n            1118424983, 759694936, 364\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 774781901, 725409556, 1026146200, 583515404, 1027804378,\n            585973570, 769322195, 710352553, 755176892, 712405651, 595558466,\n            982900942, 970683632, 365\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 3042871460721369999,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 818563458, 267024833, 592483417, 601842370, 638545174,\n            358713604, 524252847, 83682238, 485162116, 232785329, 69940461,\n            649755178, 585897883, 531552002, 166221134, 527594082, 192270582,\n            306093898, 577226065, 540259506, 700193265, 356647355, 12031430,\n            728793122, 694852009, 262245903, 843596849, 129431883, 573061619,\n            423118739, 850836800, 514586036, 834951447, 477313838, 611354023,\n            586263186, 417820913, 148138274, 199159551, 242168422, 628870152,\n            257972368, 8336\n          ],\n          \"cutValueData\": [\n            66, 127, -95, 61, 66, -103, 42, 93, 66, -83, -123, 81, 66, 93, 76,\n            
25, 66, -122, -4, -51, 66, 92, -89, -23, 66, 81, 119, 81, 66, -103,\n            5, 33, 66, -65, 118, 108, 66, -123, 3, 73, 66, -106, 64, -123, 66,\n            -100, 82, -101, 66, -83, 112, -119, 66, -107, -92, 3, 66, -110, 73,\n            36, 66, -91, -100, 22, 66, 122, -66, 107, 66, -99, 116, 80, 66, 121,\n            -92, 70, 66, -111, 123, -117, 66, -97, -51, -91, 66, -89, 30, -8,\n            66, 120, 80, 7, 66, -122, -37, -73, 66, -71, 77, 64, 66, -106, 80,\n            24, 66, 102, 118, 103, 66, -66, -57, -126, 66, 87, -61, 19, 66, 95,\n            97, -48, 66, -86, 5, -27, 66, -118, -114, -17, 66, 80, -57, 25, 66,\n            -60, 111, 102, 66, -93, -66, 98, 66, -69, -100, 108, 66, -102, 50,\n            -126, 66, -76, 23, 20, 66, -80, -49, 68, 66, 82, 93, -72, 66, -124,\n            -108, 99, 66, -120, 56, 52, 66, -90, 63, -114, 66, 125, 75, -48, 66,\n            112, 82, 23, 66, -120, -4, 107, 66, -116, 14, -17, 66, 127, 86, 18,\n            66, -68, -80, -57, 66, 117, 67, -41, 66, -96, 27, 109, 66, 92, 16,\n            -81, 66, -123, 47, 74, 66, -86, 10, -68, 66, 126, 71, 122, 66, -112,\n            -81, -96, 66, -123, -29, 6, 66, 101, 103, -56, 66, 80, -69, -43, 66,\n            89, 23, -89, 66, -83, 36, -69, 66, 96, 41, 84, 66, -106, -32, 17,\n            66, -118, 34, 106, 66, -113, 29, 22, 66, -90, -59, -13, 66, -115,\n            -84, -35, 66, 127, -15, 10, 66, -81, 2, 20, 66, -89, 22, 79, 66,\n            -83, 40, -3, 66, -127, 19, 84, 66, -85, 47, 117, 66, -90, 89, -84,\n            66, -112, 117, -103, 66, -114, -115, -11, 66, -108, -78, 44, 66,\n            -117, -117, 6, 66, -64, 18, 69, 66, -68, -86, -13, 66, -88, 84, 65,\n            66, -126, 47, 125, 66, -74, -58, -33, 66, 78, -105, 44, 66, 112,\n            -109, 57, 66, 117, 1, -71, 66, 85, 5, 107, 66, 74, -43, 69, 66, 117,\n            -58, 93, 66, -96, -128, 7, 66, -65, 25, -23, 66, -126, -81, -51, 66,\n            82, -59, -32, 66, 122, 38, 78, 66, -124, -62, -43, 66, -98, -77,\n    
        -119, 66, 100, 103, 24, 66, -104, -54, 96, 66, -80, 74, -63, 66,\n            -86, 33, -76, 66, 118, 63, 101, 66, -108, -87, -72, 66, -111, -24,\n            78, 66, 110, 74, 47, 66, -99, 27, 33, 66, -94, 37, 121, 66, -111,\n            -4, -46, 66, -123, 73, -107, 66, 87, -127, -63, 66, -117, 47, 72,\n            66, -95, -84, -8, 66, 124, 18, -111, 66, -120, -71, -32, 66, 94, -1,\n            -2, 66, 124, 57, 31, 66, -96, -81, 93, 66, 110, 9, -16, 66, -101,\n            107, -45, 66, 91, -111, 15, 66, -82, 90, -80, 66, -127, -98, 41, 66,\n            -111, -80, 74, 66, -93, 96, 115, 66, -98, -33, 64, 66, -95, -62,\n            -43, 66, 107, -60, 30, 66, -71, -67, -102, 66, 89, -46, -84, 66,\n            119, -81, 3, 66, 85, 22, -16, 66, -96, 30, -7, 66, -81, -66, -67,\n            66, 113, -39, 62, 66, -79, -74, -96, 66, -95, 51, 29, 66, 108, -36,\n            -4, 66, 95, -35, 46, 66, -116, -57, 106, 66, 108, 37, 39, 66, -81,\n            -34, -33, 66, -110, 69, 117, 66, -107, -80, 82, 66, -75, -76, -59,\n            66, -107, 91, 52, 66, -113, 49, 59, 66, -81, -86, -79, 66, -71, -83,\n            31, 66, 100, 9, 9, 66, 120, 105, 13, 66, -64, 47, 5, 66, -84, 113,\n            107, 66, -85, 15, -22, 66, -65, 48, 36, 66, -93, -62, -77, 66, 92,\n            19, -27, 66, -128, -117, 84, 66, -93, -48, 110, 66, 118, 30, -67,\n            66, -106, -30, -24, 66, 112, -45, -63, 66, -100, 114, -97, 66, -114,\n            93, -110, 66, -96, 35, -71, 66, -92, 45, -44, 66, -85, 111, -71, 66,\n            -63, -75, 16, 66, -124, -88, 41, 66, -119, -47, -31, 66, -65, -10,\n            7, 66, 98, -33, 107, 66, -116, -10, -81, 66, -114, -45, 76, 66, -95,\n            -76, 59, 66, -69, -88, -8, 66, -79, -79, 74, 66, 82, 47, -121, 66,\n            -97, 71, 72, 66, -84, -14, 59, 66, -112, 12, 69, 66, -59, -34, -126,\n            66, 114, 104, 50, 66, -100, 63, 120, 66, -116, -69, 119, 66, -63,\n            -34, 62, 66, 110, 127, 111, 66, -109, -8, -81, 66, -114, -108, 
-116,\n            66, 122, 68, -2, 66, -128, 67, 86, 66, -91, -33, -6, 66, -128, 80,\n            80, 66, 123, 45, 19, 66, -117, 115, 13, 66, -81, 60, -24, 66, -123,\n            107, 19, 66, -104, 26, -73, 66, 109, -83, -61, 66, -114, -123, -75,\n            66, 105, 37, -69, 66, -94, -51, 112, 66, -103, -124, -86, 66, -103,\n            105, 95, 66, 121, 23, 98, 66, -123, -63, -87, 66, 114, -72, 64, 66,\n            -82, -58, 71, 66, -77, -70, 74, 66, -121, 3, 116, 66, -123, 102,\n            -87, 66, 116, -10, 18, 66, -117, -87, 19, 66, -100, 20, 25, 66, 110,\n            -108, -33, 66, 92, -2, 91, 66, 107, 123, 114, 66, -122, -76, 88, 66,\n            -71, -92, 62, 66, 105, 75, 7, 66, -95, 75, -87, 66, -116, -75, 76,\n            66, -109, 13, -100, 66, 123, 113, -58, 66, 124, -29, 26, 66, -112,\n            -87, 30, 66, -78, 46, 67, 66, -100, -72, 21, 66, 124, 86, 69, 66,\n            -99, -80, -83, 66, -104, 50, 17, 66, -85, 115, -27, 66, -92, -18,\n            -126, 66, -84, 25, 66, 66, -119, 48, 127, 66, 110, 51, -20, 66,\n            -100, 42, -119, 66, -102, -119, -50, 66, -113, 80, 110, 66, -95, 95,\n            -48, 66, -113, -27, -21, 66, -111, 15, -82, 66, 87, -96, 123, 66,\n            -112, 18, -99, 66, 125, -126, 118, 66, -97, 111, 12, 66, -89, 103,\n            74, 66, -71, 59, 51, 66, -99, 44, 4, 66, 119, 110, -4, 66, -119,\n            -38, -29, 66, -83, -56, -56, 66, -83, -26, 15, 66, -77, -124, 70,\n            66, -101, 90, -72, 66, 70, 12, -55, 66, -107, 111, -118\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 263979007, 238943482, 861173295, 782356496, 69879311,\n            5211467, 664087149, 20185088, 2\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 534241023, 993588829, 868644511, 78510832, 310362411,\n            41550898, 800049877, 206995, 280\n          ],\n      
    \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 3094212984032582080,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 305507652, 699820696, 805367403, 391631488, 572779008,\n            767723353, 144957048, 392687978, 230679448, 419884415, 861636011,\n            876161820, 291679656, 55783162, 171001627, 2687080, 460052471,\n            496706844, 592608466, 84448833, 401075265, 68391469, 527969991,\n            191298208, 691434969, 590001468, 754500652, 744342113, 839885172,\n            639197907, 485833829, 638708915, 710264857, 838079986, 412540093,\n            145864818, 402939544, 130866749, 500538829, 541498500, 66971836,\n            755679140, 5712\n          ],\n          \"cutValueData\": [\n            66, 111, -72, 91, 66, -94, -29, -117, 66, 74, 72, -70, 66, 117,\n            -120, -73, 66, 88, 12, 18, 66, -110, 19, -79, 66, 111, 124, 12, 66,\n            -86, -81, 7, 66, 69, 32, 2, 66, 110, 126, 103, 66, -118, -2, -4, 66,\n            69, 99, -38, 66, 89, -124, 41, 66, -75, 66, 19, 66, -123, -30, 30,\n            66, -91, 78, 104, 66, -82, -86, -115, 66, -90, 92, 98, 66, -79, 127,\n            9, 66, -91, -52, -108, 66, 90, 123, -111, 66, 83, -18, -40, 66,\n            -108, -1, -15, 66, -113, 112, -69, 66, -79, 
-109, -32, 66, -100,\n            -128, -11, 66, 74, -97, 72, 66, -125, -120, -21, 66, -78, -33, 82,\n            66, -67, -26, -30, 66, -122, 80, 118, 66, -113, 44, 109, 66, -78,\n            -127, -7, 66, -62, -70, -81, 66, 111, -69, -99, 66, -85, 47, 44, 66,\n            -86, -118, 43, 66, 76, -115, -89, 66, 68, 48, 26, 66, 110, 7, -31,\n            66, -92, 15, -37, 66, -93, -82, 78, 66, 119, -69, 126, 66, -99, -96,\n            43, 66, 95, -96, -43, 66, -64, 92, 62, 66, -110, -67, 83, 66, 109,\n            -122, 9, 66, -124, -91, -96, 66, -92, 5, 36, 66, -91, 117, -64, 66,\n            83, 110, -19, 66, -97, 108, -23, 66, -100, -4, -44, 66, -74, -104,\n            -46, 66, 127, -25, -47, 66, -72, -83, -68, 66, -86, 56, 59, 66, -77,\n            21, 54, 66, 71, 106, 84, 66, -116, 64, -87, 66, -108, -116, 75, 66,\n            -103, -71, 11, 66, -118, -61, 85, 66, 111, -99, 22, 66, 87, 80, -41,\n            66, -117, 91, 19, 66, -62, -119, 68, 66, -109, -86, -70, 66, 126,\n            21, 40, 66, 117, 124, 23, 66, -72, -67, -15, 66, -65, -76, 92, 66,\n            -106, -67, -48, 66, -99, 49, 23, 66, 101, -39, -11, 66, 69, -86,\n            118, 66, -108, -15, 105, 66, -128, -20, -107, 66, 103, -56, 47, 66,\n            95, 86, 19, 66, -70, 63, -11, 66, -68, 60, -50, 66, -109, -21, 127,\n            66, -106, 50, 123, 66, 93, 23, 72, 66, -95, -31, -113, 66, -124, 62,\n            -51, 66, -114, 36, -38, 66, -125, 29, -14, 66, 93, -7, 52, 66, -101,\n            65, -100, 66, -87, 54, 108, 66, -88, -126, 78, 66, -99, -33, 49, 66,\n            -124, -70, 37, 66, -97, 108, -95, 66, -100, -72, -11, 66, -122, -21,\n            31, 66, -120, -115, -71, 66, -90, 103, -36, 66, -127, 116, 119, 66,\n            -96, 27, -68, 66, -117, -105, 3, 66, -125, -15, -32, 66, -101, -13,\n            108, 66, 80, 54, -25, 66, -70, 119, 59, 66, -81, -90, -84, 66, -94,\n            -49, 107, 66, -95, -89, 118, 66, 115, -36, -60, 66, -69, 40, -46,\n            66, -96, -93, -85, 66, 
-111, 18, 103, 66, -98, 79, 113, 66, 114, 79,\n            -66, 66, -105, 73, -116, 66, -97, -62, 11, 66, -119, 15, 25, 66,\n            -128, 89, 76, 66, 86, 40, -34, 66, -84, -40, 21, 66, -97, 95, 125,\n            66, -122, -122, -121, 66, 117, -94, -35, 66, -124, 0, 34, 66, -99,\n            -69, 86, 66, -69, 35, -7, 66, 111, -17, -43, 66, -79, -12, -60, 66,\n            -75, 51, 86, 66, 93, 20, 117, 66, -105, -74, -123, 66, 116, -17,\n            122, 66, -120, 4, -60, 66, 87, 63, -23, 66, -122, 82, -77, 66, -122,\n            -102, -14, 66, -83, -13, -88, 66, -122, 107, 53, 66, -109, -68, 23,\n            66, -91, -119, 62, 66, -76, -40, 124, 66, -108, -127, -122, 66,\n            -108, -33, 88, 66, -88, 127, 20, 66, -69, -66, -15, 66, -114, 78, 5,\n            66, -93, 7, 22, 66, -71, -77, -68, 66, -85, 124, -121, 66, -82, 77,\n            -5, 66, -126, -88, -111, 66, 88, -105, 22, 66, -116, -7, 21, 66,\n            -112, -127, -92, 66, -125, -77, 38, 66, -110, 41, -39, 66, -86, -88,\n            39, 66, 96, 90, 32, 66, -90, 2, -7, 66, 124, 20, 65, 66, -118, -17,\n            58, 66, -110, 98, -37, 66, -118, 105, -121, 66, -106, -68, -49, 66,\n            82, -75, -54, 66, -93, -75, 106, 66, -105, -98, 33, 66, 87, -47, 22,\n            66, -96, -127, 87, 66, -117, -120, -52, 66, 124, -61, 85, 66, -105,\n            92, -66, 66, -118, 119, 11, 66, -107, 78, 49, 66, -103, 21, -50, 66,\n            107, -99, -29, 66, -113, 25, -10, 66, -99, -117, 98, 66, 126, -15,\n            117, 66, -62, 70, -13, 66, -84, 70, -55, 66, -124, 20, -25, 66, -97,\n            42, -81, 66, -122, -6, 25, 66, 125, 46, -99, 66, 82, 41, -33, 66,\n            101, 109, 103, 66, 76, 23, -115, 66, -99, -4, -26, 66, 105, -121,\n            -55, 66, -107, 83, 41, 66, -125, 40, -88, 66, -107, -101, 21, 66,\n            -119, -55, -47, 66, -105, 46, 57, 66, -80, 101, -34, 66, 126, -65,\n            -67, 66, -67, 93, 33, 66, 104, 74, 94, 66, -127, 100, -17, 66, -63,\n            -78, 6, 66, 
-84, 124, 64, 66, -103, -118, 36, 66, -118, -72, -57,\n            66, -112, 34, 51, 66, -126, -20, 25, 66, 102, -123, 31, 66, -82, 53,\n            5, 66, -94, 51, -79, 66, 126, 101, -8, 66, -83, -117, 14, 66, -104,\n            -61, -72, 66, -77, -46, -41, 66, -123, 119, -12, 66, -69, -27, -91,\n            66, -74, 117, -51, 66, -126, -14, 27, 66, 115, -82, 86, 66, -121,\n            126, 41, 66, -119, 64, 109, 66, -126, -42, -20, 66, -95, -76, 85,\n            66, -109, 91, 31, 66, -109, -45, -107, 66, -103, -77, -96, 66, 117,\n            -92, -93, 66, -97, 43, -82, 66, -79, -62, 49, 66, -95, -126, -28,\n            66, -116, 83, -18, 66, -92, -101, -51, 66, -110, 34, 85, 66, -90,\n            -104, 0, 66, -85, -123, -8, 66, -115, 33, -124, 66, 95, 74, 50, 66,\n            -82, 35, 27, 66, -123, -17, 18, 66, -128, 31, -25, 66, -122, -46, 5,\n            66, -78, 52, 86, 66, 116, 52, 40, 66, -67, 33, 31, 66, -117, 17,\n            -13, 66, -69, -82, -3, 66, 117, -68, 42, 66, -90, 127, 120, 66, -77,\n            95, 92, 66, 99, -121, 55, 66, -82, 38, 36, 66, 105, 89, 5, 66, -119,\n            22, 59\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 536700159, 253532396, 468991997, 584964882, 162396161,\n            597516692, 341248000, 575881263, 39\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 50298335, 25041377, 534523391, 917365552, 428847705,\n            748233233, 961896194, 805407828, 20\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 8379111172481454749,\n        \"id\": 0,\n        \"dimensions\": 32,\n        
\"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 389195638, 312558508, 73024403, 187600000, 191590157,\n            246298069, 715011560, 137214802, 266620403, 352753290, 628494987,\n            574451625, 731148569, 509361556, 77737387, 860768503, 597147906,\n            2497940, 413711482, 27677131, 28837294, 606762722, 424970127,\n            378199207, 52457400, 827644286, 79953675, 485158991, 55700526,\n            881841309, 349160806, 137759188, 69520163, 728916894, 538604482,\n            254972222, 365779964, 186739176, 628734895, 715013232, 367189606,\n            291204402, 29594\n          ],\n          \"cutValueData\": [\n            66, -100, -36, -104, 66, -99, 74, -116, 66, -113, 28, 111, 66, 94,\n            -22, 51, 66, -77, 21, -45, 66, -83, 118, -13, 66, -59, 66, -39, 66,\n            -115, -5, 22, 66, -74, -70, -91, 66, -126, 66, 86, 66, -128, -113,\n            -60, 66, 76, -115, 17, 66, -109, 64, -90, 66, 76, 10, 69, 66, 78,\n            23, 77, 66, 84, 66, -108, 66, -105, 86, 86, 66, 76, -109, -58, 66,\n            89, 57, -43, 66, -69, 73, -2, 66, -114, -102, 66, 66, -90, -50, 41,\n            66, 99, -40, -18, 66, -128, 76, 30, 66, -87, 92, 75, 66, -71, 118,\n            96, 66, -79, -70, -29, 66, 76, 77, -1, 66, -120, -104, -77, 66, -87,\n            -114, 120, 66, 97, -1, -59, 66, -85, 59, 107, 66, 121, 59, -8, 66,\n            -125, -4, 117, 66, -72, -18, 72, 66, -115, -70, -119, 66, -127, 19,\n            -73, 66, 123, -128, 124, 66, -95, -64, -8, 66, 121, 48, 22, 66,\n            -108, -11, 33, 66, -100, 
97, -90, 66, -109, -99, -65, 66, 113, -27,\n            -54, 66, 104, 106, 10, 66, -121, 92, -128, 66, 96, 46, 6, 66, -102,\n            35, 7, 66, -112, -110, 33, 66, 85, -45, -32, 66, 117, -75, -80, 66,\n            -112, -18, 82, 66, -128, -60, 112, 66, -106, 39, 47, 66, -119, -79,\n            -91, 66, 126, -122, 106, 66, -108, 29, 22, 66, 97, 6, 82, 66, -87,\n            100, 75, 66, 97, 95, 57, 66, -62, -102, 78, 66, -125, -106, -42, 66,\n            -124, -41, -10, 66, -124, 80, 51, 66, 123, 74, -28, 66, -97, 42, 40,\n            66, -64, 63, 0, 66, -128, -18, -1, 66, -64, -33, -66, 66, -86, -19,\n            71, 66, -122, -80, -115, 66, -127, 29, -105, 66, -117, -36, 76, 66,\n            -103, -29, 110, 66, -102, -96, 118, 66, 78, 86, -96, 66, -114, 65,\n            -16, 66, 73, 14, -108, 66, -111, -46, 59, 66, 120, 67, -47, 66,\n            -122, 123, 15, 66, -112, -67, 119, 66, -100, -127, 82, 66, -88, -94,\n            -68, 66, 111, -127, 124, 66, -103, 119, 101, 66, -65, -79, -36, 66,\n            -118, 12, 70, 66, -71, -112, -57, 66, -119, -116, -86, 66, -91, -57,\n            -81, 66, 112, -20, 58, 66, 108, 16, -123, 66, 95, -73, 8, 66, -114,\n            -86, -27, 66, 116, -15, -88, 66, -93, 73, -126, 66, 91, -103, 77,\n            66, -118, -81, -32, 66, -118, -9, 42, 66, 117, -32, 27, 66, 120,\n            -123, 57, 66, 107, -1, 77, 66, -79, -126, -104, 66, -101, -85, 12,\n            66, -104, -101, -23, 66, -67, -121, -54, 66, -75, -24, -50, 66, 86,\n            71, -9, 66, -115, 110, -79, 66, -101, -42, 0, 66, -94, 73, -69, 66,\n            -121, 9, -2, 66, 125, 8, 115, 66, 112, 23, -92, 66, -122, 65, 44,\n            66, -122, -16, 102, 66, -111, 101, -58, 66, -89, -62, 79, 66, 127,\n            110, -63, 66, -94, 39, 110, 66, -101, -101, 41, 66, -125, -120, 106,\n            66, -112, -23, -29, 66, -119, 68, -86, 66, 118, -104, -121, 66, 118,\n            -53, -34, 66, 102, -28, -120, 66, -108, 65, 104, 66, 110, 36, -27,\n            66, -87, 
102, 56, 66, -92, 37, 43, 66, -74, 93, 76, 66, -100, 102,\n            35, 66, 110, -68, -117, 66, -111, -21, -115, 66, -111, -119, 52, 66,\n            -128, -62, 6, 66, 118, 116, -121, 66, -92, 107, 27, 66, -72, -59,\n            -86, 66, -100, -36, 127, 66, -85, -58, 44, 66, 110, 125, -61, 66,\n            -83, 17, 91, 66, 117, 52, -26, 66, 112, -84, 87, 66, -116, -124, 18,\n            66, 117, 28, -37, 66, 111, 118, 40, 66, -110, -120, -120, 66, -128,\n            82, 107, 66, -123, -43, -30, 66, -100, 22, -72, 66, -100, -75, -87,\n            66, -106, 119, 84, 66, -93, -18, 114, 66, -63, -62, 56, 66, -119,\n            101, -40, 66, -82, -73, 113, 66, -111, 91, 104, 66, 106, -50, 56,\n            66, -112, 12, 122, 66, 72, 78, 34, 66, -103, 107, 109, 66, 102, 73,\n            118, 66, -117, 5, -9, 66, -115, 93, -72, 66, -78, 92, -102, 66, 112,\n            -60, -37, 66, -119, -108, 85, 66, 120, -107, 33, 66, 71, 125, -23,\n            66, -74, -92, 30, 66, -79, -4, -89, 66, -108, 121, 81, 66, -103,\n            115, 104, 66, -128, -21, 114, 66, 108, -42, -73, 66, -105, 29, -58,\n            66, 106, -75, -106, 66, -106, -24, 49, 66, 114, -122, -15, 66, -125,\n            -29, -87, 66, -105, 93, 39, 66, -106, -92, 78, 66, 91, 100, 61, 66,\n            -106, -68, -61, 66, -118, -14, 105, 66, -92, -28, -38, 66, 102,\n            -106, 23, 66, -110, -35, 125, 66, -107, 104, -18, 66, -103, -55,\n            -46, 66, -126, 118, -20, 66, -102, -4, 38, 66, 102, -60, -73, 66,\n            -80, -4, -36, 66, -95, -37, 93, 66, -117, 21, -7, 66, -86, 124, 102,\n            66, -92, 116, -102, 66, -122, 30, -53, 66, -85, 102, -100, 66, -93,\n            -123, 76, 66, -120, 0, 88, 66, -121, 71, -70, 66, -66, -68, -26, 66,\n            -128, 121, 17, 66, -123, -42, 47, 66, -83, -121, 60, 66, -120, -66,\n            26, 66, -95, -51, 53, 66, -83, -43, 102, 66, -87, -56, 37, 66, -84,\n            63, -101, 66, -104, 42, -25, 66, -73, 67, 52, 66, -61, -64, 79, 66,\n            
85, -104, 22, 66, 111, -69, 45, 66, -114, -82, -108, 66, -109, 2,\n            -111, 66, -108, -52, 48, 66, -84, 121, -99, 66, -89, -76, -35, 66,\n            123, -76, 117, 66, -64, 8, 36, 66, -80, -114, -94, 66, -125, -29,\n            -64, 66, -91, -69, 87, 66, -89, -115, 76, 66, -111, 26, 69, 66,\n            -120, 110, 30, 66, -108, 115, 126, 66, -89, -2, 87, 66, -91, -65,\n            -109, 66, -71, -22, 69, 66, -121, 90, -70, 66, -107, -82, -22, 66,\n            -98, -44, -9, 66, -92, -72, -105, 66, -117, 4, -122, 66, 102, -114,\n            -78, 66, -123, -99, -60, 66, 115, -93, -68, 66, -125, 101, -53, 66,\n            -116, 15, -57, 66, -98, 11, -88, 66, 98, -54, -67, 66, -85, -127,\n            21, 66, -74, 79, -37, 66, -92, 47, -9, 66, -124, -96, -60, 66, 127,\n            97, -13\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 699400191, 266607598, 524071438, 517151816, 29220877,\n            271356121, 145756769, 230801804, 65\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 783286207, 1072594924, 532522526, 32905591, 27803799,\n            1042288659, 568983905, 415616650, 1543\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -5968712885689118287,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        
\"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 486694461, 64721605, 185461313, 126593884, 174283160,\n            400548254, 647367783, 845295726, 419418155, 139086039, 748996396,\n            724910844, 710223360, 140803803, 488739332, 152216485, 586379063,\n            803528157, 474182790, 466008553, 815466476, 849697412, 688632335,\n            479168762, 407199663, 364230794, 772460320, 511074611, 736421702,\n            391288059, 882800883, 843102518, 34716885, 507559525, 595511958,\n            752623314, 536247220, 706345695, 118300011, 459173939, 577881444,\n            740575421, 29586\n          ],\n          \"cutValueData\": [\n            66, -98, -25, -117, 66, -69, 81, 120, 66, 103, 9, 1, 66, 79, 2, 21,\n            66, -100, -86, -13, 66, -82, 81, -93, 66, -86, 87, 111, 66, -84, 67,\n            -40, 66, -79, 53, 8, 66, -123, 122, -24, 66, 90, -57, 97, 66, -102,\n            105, 22, 66, 113, 112, 73, 66, -114, 79, 49, 66, 126, -84, 70, 66,\n            -114, -27, 114, 66, -100, 87, 37, 66, -98, -33, 25, 66, -115, 64, 7,\n            66, -123, 28, 102, 66, 125, -8, -64, 66, 75, -80, 69, 66, 116, -90,\n            79, 66, 80, -91, -51, 66, -75, -116, 89, 66, 120, 26, 67, 66, -104,\n            49, 72, 66, 92, 49, 79, 66, -97, -63, 3, 66, -112, 127, 25, 66,\n            -107, -124, -112, 66, -92, -6, -87, 66, -106, -30, -8, 66, 105, 109,\n            83, 66, -72, -47, 19, 66, -74, 77, 34, 66, -108, 58, 35, 66, 115,\n            43, 29, 66, -83, 87, 82, 66, -90, 115, 72, 66, -95, -88, 44, 66,\n            121, 127, 82, 66, -95, -44, -64, 66, -125, 96, 53, 66, -77, 29, 27,\n            66, -113, 96, 74, 66, -98, 91, 105, 66, 117, -85, 97, 66, -112, -19,\n            30, 66, -126, -34, -28, 66, 101, 104, -61, 66, -126, -61, -82, 66,\n            -100, 44, 61, 66, 94, 98, -61, 66, -100, -30, 67, 66, -102, 63, 82,\n            
66, 113, -9, -86, 66, -114, -108, 1, 66, 105, 46, -46, 66, 83, -14,\n            2, 66, -95, 59, 35, 66, -106, -29, -43, 66, 111, -50, 79, 66, 93,\n            53, 94, 66, -110, -89, 95, 66, -103, -98, 117, 66, 111, 74, -110,\n            66, -71, -38, 18, 66, 117, -90, 49, 66, -73, 125, 74, 66, 83, -83,\n            104, 66, 83, -22, 101, 66, 100, -4, 93, 66, 81, 59, 28, 66, 110, 63,\n            -13, 66, -85, 21, 83, 66, -118, 6, -120, 66, -100, 24, -18, 66,\n            -103, 65, -19, 66, -79, 110, -35, 66, -109, 106, -77, 66, -91, 0,\n            -119, 66, -100, -12, -33, 66, -89, -9, -11, 66, 116, 53, 54, 66,\n            105, -126, -117, 66, -116, -58, -45, 66, -101, -49, -76, 66, -105,\n            79, -13, 66, -119, -22, 97, 66, -78, -62, -67, 66, -69, 101, 24, 66,\n            111, -105, 36, 66, -102, -30, -127, 66, 93, 119, -16, 66, -75, -83,\n            15, 66, -94, 67, -30, 66, -86, -79, 118, 66, -95, 49, -18, 66, -117,\n            99, 11, 66, -74, -94, 114, 66, -62, 28, 45, 66, 70, 64, -42, 66,\n            123, -126, 32, 66, -119, -99, -60, 66, 116, 9, -100, 66, -114, -81,\n            -28, 66, -76, 82, -13, 66, -115, 84, -103, 66, -102, 119, -31, 66,\n            -87, -103, -19, 66, 117, 116, -73, 66, 106, -47, -7, 66, -121, -124,\n            26, 66, 117, 104, -4, 66, -121, 21, 6, 66, -101, -80, 23, 66, 106,\n            -20, 9, 66, -64, 36, -4, 66, -117, -28, 69, 66, -91, 79, 32, 66, 79,\n            13, 85, 66, -123, 41, -22, 66, 108, 40, 15, 66, 98, -33, 114, 66,\n            -76, -99, -123, 66, 93, 55, 71, 66, 98, 125, -64, 66, -107, 30, 61,\n            66, 121, -111, -58, 66, -88, -5, 79, 66, -65, -4, 40, 66, 101, -39,\n            127, 66, -119, -77, -124, 66, -105, -38, 106, 66, -74, 18, -48, 66,\n            -103, 99, 64, 66, -62, -71, 92, 66, -99, 121, 17, 66, 68, 85, -118,\n            66, -63, -102, -59, 66, -90, -57, 50, 66, -117, 110, -54, 66, -83,\n            60, 107, 66, 110, -60, 1, 66, -110, 72, -32, 66, 119, 2, -17, 66,\n       
     -89, -73, 15, 66, -106, -80, -44, 66, 80, 57, 68, 66, 115, 0, 122,\n            66, -99, 19, 104, 66, -102, -28, 27, 66, 115, 2, 4, 66, 123, 16,\n            -84, 66, -78, 38, -32, 66, -124, -62, -77, 66, -119, -111, -127, 66,\n            88, -115, 89, 66, -82, -78, -2, 66, 114, 120, 73, 66, -80, -128,\n            -71, 66, -127, 73, -68, 66, -92, 113, 14, 66, -62, -73, 98, 66,\n            -113, 1, 38, 66, -85, 67, 108, 66, -90, 24, -9, 66, -118, 111, 71,\n            66, 122, -64, -10, 66, -107, 5, -76, 66, 90, -81, 77, 66, -90, 123,\n            119, 66, -80, -27, -65, 66, -118, 4, 99, 66, -126, -59, -16, 66,\n            110, -98, -98, 66, -105, -41, 42, 66, -112, -58, 88, 66, -90, -71,\n            110, 66, 123, -111, -112, 66, 79, -107, -99, 66, -102, -36, -43, 66,\n            -100, -31, 44, 66, 108, 43, -100, 66, -94, 107, 114, 66, -128, -52,\n            -76, 66, -126, 69, 122, 66, -85, -106, -104, 66, 125, -43, 57, 66,\n            -75, 120, -102, 66, -102, -38, -39, 66, 86, -12, -36, 66, 115, 59,\n            -114, 66, -125, -30, -39, 66, -108, 32, 60, 66, -121, -108, 117, 66,\n            -119, -43, 56, 66, -122, -19, -44, 66, -124, -124, -97, 66, -127,\n            -125, 69, 66, 107, -27, -24, 66, -104, 85, 63, 66, -121, -13, 28,\n            66, -121, -117, 88, 66, -80, 102, 2, 66, -81, 62, 35, 66, -110, -36,\n            59, 66, -123, -125, 54, 66, 69, 99, 55, 66, 104, -10, 26, 66, -105,\n            102, 10, 66, -101, 7, -61, 66, -103, 67, 29, 66, -77, -48, 69, 66,\n            -117, 106, -106, 66, -112, -99, -83, 66, -116, 32, -121, 66, -107,\n            127, -93, 66, -101, 116, 59, 66, -71, -40, 53, 66, -101, 13, 44, 66,\n            -89, 12, -83, 66, -92, -97, 10, 66, 108, -61, -69, 66, -116, -75,\n            -1, 66, -100, 2, -103, 66, -77, -128, -66, 66, -104, 51, -2, 66,\n            -124, 88, 21, 66, -115, 117, 12, 66, -123, -35, -74, 66, 112, -121,\n            29, 66, 104, 103, 109, 66, -91, -81, -86, 66, -126, 8, -70, 66,\n          
  -117, 107, -94, 66, 79, 47, -34, 66, -98, -67, -25, 66, -93, 125,\n            -112, 66, -128, 17, -120, 66, -110, 125, 107, 66, -102, -72, 123,\n            66, -101, -108, -15, 66, -112, -124, 90, 66, -91, 52, 35, 66, -85,\n            125, 113, 66, 110, -79, -55, 66, -125, -97, 3, 66, -72, -20, 96, 66,\n            125, -3, -34, 66, -88, 8, 21, 66, -113, -96, -33, 66, 114, 7, -17,\n            66, -96, 80, 108\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 1037786111, 940570355, 875282198, 629598939, 753208112,\n            166751039, 69043420, 857944212, 268\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 989558751, 1007615901, 268566028, 363217617, 544743806,\n            569421117, 36571843, 41980064, 2520\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 6987922936086641544,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 393690042, 880352886, 359590950, 332912875, 609455066,\n            359897695, 649702387, 206670581, 1058225711, 106532297, 391857894,\n            779450049, 263108054, 317438891, 1066597970, 582657454, 882284227,\n 
           723598553, 523832643, 318551635, 488105939, 220825146, 208485375,\n            446417327, 536656307, 303147217, 770792693, 874300963, 1038980577,\n            208129733, 114362154, 848337383, 188442239, 391986131, 234810543,\n            1060019914, 199746991, 313499211, 221189595, 452579125, 395413097,\n            64720111, 18\n          ],\n          \"cutValueData\": [\n            66, -127, 85, -2, 66, -81, 42, 25, 66, 73, -117, 69, 66, 80, 85,\n            -76, 66, -74, -126, 118, 66, -85, 120, -93, 66, -114, 59, -43, 66,\n            -113, 121, -45, 66, -71, -6, 27, 66, -109, 63, -90, 66, -83, 62, 55,\n            66, -85, 3, 4, 66, -75, 100, -24, 66, -94, 39, 48, 66, -109, 48,\n            -59, 66, 88, 27, -26, 66, -106, -92, -35, 66, 93, -42, 26, 66, -92,\n            22, 69, 66, -78, 100, 60, 66, -93, -108, -8, 66, -100, 43, -51, 66,\n            -75, -49, 59, 66, -110, 120, -100, 66, -64, -7, 11, 66, -120, 0,\n            -55, 66, 77, -84, -17, 66, -112, -45, 41, 66, -110, -28, 119, 66,\n            72, -76, 90, 66, -117, 14, -111, 66, -87, 92, 114, 66, -82, 126,\n            -50, 66, -108, -85, 24, 66, -86, -25, 102, 66, 98, 19, -62, 66, 107,\n            -1, -4, 66, -119, -100, 95, 66, 94, 48, 119, 66, 85, 29, -52, 66,\n            92, -13, -25, 66, 119, -95, 110, 66, -75, -60, 11, 66, 92, 121, -38,\n            66, 83, 84, -85, 66, -68, -42, 36, 66, -124, 9, -115, 66, -100, 34,\n            -18, 66, -113, -125, 72, 66, -100, -63, -100, 66, 124, 23, 53, 66,\n            -96, -61, -120, 66, -104, 92, 5, 66, 115, 33, 11, 66, -68, 63, 121,\n            66, -73, 2, -88, 66, -62, 21, 86, 66, 109, -116, 116, 66, -86, 9,\n            112, 66, -112, 106, -90, 66, -118, -27, -104, 66, -88, 5, 106, 66,\n            -107, 32, -114, 66, -72, 21, -37, 66, -72, -48, 123, 66, 116, -128,\n            32, 66, -97, 16, 58, 66, -107, -43, -68, 66, -74, -95, -59, 66,\n            -107, -11, -69, 66, -83, -14, -93, 66, 95, -2, 37, 66, 95, -82, 123,\n            
66, -127, 22, -68, 66, -125, 97, 35, 66, -83, -67, -4, 66, -79, -80,\n            83, 66, 72, -1, 61, 66, 73, 52, -52, 66, -105, -118, 79, 66, -114,\n            -72, -29, 66, 81, -3, 58, 66, -104, -51, 10, 66, -74, -99, 59, 66,\n            -62, -9, -106, 66, -70, 96, 25, 66, 88, -82, -117, 66, -82, 123,\n            -53, 66, 72, 33, -73, 66, -123, -98, -120, 66, -65, 64, 16, 66,\n            -123, 60, -8, 66, -87, -94, 63, 66, -123, 24, -81, 66, -127, -23,\n            101, 66, 106, -78, -23, 66, -73, 124, 105, 66, 90, 48, 15, 66, 87,\n            -117, -55, 66, -101, 113, -1, 66, -127, -128, -73, 66, -126, -36,\n            -4, 66, -116, 115, -82, 66, -128, -110, 91, 66, -94, -83, 46, 66,\n            99, -112, 83, 66, -96, 50, 18, 66, 88, 54, 88, 66, -71, 42, -103,\n            66, -85, -48, 76, 66, -67, 9, 104, 66, -62, -6, 89, 66, -114, 121,\n            65, 66, -82, -105, 37, 66, -82, 87, 92, 66, 126, -81, 95, 66, 104,\n            -44, -8, 66, -69, -110, -97, 66, -81, -26, -13, 66, -99, -24, 115,\n            66, -83, -127, 113, 66, 76, 26, 39, 66, 99, -115, -105, 66, -94, 42,\n            -26, 66, -72, -80, -96, 66, 122, -103, 71, 66, 78, -82, 98, 66, 98,\n            31, -82, 66, -124, -28, -126, 66, 119, -87, 114, 66, 125, -76, -43,\n            66, -122, -96, 37, 66, -93, -51, 64, 66, -98, -111, 95, 66, 95, -81,\n            -125, 66, -62, -13, -104, 66, -110, -23, 30, 66, -100, 25, -2, 66,\n            -99, -1, -13, 66, -116, 61, -49, 66, -96, 126, 35, 66, -103, 117,\n            124, 66, 115, -88, -14, 66, -107, 67, -1, 66, -69, 42, 64, 66, -127,\n            -106, 83, 66, -83, -81, 3, 66, -93, -105, -27, 66, -111, 108, 126,\n            66, -127, -100, 14, 66, 120, 110, 33, 66, -115, 115, -48, 66, -83,\n            118, -93, 66, -97, 40, -58, 66, -112, -67, -97, 66, -66, 5, -23, 66,\n            -127, 83, 30, 66, 90, 121, -24, 66, -105, 103, 55, 66, -122, 4, -57,\n            66, -117, 125, 81, 66, -103, 94, -4, 66, -125, -28, 114, 66, -83,\n         
   116, -44, 66, -108, 68, -128, 66, -85, -57, -84, 66, -112, 99, -62,\n            66, -83, -124, -94, 66, 109, 17, -57, 66, 124, 109, -34, 66, 115,\n            -119, -13, 66, -91, 12, 34, 66, -105, -46, 92, 66, -125, 119, -9,\n            66, -104, -29, 68, 66, -96, 103, 63, 66, 86, 95, 30, 66, -67, 70,\n            36, 66, 112, 105, 14, 66, 98, -50, -8, 66, -118, 67, 85, 66, -102,\n            -126, 78, 66, -102, 58, 92, 66, 106, 40, -84, 66, -112, -27, 119,\n            66, -95, -16, -106, 66, 105, 18, -28, 66, -117, -43, -7, 66, 117,\n            -58, -31, 66, -106, -40, -35, 66, -117, -69, -84, 66, 118, -99, 109,\n            66, -75, -26, 23, 66, -120, 85, -59, 66, -82, 24, -107, 66, -128,\n            -127, 101, 66, -112, -109, 20, 66, 95, 83, 55, 66, -105, 71, 108,\n            66, 78, 100, 30, 66, -102, -90, 13, 66, -103, -106, -22, 66, -100,\n            -5, 103, 66, -115, 30, 109, 66, -95, 122, -98, 66, 100, -15, -33,\n            66, -95, -52, 50, 66, -80, -46, -34, 66, -104, 11, 64, 66, 97, -90,\n            -17, 66, -86, -90, 48, 66, -112, -14, 27, 66, 81, 64, -99, 66, 93,\n            26, -55, 66, -116, -64, -71, 66, 112, 102, -127, 66, 90, 39, 25, 66,\n            -94, -66, -110, 66, 82, 14, 93, 66, -120, -91, 60, 66, -62, -115,\n            -107, 66, -115, -39, 61, 66, -81, -96, 121, 66, -72, 59, -5, 66,\n            -96, 119, 21, 66, -79, 95, 75, 66, -128, 85, 78, 66, -105, -68, -27,\n            66, -70, 63, 109, 66, -87, 126, 114, 66, -94, 71, 85, 66, 124, 19,\n            88, 66, -100, 30, 122, 66, 127, 11, -60, 66, -114, 122, 17, 66, 86,\n            57, 68, 66, 118, 67, -36, 66, -103, -19, 2, 66, 106, -49, 15, 66,\n            -79, 117, -127, 66, 125, -94, 71, 66, 112, -50, 53, 66, -93, 101,\n            -81, 66, -66, 5, 80, 66, -89, -19, -73, 66, -109, 68, -38, 66, -99,\n            -59, -109, 66, -122, -94, -52, 66, 90, -52, -15, 66, -110, 35, 41,\n            66, 126, -59, 99, 66, -85, -116, 106, 66, -101, 44, -66, 0, 0, 0, 0,\n          
  0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 253,\n          \"leftIndex\": [\n            -1, 1, 255, 1118506157, 1156947047, 1160482432, 1028279276,\n            643515962, 629157604, 982986506, 1114225843, 982900966, 1114352689,\n            984730679, 601050685, 710291311, 364\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1104275348, 1140997225, 1155638959, 1031465003,\n            1155685120, 711039613, 1141532270, 1114234586, 1140739159,\n            1112659240, 1026143708, 596092738, 628960423, 391\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 7337502249195313879,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 278250383, 164297688, 27510971, 832928501, 367753154,\n            413299321, 580523608, 124240170, 577698165, 722004443, 540095055,\n            591233598, 642793830, 459130683, 871739875, 720548358, 181778104,\n            196388197, 248232339, 405493197, 525456224, 721989530, 147492756,\n            723832976, 378567684, 689096449, 76587691, 247564083, 593655786,\n            860086032, 164090683, 348600156, 842797911, 582550765, 706011474,\n            37720289, 
866771283, 373625313, 400683123, 516801133, 719502795,\n            310574043, 25157\n          ],\n          \"cutValueData\": [\n            66, -65, -64, 23, 66, -110, -76, 94, 66, -64, -52, 94, 66, 87, -46,\n            104, 66, -60, 35, -122, 66, 74, -113, -117, 66, 123, -72, 21, 66,\n            -127, 68, -35, 66, 108, 19, -26, 66, -103, -26, 74, 66, 115, -71,\n            19, 66, -126, -35, 108, 66, 106, 71, 59, 66, -96, -20, -73, 66,\n            -118, 23, -71, 66, -113, 1, 76, 66, 114, 34, 40, 66, 108, -109, -40,\n            66, -92, -23, -63, 66, -117, -105, -68, 66, -63, 106, -55, 66, -70,\n            -65, -85, 66, 88, 55, -17, 66, -113, -14, 97, 66, -100, -23, -27,\n            66, 108, -63, 85, 66, -88, 112, 45, 66, 72, -119, 100, 66, -114,\n            -43, -115, 66, -65, 63, 111, 66, 100, 79, -48, 66, -119, 91, 117,\n            66, -96, 83, 55, 66, -96, 69, -93, 66, 77, 56, -6, 66, -82, 117, 82,\n            66, 85, -60, 33, 66, 80, -105, -39, 66, 96, -1, 15, 66, 89, -127,\n            29, 66, -70, 64, -6, 66, -92, 11, 18, 66, -102, 23, 92, 66, 84, -88,\n            74, 66, -119, 19, -100, 66, -94, 1, -96, 66, -102, -123, 18, 66,\n            -90, 29, 19, 66, -123, 45, -42, 66, -103, 56, -2, 66, 88, -84, -88,\n            66, 112, 89, 81, 66, -80, -53, -104, 66, -104, 46, -72, 66, -94, 52,\n            58, 66, -100, 104, -86, 66, -89, -92, 95, 66, -77, -50, -117, 66,\n            -87, 1, -1, 66, -106, 115, 58, 66, 127, 87, -16, 66, -128, -22, 92,\n            66, -92, 49, -67, 66, -98, 5, 27, 66, 101, -64, -46, 66, 103, 103,\n            -36, 66, -102, -112, 97, 66, 84, 10, 1, 66, -94, -80, -70, 66, 114,\n            7, -105, 66, 86, -116, -71, 66, -122, 54, -119, 66, -110, 57, -118,\n            66, -98, -60, -122, 66, -123, 14, -14, 66, 102, 30, 124, 66, -125,\n            -83, 125, 66, 105, 79, 2, 66, -111, 74, 68, 66, 127, -35, -102, 66,\n            -74, -79, -22, 66, -118, 38, 52, 66, 127, -63, 53, 66, -67, 13, 2,\n            66, -88, -66, 
41, 66, -81, -73, -31, 66, -60, 117, 105, 66, -113,\n            -44, 47, 66, -101, -116, -116, 66, -83, -82, 35, 66, -121, 10, -53,\n            66, -121, -22, -111, 66, -99, 5, 96, 66, -84, -13, -22, 66, -102,\n            27, 66, 66, -74, 77, 91, 66, -103, -116, -92, 66, 125, 39, -37, 66,\n            -86, -74, 76, 66, -92, 73, -99, 66, -100, -46, -23, 66, -109, -23,\n            -108, 66, -95, -124, -43, 66, 74, 91, -23, 66, -105, -34, 35, 66,\n            109, 56, -109, 66, 110, -121, -38, 66, -79, 91, 127, 66, -84, 125,\n            80, 66, -104, -97, -122, 66, -109, 92, -114, 66, 115, -107, 91, 66,\n            -108, -85, 69, 66, -120, 0, -84, 66, 80, 79, 58, 66, 98, -58, -115,\n            66, -122, 50, -63, 66, -102, 27, 114, 66, -117, -10, 41, 66, 73, 37,\n            49, 66, 107, -82, 40, 66, -84, -6, 74, 66, 96, 38, -122, 66, -98,\n            98, 41, 66, 87, -1, -37, 66, -127, 93, 55, 66, -103, -49, 22, 66,\n            111, 47, -9, 66, 117, -84, 122, 66, 121, -90, 23, 66, -117, -98, 57,\n            66, 73, -73, 27, 66, -83, 19, 64, 66, -65, 74, -40, 66, -115, -101,\n            -70, 66, -60, -49, 22, 66, -103, 19, -102, 66, 126, 83, 108, 66,\n            116, 1, -128, 66, -84, 118, -37, 66, -112, 52, 102, 66, -72, 17, 19,\n            66, -94, -60, -111, 66, 82, -104, -45, 66, 88, 84, 73, 66, -103, 20,\n            113, 66, -115, -102, -26, 66, -62, 103, 69, 66, -84, 109, 2, 66,\n            -118, -125, 63, 66, -103, -84, 81, 66, -77, 82, 24, 66, -128, 2, 35,\n            66, -102, -106, -12, 66, -114, 110, 123, 66, -90, 73, 59, 66, 119,\n            -25, 114, 66, 120, 90, -47, 66, -116, 21, 34, 66, -92, -108, -91,\n            66, -81, -4, -106, 66, -91, -34, 35, 66, -88, 33, -110, 66, -72,\n            -45, -21, 66, -88, 104, 124, 66, -117, 76, -86, 66, 114, -91, -98,\n            66, -103, -119, -39, 66, -108, 8, -75, 66, -111, 121, -53, 66, -92,\n            -24, 3, 66, 103, 44, -8, 66, -93, -8, 66, 66, -108, -106, -49, 66,\n            -126, 
-113, -89, 66, -98, -21, 118, 66, 72, 87, -89, 66, -75, -70,\n            -49, 66, 85, -33, 77, 66, -113, 83, 34, 66, -102, -94, 73, 66, -98,\n            -30, 105, 66, -119, -95, -11, 66, -100, -114, -97, 66, 111, -54, 64,\n            66, -109, -10, 81, 66, 95, 71, 78, 66, -81, 53, -120, 66, -85, -14,\n            -49, 66, 114, 15, -107, 66, -119, -102, 41, 66, -121, 6, -73, 66,\n            -114, 73, 53, 66, -105, -80, 102, 66, 103, 28, -5, 66, -68, 85, 81,\n            66, -94, -120, -118, 66, -102, -58, -36, 66, -68, -107, -121, 66,\n            -113, 53, -24, 66, 113, 125, 72, 66, -105, -58, -91, 66, -112, -59,\n            35, 66, 111, 2, -125, 66, -113, 121, -29, 66, -99, 50, 28, 66, 109,\n            -39, 90, 66, -78, -108, 18, 66, 124, 27, 117, 66, -113, 43, 35, 66,\n            -75, -40, 127, 66, -79, -60, -103, 66, 88, -63, 115, 66, -92, 103,\n            -62, 66, -105, -73, -24, 66, -114, -85, 25, 66, -103, 20, 80, 66,\n            109, 72, -117, 66, -76, -76, -63, 66, -65, 71, 41, 66, 117, 62, -31,\n            66, -88, -32, 64, 66, -112, -4, -93, 66, -125, -32, 36, 66, 113,\n            115, -54, 66, 110, -125, -86, 66, -91, -70, -56, 66, -68, 43, 32,\n            66, -106, -91, -86, 66, -85, -73, -57, 66, -97, 104, 14, 66, 94, 3,\n            -36, 66, -120, 82, -108, 66, 101, -65, -91, 66, -92, -55, 115, 66,\n            -109, -125, 57, 66, -95, 118, 53, 66, -76, -44, 119, 66, -122, -33,\n            91, 66, -89, -62, 1, 66, -108, -35, -92, 66, -73, -107, 25, 66, -86,\n            48, 126, 66, -96, -46, 52, 66, -63, 52, -32, 66, 100, 23, 89, 66,\n            113, 60, -2, 66, -106, -74, 52, 66, -121, 102, -26, 66, 102, -86,\n            -104, 66, -85, 61, 35, 66, 116, -120, 107, 66, -70, 3, -48, 66,\n            -110, -117, -87, 66, -101, 23, -41\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 
939386367, 654835694, 91089351, 498687794, 864854050,\n            729388756, 118344839, 407388214, 0\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 1003356139, 77053855, 7027618, 798016566, 922894498,\n            124295892, 128765699, 8439475, 4105\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 6898202114516460044,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 863063295, 589674055, 581363007, 173357345, 191095977,\n            329975383, 728939570, 212657826, 388742617, 248883253, 909700289,\n            668002619, 372165166, 757951917, 59849802, 708519730, 438754037,\n            44231546, 371001141, 1068796494, 532002809, 82254437, 1010488899,\n            894781143, 653732805, 598273714, 68590131, 1067171527, 396748469,\n            41674871, 328690762, 91043747, 174894957, 73348531, 115422305,\n            119781307, 732867394, 237194410, 329186301, 100118259, 506676843,\n            1063453746, 75\n          ],\n          \"cutValueData\": [\n            66, -99, 99, 45, 66, -123, 70, 69, 66, -86, 107, 31, 66, 72, 99,\n            -112, 66, -120, 92, 60, 66, -83, -97, -16, 66, -102, -114, -5, 66,\n            -101, 114, 122, 66, -64, -106, 1, 66, 104, -25, 74, 66, 86, 122,\n      
      105, 66, -100, -124, 15, 66, -79, -123, -53, 66, -101, 78, -70, 66,\n            116, 58, 79, 66, -74, 113, 120, 66, -90, 26, 25, 66, -106, 127,\n            -125, 66, -86, -32, 60, 66, 95, 9, 44, 66, -63, -57, 106, 66, -94,\n            -93, -71, 66, -126, 85, 23, 66, 121, 112, -112, 66, -111, -31, 79,\n            66, 110, -41, -33, 66, -103, 65, 127, 66, -91, 93, 101, 66, 109,\n            -127, 51, 66, -124, -43, -15, 66, -84, -49, -88, 66, 110, -113, -95,\n            66, -112, -105, 76, 66, -121, -112, -18, 66, -105, -128, -90, 66,\n            -107, -92, 110, 66, -124, -48, -68, 66, -91, 0, -31, 66, 80, 73,\n            -54, 66, -114, 96, -7, 66, -120, 103, 85, 66, -120, -117, -117, 66,\n            -85, -57, 93, 66, -113, -106, -96, 66, -61, 65, -15, 66, -69, -82,\n            -68, 66, -108, -77, 38, 66, -113, -127, -22, 66, 108, -67, -105, 66,\n            -117, 15, -19, 66, 117, -15, -30, 66, -86, 61, 114, 66, -76, -39,\n            -71, 66, -98, 41, 31, 66, 124, -81, 87, 66, 120, -56, -8, 66, -85,\n            7, 113, 66, -79, 102, -82, 66, -96, -26, -73, 66, -102, 69, -118,\n            66, -112, -48, -95, 66, -94, 17, 84, 66, -89, -118, -42, 66, -107,\n            100, 97, 66, -113, -2, 62, 66, -124, -13, -109, 66, 120, -22, 114,\n            66, -117, 56, -72, 66, 79, 7, -106, 66, -79, 4, -71, 66, -95, -102,\n            18, 66, -94, 58, -47, 66, -124, 83, 22, 66, -106, 114, -48, 66, -99,\n            37, 22, 66, -69, 127, -85, 66, -103, -89, -105, 66, 98, 77, -56, 66,\n            -114, 85, 36, 66, -80, 62, 14, 66, -102, 39, -18, 66, -121, 76, -34,\n            66, -128, -18, 85, 66, -97, 69, 28, 66, -125, 40, -47, 66, -122, 10,\n            -38, 66, -124, 112, -119, 66, -99, 53, 3, 66, -122, 75, 99, 66, 82,\n            126, 88, 66, -96, 32, 65, 66, -67, -54, -55, 66, -101, 52, -78, 66,\n            -97, 1, -77, 66, 94, 54, 29, 66, -105, -36, -34, 66, -109, 111, 110,\n            66, 97, 3, -17, 66, -109, -68, -100, 66, -108, -93, -80, 66, 
-116,\n            22, 58, 66, -67, 70, -112, 66, -124, 105, 60, 66, -106, 99, 90, 66,\n            -119, -10, -63, 66, 73, 91, -114, 66, 85, -67, -116, 66, -85, 118,\n            -23, 66, -109, 114, 108, 66, -91, -66, 99, 66, -112, 53, -87, 66,\n            -93, -100, 31, 66, -122, -31, 122, 66, 113, 84, -117, 66, -99, -24,\n            41, 66, -72, 84, 105, 66, -96, -128, -28, 66, -122, 81, -69, 66,\n            -109, 58, -14, 66, -110, -15, -48, 66, 96, -125, 40, 66, -112, 73,\n            -72, 66, -105, 97, 119, 66, -123, 34, 48, 66, -105, 32, -118, 66,\n            121, -16, -73, 66, -98, 21, -22, 66, 116, -96, 33, 66, -102, -118,\n            -4, 66, -127, -79, 48, 66, 122, 52, -41, 66, -83, 59, 122, 66, 109,\n            57, -96, 66, -104, -31, 24, 66, -87, 107, -48, 66, -76, 38, 16, 66,\n            -119, 32, 94, 66, -73, -121, 66, 66, 125, -46, -62, 66, -103, -6,\n            -127, 66, -69, -33, 30, 66, -112, 47, 69, 66, -118, -37, 12, 66, 94,\n            92, -15, 66, 115, 59, -126, 66, -127, 87, 47, 66, 105, 50, -50, 66,\n            -114, 51, 30, 66, -100, -46, -87, 66, -118, -66, 121, 66, 110, 93,\n            27, 66, -74, -43, 52, 66, -97, 125, -67, 66, -110, 87, -75, 66, 103,\n            51, 57, 66, -77, 84, 45, 66, -110, -106, 80, 66, -106, 84, 30, 66,\n            -84, -15, 26, 66, -126, 105, 90, 66, -89, 6, -119, 66, -63, -128,\n            -95, 66, -62, -83, 115, 66, -102, -118, 23, 66, -121, -116, 29, 66,\n            127, -32, -112, 66, 115, -92, -115, 66, -62, 65, -83, 66, -98, -117,\n            30, 66, -107, -39, 78, 66, 107, 12, 64, 66, -71, 44, 119, 66, -93,\n            -122, -123, 66, -64, 68, 78, 66, 87, 23, 80, 66, -115, 52, 24, 66,\n            93, 97, -6, 66, -117, 98, 109, 66, 121, 58, 88, 66, -65, -111, 110,\n            66, -95, -57, 20, 66, -90, 119, -30, 66, 76, 46, 64, 66, -92, 11,\n            107, 66, -69, 87, -10, 66, -72, 122, -112, 66, 97, 40, 8, 66, 78,\n            -47, -55, 66, 100, -87, 13, 66, -113, 69, -109, 66, 
-74, -95, 21,\n            66, 120, 76, 60, 66, -100, -85, 18, 66, -117, 67, 56, 66, -113, -51,\n            88, 66, -76, -76, -42, 66, -72, 16, -51, 66, -89, -22, -74, 66, -74,\n            -45, -11, 66, -99, 79, 115, 66, -112, 118, 0, 66, -97, 100, 7, 66,\n            112, -39, -107, 66, -125, -10, 92, 66, -70, -32, -37, 66, 127, 93,\n            -10, 66, 109, -38, -110, 66, -122, -3, -18, 66, 104, 50, -22, 66,\n            -61, -9, 118, 66, -114, -9, 32, 66, -125, -77, 48, 66, 123, -26, 47,\n            66, -101, 79, -59, 66, -73, -84, 5, 66, 87, -100, -28, 66, 125, 116,\n            4, 66, -103, -5, 122, 66, -87, 42, 74, 66, 93, -43, -73, 66, 92,\n            -29, -101, 66, 97, -20, -100, 66, 109, -35, -102, 66, -102, -56,\n            -27, 66, 115, -82, 119, 66, 85, 117, 62, 66, -120, -2, 36, 66, 86,\n            119, 12, 66, -128, 67, -66, 66, -112, -56, 19, 66, -97, 39, 46, 66,\n            -116, 11, 101, 66, -102, 106, 64, 66, -102, 41, -11, 66, -88, 15,\n            52, 66, -90, -20, -57, 66, -125, -67, -48, 66, 111, -19, 97, 66,\n            102, -91, 53, 66, -93, 21, -24, 66, 123, -37, -58, 66, -77, 121,\n            -108, 66, -104, -109, 84, 66, -113, -120, 91, 66, -96, -61, 122, 66,\n            -85, -5, 105, 66, 100, -104, 5, 66, 85, 84, -89, 66, -80, -125, 33,\n            66, 92, 55, -120, 66, -69, -108, -42, 66, 80, 17, 114, 66, -95, 40,\n            -89, 66, -72, -21, 54, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1114429589, 1147905998, 1104097922, 643900796,\n            772984187, 982901212, 973951262, 1156682156, 597843904, 581218456,\n            600350090, 581159164, 596033707, 1093\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1033119116, 1147674176, 1147826942, 629748716,\n            755505682, 1112040646, 1012208236, 1028073697, 588217679, 
624204673,\n            1026507451, 753575767, 595718105, 1175\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -9202437649290432236,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 535719095, 867105787, 151614999, 707219049, 84183276,\n            234971147, 149604553, 642161656, 696657209, 852529586, 886947438,\n            656297559, 492732625, 62060633, 614388041, 56075691, 740370285,\n            885229957, 706316741, 760457038, 384840541, 849947140, 299208006,\n            537546950, 355483385, 830564046, 754257754, 133924414, 160215686,\n            253534916, 486409978, 277181321, 887324940, 396786524, 133542942,\n            296075823, 377299001, 77452291, 642617502, 704455112, 767255632,\n            354700450, 13175\n          ],\n          \"cutValueData\": [\n            66, 70, 121, -124, 66, -98, 70, 107, 66, -88, 117, -54, 66, -120,\n            -83, 12, 66, -124, -32, 87, 66, -74, -30, -51, 66, -78, -9, -19, 66,\n            -119, 47, 85, 66, -114, 11, 24, 66, -95, -42, 30, 66, -120, -38, -8,\n            66, 106, -54, -102, 66, -107, 31, -38, 66, -115, 63, -107, 66, 113,\n            7, -63, 66, -71, -89, -99, 66, 101, -119, 48, 66, -71, -6, -118, 66,\n            -119, -83, -42, 66, -87, -29, 
107, 66, -98, -27, 0, 66, 91, 41, -22,\n            66, -93, -4, -99, 66, -124, 70, 59, 66, 69, 95, 112, 66, -114, 15,\n            -18, 66, 94, -62, -56, 66, -113, 76, -93, 66, -124, -95, -114, 66,\n            127, 32, 4, 66, -79, -31, 11, 66, -90, -64, 78, 66, -81, 17, 35, 66,\n            -71, 25, 15, 66, -118, -63, -10, 66, -105, -100, -96, 66, -71, 124,\n            -62, 66, 86, -111, -3, 66, -98, -47, -91, 66, -89, 13, 8, 66, -109,\n            121, -28, 66, 109, -105, 114, 66, -67, 107, 93, 66, -122, 13, -39,\n            66, -93, -69, 27, 66, -75, 17, 124, 66, -93, 18, 58, 66, 101, 72,\n            -72, 66, -71, 75, 53, 66, 124, 66, 77, 66, -116, -3, 114, 66, -111,\n            -16, -122, 66, -125, 4, -15, 66, -92, -83, 15, 66, -69, -95, -110,\n            66, -99, -113, -42, 66, 93, 1, -125, 66, 118, -59, 93, 66, -102, 32,\n            -4, 66, 127, -49, -123, 66, -81, 92, -80, 66, -74, 20, 12, 66, -116,\n            -88, -23, 66, -93, -61, -92, 66, -93, -87, -1, 66, -91, 13, -97, 66,\n            -85, -51, 125, 66, -72, -4, 102, 66, -90, -128, 121, 66, -128, 79,\n            -73, 66, -126, 21, 109, 66, 97, -45, 85, 66, 124, -25, -72, 66,\n            -127, -23, -118, 66, 72, -87, 94, 66, 119, 53, -73, 66, -115, -41,\n            -28, 66, -87, -76, -93, 66, 76, -50, -86, 66, 87, -81, 77, 66, -94,\n            94, 105, 66, -100, -43, 61, 66, -121, -22, -21, 66, 123, -46, -111,\n            66, 101, -93, -85, 66, 127, -99, -48, 66, -128, 22, 101, 66, 113,\n            92, -91, 66, -126, 115, 58, 66, -126, 24, -82, 66, -70, -127, -126,\n            66, -72, -59, -96, 66, -81, 29, 77, 66, -67, -128, 50, 66, -123,\n            -99, 49, 66, -96, 98, 115, 66, 94, 119, 117, 66, -98, 40, 22, 66,\n            -99, 3, -30, 66, 100, 13, -20, 66, -79, -123, -26, 66, -119, 52,\n            112, 66, -97, -1, -122, 66, -93, 115, 25, 66, 102, -88, -68, 66, 81,\n            -65, -10, 66, -113, 73, 93, 66, -125, -29, 35, 66, -74, -12, 101,\n            66, -123, -9, 96, 66, 
-123, -1, 90, 66, -84, -116, 67, 66, -91, 28,\n            -41, 66, -77, 58, -55, 66, -116, 56, -101, 66, 119, -80, -124, 66,\n            118, 15, -43, 66, -74, -65, 45, 66, -119, -85, 13, 66, -113, 30, 22,\n            66, 82, -5, 110, 66, -87, -28, 0, 66, -93, 119, 3, 66, -86, 98, 127,\n            66, -80, 35, 110, 66, -128, 62, 111, 66, -86, 47, -102, 66, -116,\n            -67, 49, 66, -101, -72, 97, 66, -113, 29, -103, 66, 83, 75, -122,\n            66, -115, -67, -102, 66, -69, -2, 124, 66, 100, -40, -98, 66, 116,\n            -13, -113, 66, -115, -106, 86, 66, -108, 23, -14, 66, -121, -82,\n            -30, 66, -127, -64, -2, 66, -78, 8, 54, 66, -106, -65, 36, 66, -72,\n            -101, 48, 66, -113, 18, -60, 66, -116, 35, -47, 66, -106, -98, -92,\n            66, 99, -57, 94, 66, 120, -71, -43, 66, -84, 47, -53, 66, -98, 117,\n            25, 66, 114, -7, 66, 66, 87, 92, -8, 66, -110, 110, 107, 66, -82,\n            70, -68, 66, -75, 14, 77, 66, -95, 14, 7, 66, 108, 66, -40, 66, -87,\n            120, 100, 66, 82, 91, 95, 66, -121, -101, 62, 66, -109, -65, -127,\n            66, -76, -128, 122, 66, -122, 42, 107, 66, 81, -125, -25, 66, -65,\n            24, 114, 66, 94, 88, 85, 66, -120, 77, -118, 66, -111, -62, -5, 66,\n            -79, -98, -98, 66, -107, 11, -47, 66, 126, -4, -94, 66, 110, -46,\n            -80, 66, 104, 34, -89, 66, 117, -78, 45, 66, -63, -15, -94, 66, 93,\n            -103, 35, 66, -92, 98, 45, 66, -83, 88, 52, 66, -105, -9, -127, 66,\n            -121, -111, 51, 66, -104, 65, 80, 66, 112, 100, 5, 66, 102, 100,\n            107, 66, -107, -45, 112, 66, -123, -48, -73, 66, -102, 16, -115, 66,\n            -106, 54, 84, 66, -72, 0, 16, 66, -101, 35, -117, 66, 125, 59, -103,\n            66, -65, 78, -91, 66, -63, 117, 123, 66, -68, -67, -15, 66, -113,\n            121, -69, 66, -70, 106, -74, 66, 86, 89, 21, 66, -102, -64, -45, 66,\n            -114, -47, 102, 66, 126, 48, -118, 66, 89, -3, -17, 66, -89, 18,\n            -75, 66, 
-103, 2, 65, 66, -114, -76, -114, 66, -92, 112, -51, 66,\n            122, 25, 124, 66, -94, 79, 91, 66, -102, 125, 100, 66, -64, 82, 98,\n            66, -120, 50, -43, 66, 123, -121, 122, 66, -113, 58, -91, 66, 92,\n            57, 29, 66, -100, 109, 120, 66, 120, -127, -13, 66, -126, 35, 94,\n            66, -89, 13, 85, 66, -95, 47, -96, 66, -114, 89, -12, 66, 76, 73,\n            28, 66, 100, -118, -119, 66, -106, -50, 112, 66, -87, -35, -6, 66,\n            71, 35, -125, 66, -81, -107, -51, 66, -123, 110, -113, 66, -115,\n            -12, -18, 66, -82, -38, -68, 66, -109, 35, -83, 66, -106, -70, 66,\n            66, -122, 34, 105, 66, -118, -86, -106, 66, -126, -124, 123, 66,\n            123, 63, -39, 66, -62, -91, -8, 66, 80, -40, -26, 66, 80, -39, 78,\n            66, -109, 121, 77, 66, 118, 123, -20, 66, 120, 94, -78, 66, 120, 75,\n            2, 66, -86, 96, -9, 66, 96, -83, 16, 66, -109, -122, 37, 66, 72,\n            -112, -110, 66, 111, -51, -39, 66, -124, -77, -37, 66, -81, -27,\n            -29, 66, -128, 32, -68, 66, -89, 84, 78, 66, -90, 62, 48, 66, -96,\n            25, 106, 66, -122, -67, -26, 66, -117, -53, -113, 66, -84, 42, -103,\n            66, -112, -75, -97, 66, 112, 11, 22\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 1044373375, 872148407, 22248433, 387200477, 762354131,\n            402689047, 6856100, 311476328, 5154\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 1044381565, 309447863, 326926004, 928185553, 772906643,\n            24468693, 553929732, 186810656, 112\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": 
true,\n        \"seed\": -3274031270900754220,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 578209893, 257924091, 585725913, 352298682, 1059957473,\n            1068420343, 227716159, 749119150, 514098745, 715069249, 87672902,\n            330159845, 871206710, 132696553, 975129162, 254793854, 128544595,\n            909768015, 52383069, 774183261, 185947346, 257414195, 603028837,\n            1016255798, 911256998, 77956717, 86996186, 580475745, 336965233,\n            866872495, 731348601, 259210925, 463572070, 858477874, 78428495,\n            44030675, 733064953, 719022758, 712485181, 254453550, 846256505,\n            917936994, 581\n          ],\n          \"cutValueData\": [\n            66, -69, 17, 18, 66, -64, -11, 112, 66, -70, -40, -77, 66, -68, -23,\n            -115, 66, -73, 22, -21, 66, -122, -98, -89, 66, -114, 11, -41, 66,\n            -110, -68, -31, 66, 101, 0, 84, 66, -77, 34, -118, 66, 114, 119,\n            -127, 66, -65, -50, 117, 66, -88, -39, -128, 66, -88, 97, 21, 66,\n            -122, 64, 105, 66, 100, -24, 43, 66, 111, -109, -29, 66, -85, 106,\n            118, 66, -97, 19, 82, 66, -75, 96, -89, 66, 126, 92, 103, 66, 69,\n            112, -21, 66, 127, -7, 95, 66, -85, -101, -22, 66, -85, 92, -101,\n            66, -79, 73, -122, 66, -93, -120, 76, 66, -98, 1, -107, 66, -123,\n            102, 54, 66, -122, -43, -19, 66, 100, -117, 55, 66, 109, -37, -28,\n            66, 126, 8, -127, 66, -96, 109, -92, 66, -103, 37, -128, 66, -125,\n            
-13, 103, 66, -112, -75, -70, 66, 108, 72, 68, 66, -72, 9, 41, 66,\n            -122, 47, 41, 66, -69, 111, 19, 66, -89, -1, -88, 66, -124, -107,\n            -95, 66, -119, 56, 27, 66, -91, -77, 119, 66, -107, 75, -11, 66,\n            111, 82, 93, 66, -100, 69, -1, 66, 72, -53, -8, 66, -85, 113, 0, 66,\n            -94, -71, 21, 66, -71, 79, 30, 66, -102, -108, -100, 66, -95, 48,\n            -69, 66, -93, -31, 106, 66, -69, 16, -83, 66, -96, -89, -64, 66,\n            -91, 30, -25, 66, -72, -83, -60, 66, 114, 67, -43, 66, -79, -81,\n            121, 66, 123, 112, 80, 66, -95, -112, 66, 66, 73, -18, 21, 66, -112,\n            10, -128, 66, -65, 65, 3, 66, -112, -112, 103, 66, 68, 111, -79, 66,\n            -109, 98, -9, 66, -91, 63, 75, 66, -87, -43, -57, 66, -97, 40, 38,\n            66, -80, -99, -3, 66, 105, -120, 123, 66, 97, -114, -58, 66, -102,\n            17, 86, 66, -115, 120, 94, 66, 105, 111, -111, 66, -110, -55, 70,\n            66, -94, 69, -113, 66, -92, 75, -11, 66, -101, -31, 30, 66, -73, 60,\n            90, 66, -102, 58, -61, 66, -109, -1, 27, 66, -109, 43, 84, 66, -104,\n            92, 99, 66, 102, 124, -97, 66, -114, 30, -127, 66, 115, 30, -35, 66,\n            -98, -15, -127, 66, 116, -108, -13, 66, -109, 42, -93, 66, 112, 49,\n            105, 66, -82, -5, -127, 66, -118, 6, 16, 66, 72, -45, -103, 66, -78,\n            32, -115, 66, -66, -72, 54, 66, 91, -62, -56, 66, 80, 18, 113, 66,\n            -124, 5, 82, 66, -81, 8, 61, 66, 96, -15, -36, 66, -81, -82, 25, 66,\n            -72, -119, 12, 66, -110, -44, -5, 66, 120, -84, -110, 66, 99, -67,\n            120, 66, -70, -117, 1, 66, -128, -99, -106, 66, -126, -71, 55, 66,\n            90, 4, 123, 66, -78, 102, 46, 66, -113, 124, 55, 66, -121, -114, 0,\n            66, -72, -3, -33, 66, 122, 84, -98, 66, -116, -36, 46, 66, -113,\n            -40, -83, 66, -80, -56, -10, 66, -111, 119, -15, 66, -109, 91, -91,\n            66, -104, -118, -28, 66, 122, -120, 105, 66, -109, -27, 26, 66,\n       
     -120, -77, -6, 66, -123, 36, -26, 66, 93, 94, 24, 66, -125, -102,\n            -27, 66, -103, -68, -96, 66, 93, 110, 95, 66, -102, -69, 79, 66,\n            -108, 51, 77, 66, -116, -77, -90, 66, -120, 109, -9, 66, 91, 77, 45,\n            66, -108, -13, 6, 66, -123, 58, -97, 66, -118, 94, -99, 66, -125,\n            117, 56, 66, 117, 16, 125, 66, 107, -99, -33, 66, -99, -54, 123, 66,\n            -94, -47, -50, 66, -119, -91, -5, 66, 77, 39, -17, 66, -106, -57,\n            -55, 66, -110, -82, -90, 66, -113, -13, 4, 66, 87, 121, 112, 66,\n            100, 29, 62, 66, -123, 23, 87, 66, -109, 1, -90, 66, 123, 43, 52,\n            66, -94, -35, -49, 66, 93, 91, 98, 66, -106, -44, 22, 66, -110, -89,\n            78, 66, -104, -105, -89, 66, 109, 70, -68, 66, -122, 55, -99, 66,\n            -123, 40, -4, 66, 107, -63, 123, 66, -89, -17, -25, 66, -113, 109,\n            44, 66, 90, -35, 72, 66, -98, -84, 67, 66, -102, 29, 97, 66, -105,\n            17, -118, 66, -97, -60, 55, 66, -79, -54, -3, 66, -90, -61, -124,\n            66, -86, 63, -78, 66, -86, -119, -61, 66, -116, 88, -86, 66, -117,\n            -72, 105, 66, -76, -60, -3, 66, -111, -76, 96, 66, -123, 87, 46, 66,\n            -112, 121, -73, 66, -91, 94, -85, 66, -73, -61, -18, 66, -101, 17,\n            36, 66, -111, -114, -119, 66, -73, 100, -98, 66, 83, -84, 91, 66,\n            -118, -66, -114, 66, 94, 27, -40, 66, 94, 11, 17, 66, -71, -63, 108,\n            66, 102, 20, 7, 66, -127, 16, -75, 66, -83, 112, 14, 66, -95, -70,\n            104, 66, -112, -96, 80, 66, -64, -128, -111, 66, -121, -26, 4, 66,\n            109, -33, 107, 66, -89, 120, 0, 66, 120, -89, 20, 66, -127, 49, 117,\n            66, 86, 15, -21, 66, 99, 51, -21, 66, -110, -33, 45, 66, -103, 127,\n            -99, 66, -91, -114, 12, 66, -87, 101, 71, 66, 74, -126, 74, 66,\n            -104, 12, -17, 66, -120, 112, 102, 66, 124, -77, -84, 66, -89, 71,\n            23, 66, 111, -80, 116, 66, -126, 62, 98, 66, -120, 75, -115, 66,\n          
  -128, -38, -115, 66, -101, 96, -123, 66, -104, -11, 119, 66, -81,\n            23, -111, 66, 111, 123, 85, 66, -66, 74, -62, 66, -78, -124, -81,\n            66, 112, 124, 14, 66, -87, 35, 72, 66, 102, -49, -25, 66, 83, 74,\n            -78, 66, -103, 13, 24, 66, -72, -103, -126, 66, -123, 87, 15, 66,\n            -112, -25, 112, 66, 76, -66, 104, 66, 91, 86, 61, 66, -91, 111, 25,\n            66, -92, -46, 49, 66, -121, 37, 112, 66, -101, 4, 5, 66, -123, 24,\n            -40, 66, -86, -82, 16, 66, -125, -109, 76, 66, -90, 38, -23, 66,\n            -121, -5, -101, 66, -103, 0, -98, 66, 84, 115, -46, 66, 88, 1, -81,\n            66, -86, -13, 79, 66, -112, 57, -101, 66, 82, -108, -96, 66, -114,\n            -110, -23, 66, -112, -84, -38, 66, -125, 90, 88, 66, -110, -1, 124,\n            66, 113, -60, 118, 66, -76, -110, 94, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1157477768, 587688443, 1147846949, 975665816, 602574268,\n            1026185486, 973596749, 758185996, 753337331, 715789547, 715232068,\n            969083423, 754977551, 1093\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 645700004, 1117021184, 1156704272, 600502082, 726393487,\n            768264632, 629039186, 717202579, 758888728, 717424600, 581316871,\n            970172036, 640356988, 1102\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -3778355347224434329,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": 
\"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 1020871983, 94566889, 760979307, 762517201, 74089533,\n            720059238, 1039236787, 882164983, 443010978, 1067685491, 744437879,\n            44227651, 712934875, 586599657, 478058170, 712497074, 584890827,\n            1067023138, 212507518, 644517879, 57637087, 922614059, 211124159,\n            630942554, 212127663, 980757562, 922680098, 251194977, 651341409,\n            182695597, 992923474, 907738585, 530524267, 524254673, 350455678,\n            636851930, 978783919, 460889075, 854407546, 1050527682, 441574977,\n            1063108273, 743\n          ],\n          \"cutValueData\": [\n            66, -82, 59, -41, 66, -109, 15, 112, 66, -94, 21, -4, 66, -74, 10,\n            -84, 66, -124, -19, 86, 66, 106, 118, -85, 66, 116, 125, -39, 66,\n            -83, -4, 63, 66, 108, 68, -113, 66, -128, -76, -32, 66, 124, 119,\n            18, 66, 75, -24, -25, 66, -115, -4, 120, 66, -107, -35, 49, 66, 86,\n            65, 17, 66, -122, 97, 67, 66, 100, -62, 9, 66, 110, 17, 46, 66, 122,\n            -118, 91, 66, -94, 52, 78, 66, -89, 92, -90, 66, -109, -85, -95, 66,\n            -121, -34, -11, 66, -59, -80, -45, 66, -94, -79, -49, 66, -127,\n            -100, -4, 66, -125, 32, 71, 66, -74, -73, 86, 66, -125, 106, -99,\n            66, -121, -105, -112, 66, -101, 0, -118, 66, 127, 99, -46, 66, 95,\n            -80, -85, 66, -99, -40, 10, 66, 74, 76, 9, 66, 77, -10, -36, 66,\n            118, -13, -57, 66, -98, 94, -27, 66, -121, -30, 119, 66, -112, 16,\n            -45, 66, -99, 124, -9, 66, 94, -64, 77, 66, -67, -39, 32, 66, -83,\n            -92, -92, 66, -117, 85, 59, 66, -92, 22, -24, 66, -120, 77, 6, 
66,\n            -62, 43, 35, 66, 117, -77, 31, 66, 114, 38, -5, 66, 109, -108, -103,\n            66, -89, -27, 2, 66, -87, 116, -109, 66, 71, 1, 107, 66, -113, 0,\n            -93, 66, 112, -124, 57, 66, -96, -39, -105, 66, -99, 66, 101, 66,\n            -100, -109, -59, 66, -108, 103, 49, 66, -81, -114, -36, 66, -119,\n            113, -75, 66, -91, -120, -82, 66, -101, 67, -60, 66, -123, -7, -103,\n            66, -94, -55, -115, 66, 126, -107, -48, 66, -109, 114, 34, 66, 119,\n            124, -67, 66, -112, -36, 103, 66, -63, 113, -6, 66, -98, 74, 62, 66,\n            -102, -64, 35, 66, 111, -37, -125, 66, 123, 65, 18, 66, 115, 34, -3,\n            66, -81, -19, 16, 66, -66, 29, -84, 66, -121, -38, -46, 66, -111,\n            -10, 24, 66, -125, -53, -119, 66, -117, 32, -44, 66, -99, 106, 24,\n            66, 75, -122, 21, 66, -68, -96, 113, 66, 102, 32, -14, 66, -84, -77,\n            1, 66, 107, 18, -95, 66, -93, -119, -35, 66, -105, -102, -117, 66,\n            -98, 87, 51, 66, -99, 25, 14, 66, -101, 94, -5, 66, -99, 75, 24, 66,\n            -61, 122, -72, 66, -103, 31, 45, 66, 102, 25, -43, 66, -70, -15,\n            121, 66, -79, 59, 90, 66, -79, 11, -24, 66, -77, 51, 105, 66, 115,\n            -30, 11, 66, -95, 23, 43, 66, -126, -68, -41, 66, 126, -10, -4, 66,\n            -81, -81, 105, 66, -114, -112, -114, 66, 81, 58, -103, 66, -118, 45,\n            -41, 66, 97, -99, 91, 66, -100, -23, -48, 66, -95, 3, -22, 66, -117,\n            -70, 36, 66, -98, 67, 63, 66, -65, 127, 92, 66, -69, -117, -109, 66,\n            -81, 120, -54, 66, 121, 97, -112, 66, 96, -93, 23, 66, -79, -74, 21,\n            66, -115, -128, -124, 66, 108, 96, -32, 66, 118, 102, 53, 66, 124,\n            54, -73, 66, -127, -49, -78, 66, -119, 87, -30, 66, -82, 17, -63,\n            66, -59, -62, 101, 66, -71, -45, 21, 66, -85, -109, -35, 66, -93,\n            113, -99, 66, -90, 67, -74, 66, -121, -27, -83, 66, -92, 109, 21,\n            66, -111, -64, 102, 66, -63, 126, -20, 66, -75, 
-80, 72, 66, -126,\n            -46, -91, 66, -100, 49, -79, 66, -88, 107, 39, 66, -92, -6, -30, 66,\n            94, -101, -33, 66, -111, -40, 91, 66, 110, -88, 32, 66, -96, 35,\n            -78, 66, -93, 107, -15, 66, -96, 52, -122, 66, 104, 51, -47, 66,\n            112, 101, -81, 66, -89, 32, 104, 66, -97, 123, -36, 66, -76, 67,\n            -15, 66, -114, -22, 52, 66, 125, 24, 86, 66, -76, -23, 16, 66, -103,\n            54, -33, 66, -98, -45, -58, 66, -96, 106, 100, 66, -105, 29, 14, 66,\n            -63, -37, 1, 66, 85, -121, -36, 66, -118, 21, -46, 66, -100, -74, 5,\n            66, -116, 35, -126, 66, 93, 78, -110, 66, 117, 69, -61, 66, 82, -90,\n            -30, 66, -95, 27, -71, 66, -71, -2, -24, 66, 124, -52, -24, 66, -99,\n            86, 104, 66, 122, 110, 81, 66, -73, -25, -100, 66, -103, 43, -92,\n            66, -112, -127, 89, 66, 77, 28, -49, 66, 98, -50, 82, 66, -113,\n            -123, -94, 66, 92, -55, -114, 66, 107, -124, 63, 66, -108, 61, 127,\n            66, 94, 12, 125, 66, 91, -110, -101, 66, -101, -83, 31, 66, -94,\n            119, 47, 66, -120, 13, -72, 66, -126, -34, -2, 66, -97, 30, 14, 66,\n            127, -53, -84, 66, 117, -30, -49, 66, -109, -59, -23, 66, 106, -111,\n            -83, 66, -67, 66, -93, 66, -95, -99, -64, 66, -85, -105, -11, 66,\n            117, 28, 49, 66, -95, 68, 116, 66, 83, 88, -94, 66, -125, 115, 89,\n            66, -84, 123, 106, 66, 86, -105, 35, 66, -86, 90, -89, 66, 104,\n            -121, 118, 66, -105, 122, 59, 66, -101, 114, 18, 66, -94, -32, 55,\n            66, -79, 104, 58, 66, 106, 103, -120, 66, 108, 35, -40, 66, 118,\n            116, 11, 66, 114, -105, 105, 66, -81, 96, 19, 66, -96, 7, -107, 66,\n            91, -33, -2, 66, 123, 125, 125, 66, -122, 47, 125, 66, 68, -56, 65,\n            66, -101, 50, -35, 66, -73, -67, -4, 66, -105, -120, 21, 66, -88,\n            100, 70, 66, -104, -8, -98, 66, 123, 120, -3, 66, -106, 4, 74, 66,\n            -102, -11, -14, 66, 115, 2, -32, 66, -69, 89, 
-102, 66, -123, -69,\n            22, 66, 124, 28, 37, 66, 91, 46, 2, 66, 74, 94, 27, 66, -90, 75,\n            -64, 66, 87, -97, -53, 66, -76, -108, 105, 66, -110, -68, -72, 66,\n            -107, -118, 67, 66, -103, -66, 28, 66, -79, 11, 38, 66, -120, -52,\n            33, 66, 88, 48, -103, 66, -99, 67, 117, 66, -109, -110, -55, 66,\n            -63, 93, -65, 66, -115, 19, -79, 66, 84, -29, 100, 66, -121, -9, 41,\n            66, -69, 28, -80, 66, -101, -20, -72, 66, 121, -20, -51, 66, -95,\n            -107, -54, 66, -65, 32, 28, 66, -100, -97, 96, 66, 114, -31, -119,\n            66, 113, 35, 83, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1112837453, 1117086767, 975718871, 1013967445,\n            1117440845, 1032503558, 629218502, 1104077698, 724699304, 640674031,\n            753495493, 588216641, 581721979, 1093\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1162259279, 1156767371, 1112830567, 1031461285,\n            1117082416, 631266250, 581330083, 1102502788, 725178301, 768279937,\n            753851246, 645613321, 768198073, 1096\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -6113428335752356252,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          
\"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 754238655, 303134165, 935192163, 354379938, 171411282,\n            919938218, 481872430, 195147813, 187248085, 473528881, 262749606,\n            483639285, 318319270, 498904418, 523308769, 731084623, 521846193,\n            362265005, 1017487010, 258991987, 103656913, 643804798, 901504089,\n            383989611, 883230441, 847214373, 707074923, 115531125, 584362039,\n            742046455, 107588339, 391829959, 1067542898, 329321707, 1026113227,\n            634501623, 35719918, 37406818, 937236853, 91827443, 895862653,\n            90536699, 5\n          ],\n          \"cutValueData\": [\n            66, -92, -3, 98, 66, -80, 90, -29, 66, 74, -38, 51, 66, 94, 4, -48,\n            66, -120, 80, 11, 66, -91, -118, -70, 66, 92, -127, 43, 66, -66,\n            -74, 9, 66, 122, -21, 93, 66, -91, 103, 64, 66, -91, -113, -117, 66,\n            124, -18, 3, 66, 82, -96, -11, 66, 105, 126, -18, 66, 93, 117, -117,\n            66, -98, 25, -11, 66, -94, 99, -104, 66, -111, -117, 37, 66, -110,\n            -102, 74, 66, -73, -8, 35, 66, -99, 74, 8, 66, 95, 109, -56, 66,\n            108, -23, -9, 66, 102, 107, 2, 66, -63, 116, 124, 66, 84, 114, -86,\n            66, -124, -88, -117, 66, -92, -39, -48, 66, -121, 15, -70, 66, -101,\n            -113, 92, 66, -100, -121, -17, 66, -112, -75, 78, 66, 127, 33, 61,\n            66, 117, -70, 57, 66, 82, -1, 119, 66, -76, -35, 43, 66, 85, 78, 29,\n            66, -123, 116, -123, 66, -93, 96, 7, 66, -118, -110, 111, 66, 78,\n            -64, 67, 66, -121, -24, -40, 66, -97, 70, 40, 66, -99, 71, 64, 66,\n            -94, 124, -55, 66, 82, -2, 12, 66, -113, -22, 111, 66, -118, 121,\n            -13, 66, 125, 111, 41, 66, 101, 4, 110, 66, -120, -75, 20, 66, -97,\n            86, 66, 66, 106, 67, -41, 66, -106, -68, -17, 66, -80, 115, -105,\n            66, -81, 52, 44, 66, -99, -67, -30, 66, 
-70, -128, -37, 66, 126,\n            -37, 80, 66, -105, 85, 118, 66, -60, -17, 116, 66, -85, -71, -77,\n            66, -83, -12, 26, 66, -106, 19, 47, 66, 120, -85, 24, 66, -107, 44,\n            19, 66, -111, -116, 45, 66, -82, -90, -93, 66, -103, 118, 43, 66,\n            -124, -47, -16, 66, 97, -125, -31, 66, -83, 51, -104, 66, -88, 70,\n            15, 66, 105, 88, -87, 66, -92, 86, -72, 66, 119, 120, -121, 66,\n            -128, -128, 82, 66, -79, 35, -105, 66, -74, 42, -112, 66, -76, -53,\n            85, 66, 127, -114, -89, 66, 127, 110, 24, 66, -99, 78, 18, 66, -89,\n            -123, -30, 66, -99, 123, -66, 66, -128, 20, 79, 66, 93, 112, 16, 66,\n            -127, 74, 70, 66, -73, 46, 12, 66, -101, 29, -120, 66, 115, 104,\n            105, 66, 115, 75, -122, 66, -88, 17, -20, 66, -104, -54, 68, 66,\n            -91, 54, 101, 66, -126, 81, -23, 66, 126, -1, 96, 66, 103, -16, 121,\n            66, -86, 83, -36, 66, 122, 9, 57, 66, -104, 20, 1, 66, -114, -20,\n            14, 66, 118, -4, -108, 66, -107, 35, -59, 66, 107, 116, -108, 66,\n            87, -62, 29, 66, 78, -36, 125, 66, 104, -4, -65, 66, -76, 119, -8,\n            66, -103, 54, -35, 66, -124, 109, -103, 66, -95, -14, 22, 66, -110,\n            44, 68, 66, -79, -102, 29, 66, -118, -126, 83, 66, -82, 20, 45, 66,\n            -102, -50, -52, 66, -123, -41, -113, 66, -125, 75, -105, 66, -110,\n            -34, -120, 66, -103, 90, -122, 66, -60, 24, -98, 66, -97, -85, -62,\n            66, 82, -30, -78, 66, -79, 93, -64, 66, 109, 14, 97, 66, -71, 83,\n            85, 66, 111, 53, 22, 66, -106, -23, 79, 66, -79, -88, -27, 66, -59,\n            4, 36, 66, -121, 41, -39, 66, 125, 71, -45, 66, 96, 72, -92, 66, 81,\n            -83, 91, 66, -90, -79, 15, 66, -111, 8, -2, 66, -90, -90, -51, 66,\n            -85, -36, -106, 66, 122, 76, 35, 66, -99, -104, -23, 66, 117, 10,\n            22, 66, 104, -117, 28, 66, 106, 86, -50, 66, -86, -109, -31, 66,\n            -97, 71, -106, 66, -125, 125, -109, 66, 
-103, -81, -61, 66, -88, 51,\n            69, 66, -80, 51, -108, 66, 127, 102, 55, 66, -83, -121, 80, 66, -99,\n            7, -47, 66, -116, 30, 104, 66, 108, 87, 96, 66, -126, -120, -49, 66,\n            -88, 122, 58, 66, -71, -65, 59, 66, -75, -31, -109, 66, -91, -9,\n            117, 66, -106, 20, -70, 66, 126, -76, 63, 66, -90, -113, -49, 66,\n            -69, -101, 102, 66, 84, 1, -80, 66, -118, 58, 118, 66, -106, -8,\n            -90, 66, -108, 122, 125, 66, -79, 69, -124, 66, -69, 39, 2, 66,\n            -111, -92, 110, 66, -77, 75, 61, 66, 72, -99, -27, 66, -90, -49,\n            -90, 66, 92, -52, -71, 66, -112, 32, -90, 66, -106, -108, -15, 66,\n            -70, -50, -65, 66, 99, -120, 78, 66, -118, -105, -6, 66, -127, -50,\n            108, 66, 110, -27, -122, 66, -112, -108, -43, 66, 102, -55, 1, 66,\n            -78, 83, -109, 66, 114, 50, -24, 66, -125, 99, 87, 66, -121, -13,\n            -64, 66, -107, 79, 70, 66, -94, 110, -85, 66, -98, -78, -60, 66,\n            122, 37, 98, 66, -94, 57, -63, 66, 104, -66, -7, 66, -108, 22, -122,\n            66, 115, -20, -122, 66, 103, -117, -128, 66, 102, -128, 62, 66,\n            -125, -3, 96, 66, -120, -62, 82, 66, 118, 67, -114, 66, -127, 46,\n            92, 66, -126, 59, 70, 66, -70, -33, -6, 66, -102, -8, -24, 66, -125,\n            25, 12, 66, -109, 14, 125, 66, -119, -110, -60, 66, -82, 35, -34,\n            66, -123, 24, -97, 66, -112, 28, -20, 66, -105, -54, -56, 66, -83,\n            -15, -39, 66, 121, -67, -44, 66, -105, 34, 88, 66, -110, -9, 114,\n            66, -111, -93, -94, 66, 122, 72, 67, 66, -102, 65, -45, 66, -98,\n            -85, -72, 66, -86, -91, -77, 66, -127, 17, -75, 66, 96, -59, 36, 66,\n            -120, 62, 13, 66, -82, 16, -60, 66, -115, -102, 4, 66, -95, -63,\n            -27, 66, -102, 55, -54, 66, -102, 31, -49, 66, -112, -126, 32, 66,\n            -85, 55, 60, 66, 88, 84, 95, 66, -111, -111, 14, 66, -102, -37, 34,\n            66, -123, 30, 68, 66, -121, 84, 42, 66, -72, 
-109, -53, 66, -105,\n            51, 89, 66, -92, 107, -83, 66, 81, 116, 62, 66, 125, -7, 31, 66,\n            110, 94, 114, 66, -121, 17, -12, 66, -103, 115, -37, 66, -113, 64,\n            -92, 66, -124, -12, -76, 66, -71, 12, 7, 66, -114, 113, -92, 66,\n            -107, -58, 8, 66, -77, -128, 103, 66, 122, -50, 34, 66, -123, 42, 4,\n            66, -95, 18, -7, 0, 0, 0, 0, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 253,\n          \"leftIndex\": [\n            -1, 1, 255, 1161552878, 970915724, 1162082041, 1147201774,\n            711874240, 1025966572, 1016440232, 1156918381, 774287572,\n            1098407515, 758651710, 582803878, 712042646, 391\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1162084076, 1104838865, 772712942, 1155331957,\n            1018061585, 773239120, 1159870334, 769517485, 1018586147, 969102601,\n            715054135, 582786302, 753320561, 364\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -1388210874864416000,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            1, 31, 255, 425622219, 144909231, 844096932, 408239312, 581172034,\n            16895927, 857323289, 
614083712, 768110542, 599362554, 494134646,\n            246430980, 722192539, 574816296, 387821868, 69295080, 870210871,\n            503884653, 816670625, 853067783, 810444633, 713880196, 755881543,\n            768770204, 639365931, 612809248, 625484104, 257958187, 520591636,\n            141167312, 374792846, 755614941, 527588315, 169125597, 491698020,\n            619845819, 405795430, 851208309, 817350660, 314382308, 383822855,\n            744296626, 7780\n          ],\n          \"cutValueData\": [\n            66, -60, -48, 100, 66, -73, 116, -105, 66, -101, -6, -47, 66, -88,\n            -52, 126, 66, 90, 118, -34, 66, 102, -91, -120, 66, 112, 79, 82, 66,\n            -85, -40, -4, 66, 86, 49, -38, 66, 94, 76, 32, 66, -105, -17, 109,\n            66, -61, 19, 121, 66, 109, -52, 91, 66, -72, -36, 67, 66, -73, -118,\n            -119, 66, 81, 64, -77, 66, -105, 125, 14, 66, -103, -23, -104, 66,\n            -126, 59, -120, 66, -90, -105, -94, 66, 85, 67, -23, 66, 69, -31,\n            -106, 66, -75, 102, 105, 66, -88, 94, -105, 66, -124, 49, 17, 66,\n            -85, 11, -16, 66, 115, -82, 36, 66, -88, 87, 25, 66, 126, 44, -25,\n            66, -85, 21, -16, 66, -89, -76, -91, 66, -96, -6, -25, 66, -117, 47,\n            33, 66, 107, 4, -78, 66, -106, -105, -59, 66, -66, 103, 17, 66,\n            -115, 8, -60, 66, -117, 91, -71, 66, 99, 44, -35, 66, -90, -102, 31,\n            66, -88, 16, 42, 66, -120, -62, -68, 66, -93, 29, -13, 66, -92, -38,\n            110, 66, 78, 75, -119, 66, -65, -1, 35, 66, -83, 3, 121, 66, -70,\n            31, 25, 66, -116, 42, 8, 66, -120, 107, -85, 66, -67, -4, -77, 66,\n            -122, 32, 94, 66, -97, -89, 2, 66, 107, 109, 66, 66, -107, 99, 76,\n            66, 117, 78, -119, 66, 74, 35, 93, 66, -91, -66, 16, 66, -70, -70,\n            113, 66, 111, -60, 109, 66, -62, 47, 50, 66, 97, 126, -30, 66, -66,\n            67, 61, 66, -118, -9, -110, 66, -72, -71, 38, 66, 127, 87, 19, 66,\n            -110, -14, 47, 66, -67, -118, 
-69, 66, -103, 20, -121, 66, -83, -32,\n            25, 66, -76, 78, 68, 66, -127, -64, 2, 66, -74, -58, 19, 66, -76,\n            -6, 99, 66, -119, -103, 73, 66, -123, -11, -52, 66, -109, -61, 84,\n            66, -128, 79, -118, 66, -109, -22, 39, 66, 68, 12, -90, 66, 124, 58,\n            -27, 66, -96, 29, 63, 66, -117, 21, -23, 66, -121, -125, -58, 66,\n            -103, 121, -54, 66, -105, 69, 15, 66, -103, -16, 113, 66, -100,\n            -111, -24, 66, 125, 24, 6, 66, -90, 111, 8, 66, -84, 115, -110, 66,\n            109, -72, -111, 66, -119, -19, -67, 66, -95, 74, -21, 66, -117, 107,\n            -126, 66, 103, 54, -68, 66, 111, -44, -105, 66, -125, 25, -12, 66,\n            -111, 62, -103, 66, -95, 90, 20, 66, -66, -87, 28, 66, -115, 26, -9,\n            66, -77, -17, 12, 66, -112, -44, -35, 66, -68, 38, -35, 66, -125,\n            47, -30, 66, -71, -7, -62, 66, -94, 48, 7, 66, -120, -43, 1, 66,\n            -86, -11, 62, 66, -91, -21, 40, 66, 123, 120, 45, 66, -80, 108, 62,\n            66, 101, 56, 7, 66, -75, -13, 124, 66, -113, 2, 78, 66, -82, 52,\n            -11, 66, -96, 95, -58, 66, -83, -38, 114, 66, -98, 52, 32, 66, -96,\n            19, -78, 66, -76, 56, -2, 66, -110, 43, 4, 66, 80, 125, -61, 66,\n            124, -45, -38, 66, -112, 122, -49, 66, -111, 43, -18, 66, -112, -8,\n            -126, 66, 90, -97, 104, 66, -106, 78, 11, 66, -70, 86, -55, 66,\n            -109, 60, -44, 66, -117, -92, 100, 66, -117, 70, -47, 66, -118,\n            -109, 80, 66, 98, -91, -56, 66, -64, 44, -57, 66, -110, 8, -67, 66,\n            -118, 117, -126, 66, -110, -128, 110, 66, -85, 48, 31, 66, -115,\n            -125, -29, 66, 85, -109, -55, 66, -115, -96, -63, 66, -70, 97, -114,\n            66, -96, 112, -88, 66, -105, 0, 27, 66, -98, 114, 84, 66, 68, 70,\n            11, 66, -106, -54, -123, 66, -128, 27, -62, 66, -86, 31, -11, 66,\n            -116, 21, 0, 66, -108, -90, -12, 66, -78, -59, 89, 66, -121, 27, 8,\n            66, -104, -92, 85, 66, -104, -68, 
116, 66, -91, 125, -78, 66, -89,\n            95, -106, 66, 86, 33, 105, 66, 82, -28, 4, 66, -103, -61, -121, 66,\n            121, 25, 38, 66, 103, 10, -63, 66, -82, 21, 60, 66, -109, -67, -81,\n            66, -104, 24, 46, 66, -128, -36, -15, 66, -118, -59, -1, 66, -121,\n            -38, 69, 66, 113, -58, -65, 66, -121, -71, 52, 66, -97, -44, 22, 66,\n            -60, -96, 7, 66, -99, 45, 34, 66, -106, -14, 86, 66, -97, 29, -106,\n            66, -81, 84, 127, 66, -111, -89, -81, 66, -71, -90, 124, 66, -117,\n            -1, -4, 66, -120, -73, 126, 66, -119, 14, -30, 66, 92, 100, -10, 66,\n            -77, -127, -9, 66, -124, 118, -118, 66, 93, -49, -98, 66, -119, -86,\n            -1, 66, 106, -119, -124, 66, -126, 99, 75, 66, -103, -11, 30, 66,\n            -117, 46, -22, 66, -115, 89, -33, 66, -89, -11, 29, 66, -78, 59, 14,\n            66, -112, -17, 65, 66, -69, -79, -21, 66, -78, -55, 23, 66, -89,\n            102, -43, 66, -100, -95, -76, 66, -100, 124, -110, 66, -103, 57,\n            -39, 66, -109, -26, -34, 66, -66, 102, -21, 66, 92, 105, 6, 66, -94,\n            -110, 0, 66, -84, -106, -57, 66, -121, 36, -67, 66, -125, -123, 6,\n            66, -101, -95, -91, 66, -111, 59, 104, 66, -115, 123, -48, 66, -92,\n            -102, 11, 66, 88, -95, 48, 66, 92, 27, 41, 66, -119, -35, -76, 66,\n            -122, -64, 53, 66, -122, -77, -127, 66, -123, 99, 65, 66, -116, 95,\n            -121, 66, 121, -67, 108, 66, -65, 92, 86, 66, -126, -11, 15, 66,\n            -105, -16, 96, 66, 94, 83, -63, 66, -101, 31, 46, 66, -95, 42, -103,\n            66, -82, -100, -36, 66, 92, 19, -23, 66, -106, -75, -54, 66, -124,\n            -104, 89, 66, 117, 71, 2, 66, -73, 11, 111, 66, -127, -14, 50, 66,\n            110, -13, 112, 66, -105, -118, -45, 66, -81, 0, -95, 66, -84, -47,\n            -42, 66, -94, 98, 114, 66, -123, 63, -28, 66, 123, -2, 4, 66, -107,\n            72, 51, 66, 72, 67, -102, 66, -98, -46, -29, 66, -99, 78, 90, 66,\n            -120, 87, 28, 66, 106, 
-15, 29, 66, -116, -108, -121, 66, 103, -61,\n            -121, 66, -77, -45, -73, 66, 100, -116, -127, 66, -113, 101, 81, 66,\n            125, 15, 54, 66, -120, -71, 11\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 255,\n          \"leftIndex\": [\n            0, 1, 255, 1071445919, 530489087, 567670760, 668846143, 18740279,\n            266131074, 557802017, 840968560, 0\n          ],\n          \"rightIndex\": [\n            0, 1, 255, 360626143, 797186431, 32615112, 384598228, 281864855,\n            242933764, 619652, 709388776, 6272\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": -1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": -1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -5115030242711725490,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 108505767, 479371230, 476801865, 129435123, 357886958,\n            879169134, 440351138, 456493873, 398023646, 771081298, 249470198,\n            645773885, 1039967609, 938438618, 1038667563, 78167277, 230547394,\n            1016853958, 978511219, 370245062, 395871426, 387233217, 802873034,\n            1016568690, 1056001443, 590855073, 460443587, 97898103, 513088234,\n            1069505259, 624010986, 1052219045, 133424678, 663848258, 656512707,\n            
926770483, 212651459, 922220895, 861469929, 320138303, 459621995,\n            482134238, 23\n          ],\n          \"cutValueData\": [\n            66, -71, 50, -39, 66, -104, 37, 46, 66, 120, 14, 77, 66, -63, 81,\n            -111, 66, -123, 61, 34, 66, 75, -113, 110, 66, -95, 115, -122, 66,\n            -79, 108, -117, 66, -114, 74, -46, 66, -112, 88, -9, 66, -124, -61,\n            -58, 66, 104, 119, 112, 66, -67, -27, -48, 66, -78, 106, -107, 66,\n            -92, -55, -90, 66, -95, -127, 69, 66, -81, -27, 117, 66, -124, 4,\n            -116, 66, -122, -28, -70, 66, -108, -3, -18, 66, -92, -83, 71, 66,\n            -100, -63, -121, 66, -101, 8, 94, 66, 117, 114, -5, 66, -77, -47,\n            -51, 66, -70, 52, 48, 66, 125, 77, 14, 66, -84, -2, -79, 66, 68, 3,\n            -28, 66, -81, 100, 37, 66, 75, -68, -47, 66, -120, 32, -44, 66,\n            -114, -10, 11, 66, 72, -82, -64, 66, -103, 96, -79, 66, -110, -17,\n            -90, 66, -103, 119, 38, 66, 76, -11, -32, 66, -113, -5, 94, 66, -86,\n            119, -75, 66, -109, 113, -40, 66, -113, 51, -57, 66, -79, 29, -53,\n            66, 117, -43, 19, 66, -77, -111, 0, 66, -102, 46, 85, 66, -104,\n            -127, 84, 66, -82, 79, 118, 66, -104, -49, -33, 66, -71, -55, 124,\n            66, -99, -14, -75, 66, -112, 113, -44, 66, -112, -92, 53, 66, 115,\n            -113, -109, 66, -120, -103, -63, 66, -90, 37, 86, 66, -117, 33, -20,\n            66, -105, 67, -94, 66, 97, 1, -109, 66, -109, 16, 28, 66, 114, -125,\n            83, 66, -108, 113, -70, 66, -114, -11, -8, 66, -122, 89, 50, 66,\n            -122, -102, -2, 66, -102, 72, 9, 66, -99, 44, -117, 66, 121, 119,\n            105, 66, -105, 14, -16, 66, -64, 91, -58, 66, -117, 68, -40, 66,\n            -80, 13, 7, 66, -118, 81, -107, 66, 123, 31, 17, 66, 95, 111, -82,\n            66, -105, -127, -119, 66, -81, 114, -99, 66, -96, 71, -66, 66, -93,\n            -103, 35, 66, -107, 43, 89, 66, -94, -89, -8, 66, -120, -66, 121,\n            66, -75, 97, 
2, 66, -90, -8, 54, 66, -117, 30, 69, 66, -116, -70,\n            -102, 66, -70, -1, -94, 66, -71, 14, -91, 66, -122, -46, 49, 66,\n            110, -81, 15, 66, 124, 73, 28, 66, 92, 57, 67, 66, 113, 98, 58, 66,\n            111, 3, -111, 66, -124, -6, 49, 66, 103, 35, 122, 66, 119, 8, -3,\n            66, -99, -25, 125, 66, 108, -17, -35, 66, -117, -53, 4, 66, -71,\n            121, -49, 66, 110, -86, 117, 66, -104, -21, 48, 66, -111, -67, 125,\n            66, -101, 75, 79, 66, 127, -66, -45, 66, -96, 23, -114, 66, -90,\n            -89, -114, 66, -84, -94, 116, 66, -100, -123, -50, 66, -85, 84, 6,\n            66, 88, -124, 116, 66, 126, 38, -23, 66, -89, -100, -93, 66, -95,\n            -102, -114, 66, -127, -110, 111, 66, 111, -28, -119, 66, -111, -35,\n            -12, 66, -81, -31, 104, 66, -77, 112, 37, 66, -101, -107, 16, 66,\n            -80, -31, 47, 66, -103, -16, -118, 66, -98, -58, -90, 66, -82, 27,\n            -62, 66, -94, 23, 67, 66, -106, 75, 103, 66, -113, -102, 6, 66, -84,\n            119, 16, 66, 85, -128, -82, 66, -86, -65, 19, 66, 106, -121, 112,\n            66, 108, -61, -41, 66, 95, 112, -28, 66, 122, 35, 83, 66, 124, 104,\n            35, 66, -105, -8, -39, 66, -125, 22, -122, 66, -96, 98, -32, 66,\n            -90, 93, -24, 66, 119, -27, 29, 66, -127, -50, 8, 66, 114, 95, 80,\n            66, -95, 72, -40, 66, -95, -22, -71, 66, -69, -125, -61, 66, -65,\n            -22, -125, 66, 110, -124, -42, 66, -81, 26, 72, 66, -115, -15, -84,\n            66, 93, -102, 39, 66, -126, -90, -88, 66, -92, -81, 127, 66, -83, 7,\n            7, 66, -99, 84, 117, 66, 100, -66, -4, 66, -73, 22, -36, 66, -114,\n            63, 9, 66, -103, -43, 54, 66, -95, -127, -36, 66, 119, 126, 71, 66,\n            -102, -31, -61, 66, -120, 63, -113, 66, 83, 22, -45, 66, -66, -61,\n            13, 66, -105, 124, 11, 66, -99, 95, 118, 66, -114, -38, -68, 66,\n            -119, 49, 94, 66, -125, 14, -68, 66, -121, 117, -38, 66, 108, 85,\n            90, 66, 109, 14, 
-111, 66, -89, -6, -20, 66, 102, 2, 40, 66, -83,\n            20, -98, 66, -90, -55, -2, 66, 94, -77, 70, 66, -124, -6, 81, 66,\n            -104, -6, -85, 66, 112, -95, 68, 66, -109, 21, -42, 66, -81, 74, 80,\n            66, -94, -107, -2, 66, -120, -122, 47, 66, -122, -16, 27, 66, 108,\n            104, 82, 66, 125, -98, 81, 66, -102, 40, -19, 66, -100, 34, 47, 66,\n            -98, 111, -82, 66, -114, 86, 14, 66, -71, -81, 52, 66, -110, 22,\n            -58, 66, -108, -105, -53, 66, 89, -34, 55, 66, -76, -30, -114, 66,\n            -119, -11, -60, 66, -125, -102, -30, 66, -106, 8, 66, 66, -109, 13,\n            -86, 66, 100, -106, 44, 66, 100, 112, -80, 66, -82, -29, 47, 66,\n            113, -115, -118, 66, 112, 74, 1, 66, -116, 126, 84, 66, -118, -49,\n            122, 66, -110, -83, 78, 66, -79, 63, -57, 66, -103, 29, -8, 66, 92,\n            -82, 31, 66, -84, -60, -40, 66, 94, 32, -16, 66, -120, -17, 54, 66,\n            -76, -111, 7, 66, -80, 103, -127, 66, -110, 115, 72, 66, -91, -122,\n            60, 66, -115, -116, -60, 66, -103, -124, -123, 66, 99, -30, -20, 66,\n            70, -102, -26, 66, -106, -125, -102, 66, -106, -59, -119, 66, -92,\n            -109, -122, 66, 125, -10, -52, 66, -125, -54, 84, 66, -120, 13, 78,\n            66, 118, -93, -67, 66, -68, -91, 50, 66, -125, 62, 81, 66, 81, -96,\n            13, 66, -102, 42, -101, 66, -69, -24, -39, 66, 105, -78, -98, 66,\n            -115, 99, -92, 66, 121, 75, -68, 66, 106, -19, 36, 66, -81, -102,\n            41, 66, -97, -31, -6, 66, -117, 63, -90, 66, -87, 52, 64, 66, -67,\n            -26, 123, 66, -113, -74, 105, 66, -83, -80, 115, 66, -91, -50, 83,\n            66, -114, 86, -123, 66, -122, -35, -14, 66, -112, 34, 85, 66, -120,\n            -9, 66, 66, 126, 93, 53, 66, 124, 39, 45, 0, 0, 0, 0, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 253,\n          \"leftIndex\": [\n            
-1, 1, 255, 1119207941, 640839029, 1155884080, 626391752, 643515029,\n            753398657, 760423210, 583460159, 970704040, 1016971657, 970736855,\n            1025956570, 973356061, 395\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1119195062, 1031467838, 1157478377, 640189574,\n            767675527, 638546156, 1160598265, 583263305, 984671869, 758122793,\n            581748520, 984523163, 581151145, 365\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -878626023249091604,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 920336034, 1056012206, 1010822202, 346413815, 173243682,\n            925326126, 873813705, 652821547, 1042528895, 919783727, 368819802,\n            635762943, 173315539, 75338801, 1063726397, 346987043, 221824105,\n            518752847, 773672570, 596716713, 364584403, 647849797, 908967589,\n            112458541, 85831497, 445881671, 622167083, 913803195, 532380410,\n            628705195, 747981359, 804882686, 508992891, 801064179, 367967019,\n            590850150, 376736057, 624530799, 722766457, 393281118, 73266270,\n            749844714, 627\n          ],\n          \"cutValueData\": [\n            66, -66, -16, 34, 66, -101, -37, -70, 66, -95, -60, 49, 66, 
-124,\n            -128, -118, 66, -58, 103, -102, 66, -128, 78, -119, 66, -66, -87,\n            -55, 66, -93, 116, 6, 66, -65, -23, 102, 66, 73, 88, -52, 66, -124,\n            -60, -6, 66, -98, 22, -111, 66, -74, 83, 91, 66, -80, 71, -32, 66,\n            118, 103, -24, 66, -89, 3, 48, 66, 116, -85, 10, 66, -103, 53, -90,\n            66, -69, 117, 88, 66, -69, -41, 8, 66, 115, 4, -44, 66, 123, 38, 90,\n            66, -76, -62, 7, 66, -98, -38, 9, 66, -112, 4, -8, 66, -121, -27,\n            85, 66, -87, -101, -81, 66, 111, -53, 52, 66, -107, 88, 77, 66, -81,\n            -82, -96, 66, -87, 111, 43, 66, -101, -82, -122, 66, 108, 112, 86,\n            66, -99, 55, -17, 66, -125, 21, -77, 66, -91, -105, 65, 66, -122,\n            -49, 25, 66, -128, -34, 89, 66, 114, 64, -43, 66, -110, -85, 88, 66,\n            122, 120, 121, 66, -60, 82, 125, 66, -111, -56, -87, 66, 123, 55,\n            16, 66, 125, -24, 82, 66, 115, -93, -67, 66, -88, 15, -120, 66, -59,\n            -128, 81, 66, 81, 12, 88, 66, -117, 29, -110, 66, 91, 74, 105, 66,\n            -102, -26, -105, 66, -95, -40, 63, 66, -104, -94, 61, 66, -84, 22,\n            -67, 66, -67, -52, -43, 66, 113, 50, -76, 66, -124, 59, -59, 66,\n            -110, 28, -74, 66, -117, 52, -46, 66, -80, 88, -77, 66, -69, 103, 4,\n            66, -90, -44, -18, 66, 116, -39, -83, 66, -119, -29, -28, 66, 93,\n            -17, 64, 66, -80, -94, -71, 66, -72, 45, -81, 66, 126, -94, -90, 66,\n            -99, -34, -115, 66, -81, -88, -92, 66, -79, 27, -17, 66, 89, 113,\n            -51, 66, -122, -116, -12, 66, -119, -36, -87, 66, -94, 54, -7, 66,\n            -88, 22, 33, 66, 108, -80, 2, 66, 88, 59, 47, 66, 98, 98, -34, 66,\n            -120, 68, -106, 66, 121, 5, 81, 66, -100, -67, -25, 66, 77, 114,\n            -35, 66, -77, 35, 108, 66, -89, -2, -28, 66, -116, -82, 25, 66, 85,\n            18, -11, 66, 92, -117, -92, 66, -73, -55, 14, 66, -97, -60, 113, 66,\n            -112, -47, 63, 66, -105, 122, 100, 66, -126, -39, 
-47, 66, -64, -24,\n            4, 66, -100, -72, 97, 66, 119, -115, 37, 66, -99, -57, -124, 66,\n            -104, -62, -63, 66, -74, 49, 113, 66, -96, -40, 102, 66, -89, 18,\n            28, 66, -92, 84, 22, 66, -97, 119, -19, 66, -109, -106, 121, 66,\n            -104, 79, 99, 66, 86, 42, -17, 66, -125, -10, -31, 66, -127, -70,\n            31, 66, 106, -80, -79, 66, -119, 32, 52, 66, 89, 93, -89, 66, 93,\n            56, 1, 66, 88, 123, 56, 66, -111, -95, -101, 66, -119, 41, 118, 66,\n            115, -14, 2, 66, 110, 34, 117, 66, -116, 110, 32, 66, -122, -114,\n            123, 66, -116, 77, 19, 66, -117, -52, 41, 66, 85, -98, -48, 66, 93,\n            -69, 69, 66, 123, -27, 31, 66, -78, -104, 43, 66, 104, -1, -118, 66,\n            -84, -37, 95, 66, -104, -92, 54, 66, -103, 116, -66, 66, -97, 111,\n            -112, 66, 104, 65, 37, 66, -70, 23, 88, 66, -112, -8, -12, 66, -107,\n            -50, -27, 66, 127, -31, -110, 66, -128, -109, -1, 66, -96, -18,\n            -110, 66, -87, 31, 27, 66, -88, -103, 125, 66, 99, -52, -97, 66,\n            -71, 117, -6, 66, -117, -50, 17, 66, -97, 28, -116, 66, -127, -48,\n            -38, 66, -81, 93, -87, 66, -121, -55, 59, 66, -108, -30, -17, 66,\n            85, 12, -19, 66, 74, -74, 22, 66, 85, 11, 105, 66, -96, -106, 33,\n            66, -119, -52, -57, 66, 115, -16, -3, 66, -116, 46, 80, 66, -88, 55,\n            112, 66, 116, 8, 101, 66, -88, 46, 32, 66, 97, 103, 9, 66, -127, 34,\n            72, 66, -94, -93, 55, 66, -84, -89, 82, 66, -92, 7, 4, 66, 127, -4,\n            36, 66, -77, -7, -1, 66, -78, 123, 5, 66, -120, 50, 2, 66, -98, -92,\n            66, 66, -96, -99, -67, 66, -106, 59, 104, 66, -97, 16, -77, 66, -72,\n            -25, -97, 66, -118, 46, -90, 66, -77, -64, -55, 66, -84, 20, -108,\n            66, -99, -37, -102, 66, -111, -100, -37, 66, -111, 79, 40, 66, -88,\n            -70, -5, 66, -77, 13, -100, 66, -106, 77, 65, 66, -85, -3, 57, 66,\n            99, -127, 12, 66, -89, 44, 42, 66, -79, -22, 
107, 66, -96, -54, 3,\n            66, -80, -61, 100, 66, -128, 30, -77, 66, -125, -80, 56, 66, 122,\n            26, 51, 66, 74, -72, 45, 66, 80, 66, 46, 66, -120, -24, 12, 66, -88,\n            61, -61, 66, -102, -1, -24, 66, -91, 28, -70, 66, -87, -50, -23, 66,\n            -123, 93, 56, 66, -128, -64, -96, 66, 124, -13, 21, 66, -111, -32,\n            -68, 66, 121, -73, -6, 66, -72, 39, -98, 66, -118, -3, -128, 66,\n            -104, -7, 65, 66, -76, 58, -123, 66, -91, -103, -30, 66, -125, -117,\n            35, 66, -75, -95, 39, 66, -72, -29, -67, 66, -124, 74, -52, 66,\n            -104, 39, -37, 66, 104, -35, 102, 66, -91, 118, 56, 66, -87, 125,\n            64, 66, -114, 93, 78, 66, -114, 28, 20, 66, -61, -101, 66, 66, -100,\n            121, 69, 66, -66, 51, -97, 66, -125, 53, 98, 66, 103, 74, -82, 66,\n            -81, -94, -108, 66, -119, -14, 79, 66, -121, -50, -126, 66, -115,\n            -75, -84, 66, 99, -36, 58, 66, -117, 59, 26, 66, -117, 87, 26, 66,\n            -82, -16, -95, 66, -77, 24, -96, 66, -117, -62, 45, 66, -103, -40,\n            -74, 66, -85, 59, 37, 66, 96, -66, 70, 66, -73, -117, -93, 66, -69,\n            0, 43, 66, 126, 28, -97, 66, -114, 66, -38, 66, -112, 118, 99, 66,\n            78, -114, -3, 66, -82, 40, 18, 66, 113, 43, 22, 66, -98, 40, -70,\n            66, -96, 117, -77, 66, -101, -16, -76, 66, -92, 29, 19, 66, -117,\n            81, 106, 66, 115, -89, -32, 66, -103, -127, 52, 66, -106, -127, -13,\n            66, -74, -1, 62, 66, 88, -97, -103, 66, -120, 122, -33, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1162241783, 987870086, 772537813, 1162057343,\n            1155094576, 712399181, 1026721133, 1119192844, 975639533, 970736047,\n            582734047, 643861210, 595482152, 1093\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 
1161486458, 731026618, 975528818, 1114234865, 772629190,\n            755679911, 629559934, 983689973, 586447357, 1104305696, 581310070,\n            596013277, 601866733, 1102\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -7506740242986070872,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 437434827, 233024426, 769601339, 321501377, 526254065,\n            69943122, 360514594, 190753338, 1026539113, 758482278, 331312977,\n            52894174, 229180501, 869629874, 872191853, 710778574, 737776949,\n            455964503, 639308843, 974457843, 606307926, 903319223, 938782053,\n            380155693, 104528113, 804996289, 884677314, 204516543, 311766229,\n            385443951, 441530611, 117006069, 463570654, 899931313, 476007418,\n            670767710, 72451166, 925853661, 51723313, 500267607, 376482798,\n            1013050623, 859\n          ],\n          \"cutValueData\": [\n            66, 85, 7, -128, 66, -65, 41, -73, 66, -122, 112, 47, 66, -74, 36,\n            64, 66, -84, 77, 15, 66, -65, -71, -6, 66, -99, -112, 65, 66, 113,\n            -44, -1, 66, -110, -99, -125, 66, 85, -42, 37, 66, -127, 37, -41,\n            66, 97, -115, 83, 66, 112, -50, -82, 66, -82, -84, -5, 66, -123,\n            117, 
107, 66, -62, 69, -82, 66, -81, -71, 10, 66, 124, 63, -84, 66,\n            -118, -39, 57, 66, -119, -109, 89, 66, -97, 86, 99, 66, -95, 58,\n            -14, 66, -118, -91, 72, 66, -115, -61, 41, 66, -117, -32, -7, 66,\n            -80, 121, 12, 66, -87, -109, 16, 66, -82, 40, -12, 66, 119, 85,\n            -120, 66, -128, 82, 17, 66, -117, -101, -117, 66, 79, 32, 91, 66,\n            -76, -128, -109, 66, -86, 123, 32, 66, 119, -11, -102, 66, -95,\n            -113, -128, 66, 73, 87, 21, 66, -125, 46, -18, 66, -73, -102, -32,\n            66, -76, 70, -79, 66, 114, -88, -101, 66, -69, -50, -65, 66, -80,\n            124, 14, 66, -118, -31, 67, 66, -72, 5, 87, 66, -128, -1, 75, 66,\n            -97, 115, -12, 66, -96, -20, -12, 66, 108, -65, -9, 66, -125, 42,\n            68, 66, -125, 69, -113, 66, -99, -10, -3, 66, -111, 24, -1, 66, -84,\n            -29, 7, 66, 112, -68, 96, 66, -124, 49, 121, 66, -103, 29, 7, 66,\n            97, 106, 9, 66, -83, -33, -92, 66, -101, 121, 98, 66, -93, -37, 37,\n            66, -73, 65, -128, 66, -90, 20, -98, 66, 98, 11, -11, 66, -88, -84,\n            -59, 66, -105, -22, 23, 66, 79, -10, 46, 66, 122, -122, 20, 66, 113,\n            -4, -53, 66, 86, 70, -37, 66, -86, -44, 16, 66, 126, -123, 111, 66,\n            89, 12, 111, 66, 120, -119, -70, 66, -97, -58, 29, 66, 125, -26,\n            115, 66, -97, 114, 106, 66, -82, -29, -8, 66, -102, 10, -26, 66, 75,\n            69, -119, 66, -120, 43, -106, 66, -106, 79, 58, 66, 109, -79, -119,\n            66, 114, -70, -79, 66, -75, -66, -120, 66, -118, 37, 13, 66, -69,\n            -8, 118, 66, 123, 55, 28, 66, -99, 82, -55, 66, -115, 110, 22, 66,\n            -114, 120, 107, 66, -123, -104, 90, 66, -128, 122, 63, 66, 107,\n            -116, 45, 66, -122, -75, -8, 66, -104, 100, -34, 66, -113, 28, -78,\n            66, -91, 56, -39, 66, -116, -4, 122, 66, -90, -22, 104, 66, -109,\n            54, -49, 66, 98, 81, -43, 66, 115, -33, 45, 66, -66, 77, 1, 66, -76,\n            33, 91, 66, 
-91, -86, 113, 66, -68, -60, 99, 66, 98, 44, 76, 66,\n            -68, -47, 55, 66, -99, -107, -80, 66, 78, -117, 66, 66, 83, -85, 68,\n            66, 122, -56, -100, 66, 114, -28, -80, 66, -84, 116, -113, 66, 119,\n            27, 89, 66, -101, -2, -76, 66, -73, -34, -43, 66, -127, -6, 109, 66,\n            107, -12, -50, 66, -71, -94, -50, 66, -82, 88, -18, 66, -107, -51,\n            110, 66, 85, 98, 120, 66, 92, 49, -115, 66, -82, 77, -39, 66, -82,\n            -6, -94, 66, -109, 69, 7, 66, -121, -104, 27, 66, 93, 27, 99, 66,\n            -67, -101, -53, 66, -128, -70, 30, 66, 122, -53, -86, 66, -82, 83,\n            81, 66, -128, 112, -114, 66, -109, -47, 10, 66, 93, 65, -117, 66,\n            -79, 4, -70, 66, -91, 93, 91, 66, -126, -31, -109, 66, -84, 43, 104,\n            66, 102, -13, 98, 66, -91, -123, 62, 66, -125, 57, 43, 66, -67, -25,\n            82, 66, -77, 16, -16, 66, 87, 10, 33, 66, -98, -76, 91, 66, -86,\n            120, -122, 66, -128, -124, -96, 66, -120, 69, -125, 66, -81, 121,\n            -41, 66, 109, 99, 40, 66, -99, 87, 98, 66, -114, 67, -15, 66, -125,\n            108, 63, 66, -117, 108, -30, 66, -110, 53, -121, 66, -97, -34, 59,\n            66, 122, 12, 86, 66, -86, -63, 53, 66, -125, -76, -96, 66, -121,\n            -124, 62, 66, -61, -11, -86, 66, -87, -43, -62, 66, -113, 72, 64,\n            66, -80, -48, 55, 66, -106, 122, -31, 66, -106, -37, -57, 66, -125,\n            33, -11, 66, -117, 100, -49, 66, -65, -65, 20, 66, -79, -3, 70, 66,\n            -94, 11, 2, 66, -89, -77, -93, 66, -104, 45, -43, 66, -119, -70, 33,\n            66, -87, -69, -111, 66, -115, -41, 36, 66, -127, -100, -88, 66, -88,\n            109, -3, 66, -120, 0, -61, 66, -80, 118, -67, 66, -86, -121, 15, 66,\n            -86, 61, 91, 66, -115, -118, 90, 66, -99, 45, -91, 66, -74, -103,\n            81, 66, -115, -128, -112, 66, 115, -10, 89, 66, -96, 100, -52, 66,\n            -120, -36, -92, 66, -66, 6, 50, 66, -106, 45, 102, 66, -88, -97, 55,\n            
66, -107, -69, 73, 66, -84, 68, 100, 66, -85, -13, 73, 66, -105,\n            104, -29, 66, -97, 71, -59, 66, -102, 100, -91, 66, -104, -15, 86,\n            66, -94, 33, 37, 66, -66, -24, 7, 66, -99, -119, 95, 66, -121, 81,\n            -62, 66, 107, -88, 68, 66, 125, -78, -122, 66, -88, -21, -48, 66,\n            -75, 38, 91, 66, -67, -109, -104, 66, 88, 61, -68, 66, -114, 55,\n            -49, 66, 98, -50, -62, 66, 125, 21, 12, 66, -100, -5, -110, 66, 71,\n            110, -51, 66, 109, 39, -48, 66, 73, 11, 74, 66, -118, -42, -58, 66,\n            -125, -43, 47, 66, 106, -5, 77, 66, -76, -98, -73, 66, 126, -18,\n            -31, 66, -112, 55, -15, 66, -105, -36, 26, 66, -76, -126, -8, 66,\n            -93, 9, 124, 66, 88, 18, -10, 66, -98, 56, -84, 66, -114, 94, -7,\n            66, -82, -124, 60, 66, -75, 104, 2, 66, -72, -69, 61, 66, -94, 51,\n            89, 66, -107, 51, -70, 66, -98, -51, -19, 66, -114, -45, -96, 66,\n            -105, -105, -2, 66, -92, -97, 55, 66, -104, -5, 0, 66, -99, 119,\n            106, 66, -92, -104, -80, 66, -107, 31, 88, 66, -114, -82, -108, 66,\n            -79, -12, -87, 66, 117, -31, 2, 66, -93, -119, -67, 66, -123, 37,\n            -32, 66, -92, 1, -46, 66, -82, 102, 67, 66, -71, 31, 17, 66, -76,\n            125, -81, 66, 126, -44, 28, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1162261466, 583338509, 1160666783, 715296712, 755531108,\n            1030736492, 1011775184, 1031438498, 755621260, 1141003547,\n            985271819, 725152217, 629676571, 1093\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 760491311, 1119028850, 1145767031, 600972007, 731786966,\n            1026478403, 581327819, 768259210, 1028092378, 1145549536, 729461849,\n            581133973, 597842330, 1123\n          ],\n          \"nodeFreeIndexes\": [],\n          
\"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -3745362813213582331,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 1071894349, 262392401, 257511083, 98391255, 91742062,\n            850185766, 995206250, 984824490, 511667011, 870751138, 335264801,\n            124823161, 343369397, 442419129, 623434923, 741997686, 708038333,\n            227662061, 44246255, 402625571, 223692217, 225505141, 80866255,\n            914056386, 120429287, 1033366499, 190709614, 733935805, 332623726,\n            1003992757, 1042277686, 1001371058, 68803790, 384415567, 60989145,\n            774999078, 169796962, 224634973, 641390009, 844876479, 1004906978,\n            668584821, 847\n          ],\n          \"cutValueData\": [\n            66, -125, 29, -58, 66, 110, -50, -127, 66, -127, -5, -3, 66, -114,\n            121, 28, 66, -71, 9, 36, 66, 123, -110, -87, 66, -123, 119, 33, 66,\n            -82, -98, 73, 66, 109, 84, 18, 66, -89, 111, 99, 66, -104, -33, -77,\n            66, 127, -46, 103, 66, -115, -93, 6, 66, -108, 98, -17, 66, -73,\n            105, -12, 66, 119, -13, -123, 66, -90, -122, -107, 66, -105, 47,\n            -82, 66, 108, 18, 116, 66, -95, 74, -48, 66, -127, 22, -100, 66,\n            119, -24, 61, 66, 84, -120, -73, 66, -99, 46, -49, 66, -97, 113,\n            
-17, 66, 113, -68, 48, 66, -81, 90, 88, 66, 109, -26, 108, 66, -85,\n            -82, 32, 66, -123, -16, 85, 66, -118, -83, 99, 66, 113, 48, 49, 66,\n            -99, 104, 5, 66, -74, 33, -50, 66, -110, 84, 22, 66, -110, 104, 7,\n            66, -88, -38, -40, 66, -104, 30, -101, 66, -103, 125, -76, 66, -128,\n            85, -69, 66, -89, 59, -109, 66, 95, 13, -29, 66, -110, 60, -24, 66,\n            -79, 60, -112, 66, -94, 80, 127, 66, -74, 109, 26, 66, -85, 122,\n            119, 66, -90, -101, -52, 66, 73, -87, -118, 66, -63, 6, -74, 66,\n            -77, -46, -105, 66, -123, -92, -54, 66, 123, 39, 122, 66, 97, -110,\n            -50, 66, -104, 30, -2, 66, 110, 19, 121, 66, -101, -87, 48, 66, 127,\n            -25, 22, 66, -99, 84, 26, 66, -109, -81, 68, 66, -111, 51, 122, 66,\n            -74, -55, -57, 66, 123, 48, -111, 66, -119, 27, 48, 66, -68, -16,\n            122, 66, -107, -16, -61, 66, -99, 35, 54, 66, 69, -68, 116, 66,\n            -101, -101, 46, 66, -106, 87, -86, 66, -122, -95, -78, 66, -108,\n            -77, 95, 66, -106, 13, -99, 66, 119, 69, -52, 66, -95, -80, -19, 66,\n            -86, 4, -61, 66, -120, 44, 109, 66, -82, -53, 38, 66, -88, 95, 94,\n            66, 124, -13, 97, 66, -64, -45, 71, 66, -90, 50, -98, 66, 97, -13,\n            108, 66, 85, -87, -25, 66, -104, -24, 9, 66, -69, -39, -30, 66, -83,\n            34, -31, 66, 91, 119, 54, 66, 111, 47, 43, 66, -120, -48, 89, 66,\n            -69, -105, 23, 66, 117, -36, 21, 66, -120, -74, 5, 66, -124, 38,\n            100, 66, 120, 37, -125, 66, -100, 97, 35, 66, -76, 55, 6, 66, 116,\n            -76, 75, 66, -118, 100, 75, 66, 73, 29, -86, 66, 106, -102, 73, 66,\n            -98, 117, 4, 66, -79, 18, 57, 66, 116, 100, 12, 66, 113, -12, -11,\n            66, -113, -75, 123, 66, -123, 58, -82, 66, -102, 4, -98, 66, -105,\n            68, 40, 66, 125, -7, 40, 66, -69, -70, -18, 66, 101, -88, 29, 66,\n            118, 76, -23, 66, 99, 24, -84, 66, -70, -34, 72, 66, -114, -107,\n            
-113, 66, 99, 119, -38, 66, -96, 108, 100, 66, 112, -85, 23, 66,\n            -100, 5, 108, 66, 72, -121, 98, 66, 103, -127, 112, 66, 79, 70, -14,\n            66, -65, -80, -4, 66, -100, -76, 54, 66, -88, -85, -108, 66, -106,\n            -9, -72, 66, -75, 56, -78, 66, -111, -111, 20, 66, -98, 86, 81, 66,\n            -104, -64, -49, 66, -99, 72, -53, 66, -121, 82, 79, 66, -106, 82,\n            -124, 66, -79, -95, -7, 66, -104, -82, -61, 66, -71, 32, -52, 66,\n            -69, -12, -46, 66, -89, -85, 9, 66, 84, -78, 110, 66, -111, 109, 40,\n            66, -95, 127, -64, 66, 84, -43, 81, 66, 119, 103, 38, 66, -114, 36,\n            96, 66, 126, -103, -34, 66, -117, -118, -99, 66, -93, 77, -36, 66,\n            -103, 75, -114, 66, -104, 25, 66, 66, -69, -52, 61, 66, 96, 97, 12,\n            66, -76, -89, 17, 66, -90, 20, -12, 66, -107, 110, 102, 66, -88,\n            -110, -90, 66, -95, 30, -20, 66, -106, 16, 105, 66, -75, 100, 69,\n            66, 114, 31, -68, 66, -80, -24, -46, 66, -93, -113, 116, 66, -102,\n            104, 54, 66, 94, -116, -81, 66, -107, 86, 62, 66, 87, 110, -96, 66,\n            115, 94, -60, 66, -79, 100, 89, 66, -81, -88, 27, 66, -122, -6,\n            -101, 66, 120, 63, 60, 66, -95, 53, 125, 66, -107, -82, 101, 66,\n            -97, 52, -73, 66, -88, -62, -101, 66, -61, -11, 102, 66, -72, -15,\n            -38, 66, -96, 64, -31, 66, 121, -53, -66, 66, 123, 100, -90, 66,\n            127, 106, 1, 66, -110, 102, 72, 66, -98, 58, -104, 66, -82, -103,\n            -51, 66, -84, 28, -64, 66, -86, -37, 91, 66, -109, 43, 113, 66, -93,\n            94, 11, 66, -96, -36, 61, 66, -114, 16, 106, 66, -85, 104, -40, 66,\n            -105, 119, 97, 66, -93, 85, -91, 66, -68, 0, -30, 66, -83, 127, -96,\n            66, -93, 66, -112, 66, -101, -38, -81, 66, 121, 56, 52, 66, -125,\n            98, -118, 66, 126, 88, 72, 66, 89, 114, -88, 66, -74, 78, 60, 66,\n            -126, -52, -22, 66, -112, 22, -32, 66, -89, 46, -73, 66, -93, 91,\n            125, 
66, 120, -56, -92, 66, -87, 7, 77, 66, -64, 78, -4, 66, -126,\n            -66, 90, 66, -84, 71, 3, 66, -128, 110, 98, 66, -122, 83, -82, 66,\n            -111, 11, 87, 66, 86, -14, 35, 66, 116, -70, -70, 66, 99, -39, -80,\n            66, -119, 101, -22, 66, 115, 19, -63, 66, -113, 70, -107, 66, -95,\n            -68, 14, 66, -81, 71, 30, 66, -80, -28, -60, 66, 105, -91, -127, 66,\n            -111, -52, 91, 66, -123, 119, 41, 66, -102, -104, 34, 66, -95, 80,\n            61, 66, -124, 99, 62, 66, -73, 106, 30, 66, 105, 112, -57, 66, 109,\n            50, -120, 66, -71, -6, -15, 66, 110, 98, 52, 66, 120, -12, -37, 66,\n            -57, -33, -68, 66, -69, 86, 46, 66, 96, 58, -19, 66, -76, 126, -21,\n            66, -73, 125, -63, 66, -79, -85, 53, 66, -72, 58, 77, 66, -106, 49,\n            121, 66, -125, 61, -59, 66, 122, -121, -65, 66, -110, -99, 45, 66,\n            -115, 92, 56, 66, -116, 109, -37, 66, -121, -107, -78, 66, -97, -39,\n            -38, 66, -107, 11, -41, 66, -108, -54, -41, 66, 119, 51, 56, 66,\n            -93, 87, -87, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1162254905, 1160126198, 1146318205, 768218714,\n            970323758, 1099876927, 643867019, 600501721, 774110435, 588217703,\n            1112592472, 638712340, 715239841, 1336\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1162259279, 731733019, 1141513144, 774636602, 984674554,\n            1100001829, 600800612, 595561288, 731085628, 968735182, 638605348,\n            1026153239, 1016558170, 1123\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n  
      \"seed\": -5933807794767235558,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 1060563679, 69190726, 718516189, 577461197, 128264663,\n            240233213, 731333706, 439483711, 892984650, 863421774, 715044269,\n            915485741, 341097923, 527756130, 917234755, 917813929, 388605485,\n            1050916429, 99063898, 661487011, 226399306, 530525987, 187151915,\n            510891610, 171042099, 739313630, 731102061, 1019020399, 402126665,\n            876427621, 724770639, 514530557, 790624125, 597932745, 61642213,\n            393570995, 584509945, 856739626, 624420981, 933801047, 756661035,\n            342556667, 1\n          ],\n          \"cutValueData\": [\n            66, -114, -102, 21, 66, -83, -108, -29, 66, 73, 111, 122, 66, -59,\n            25, 14, 66, -86, 1, 97, 66, -82, 30, -59, 66, -112, -48, -84, 66,\n            -117, 120, 31, 66, -114, -27, -77, 66, 86, 59, 35, 66, -127, 26,\n            -100, 66, -80, -11, -113, 66, -110, -17, -13, 66, -94, -8, 40, 66,\n            -73, 5, -66, 66, 104, -44, -19, 66, -112, 3, 107, 66, -125, 120, 27,\n            66, -66, 112, 66, 66, -99, 106, 10, 66, -82, -112, -124, 66, -115,\n            -88, -31, 66, 93, -65, 94, 66, -66, 85, 1, 66, 76, -82, -120, 66,\n            -118, 124, -23, 66, 113, -57, 87, 66, -69, -76, -126, 66, 118, -105,\n            35, 66, 74, -35, -21, 66, -63, -95, 127, 66, -92, 84, -76, 66, -103,\n            -110, -94, 66, 82, -23, 28, 66, -101, 118, -60, 66, -106, 96, -9,\n            66, 
-115, 92, 95, 66, 100, -29, 18, 66, 71, -3, 24, 66, -77, 76,\n            -68, 66, -117, 60, 49, 66, 107, 73, 105, 66, 86, -2, -94, 66, -100,\n            -102, -7, 66, 82, -33, 52, 66, -78, 123, -106, 66, 118, -20, 27, 66,\n            -95, -84, 108, 66, -112, -23, -40, 66, 121, -98, 53, 66, -98, 106,\n            37, 66, 118, -104, 19, 66, -91, 101, 76, 66, -73, 127, -64, 66, -88,\n            -24, -81, 66, -108, 67, 4, 66, -79, -16, -46, 66, -83, 30, -84, 66,\n            108, -41, 95, 66, -78, 38, 55, 66, -120, 49, 125, 66, -97, 53, 48,\n            66, -122, 62, 26, 66, 125, -49, 38, 66, 122, -24, -83, 66, -98, 1,\n            -3, 66, -119, 74, -95, 66, -65, 88, -49, 66, 75, -117, 77, 66, -95,\n            98, -14, 66, -108, 0, -39, 66, -118, 87, 73, 66, 118, -71, 93, 66,\n            69, -51, 21, 66, -95, -47, -69, 66, -103, 108, 20, 66, -92, -120,\n            -67, 66, -112, 121, 110, 66, 96, 120, 64, 66, 116, 121, -87, 66,\n            -100, -27, -107, 66, -75, -72, -40, 66, -122, -30, 59, 66, -106,\n            100, 22, 66, -113, 116, -82, 66, -110, 69, -14, 66, -80, -11, 40,\n            66, -100, -35, -111, 66, -75, 52, 91, 66, -106, -60, 37, 66, 105,\n            49, 52, 66, -117, 16, -36, 66, -59, -100, -14, 66, -125, -115, -30,\n            66, -121, 111, -52, 66, -115, 90, 57, 66, -98, 72, -72, 66, 119, 98,\n            32, 66, 110, -105, 22, 66, -96, 98, 123, 66, -102, 116, 14, 66, 118,\n            100, -105, 66, -122, -26, 42, 66, -117, -74, 84, 66, -124, -32,\n            -126, 66, -65, -95, -28, 66, -99, -71, 73, 66, 114, -124, 100, 66,\n            -113, 69, 16, 66, -110, -70, 24, 66, -127, 89, 38, 66, -114, -2, 19,\n            66, 118, -39, 28, 66, -108, -18, 60, 66, 91, -68, -115, 66, -107,\n            48, -123, 66, -102, -111, 40, 66, -94, 112, -89, 66, -77, 21, 44,\n            66, -128, 115, -37, 66, -122, -45, 64, 66, -102, -56, -36, 66, -123,\n            20, 79, 66, -103, 76, -90, 66, -81, 75, -2, 66, 97, -115, 106, 66,\n            
-104, 80, 114, 66, -107, -86, 76, 66, -81, 112, -42, 66, -89, 80,\n            -60, 66, -77, 63, -50, 66, 70, -106, -67, 66, -113, 12, -43, 66,\n            -87, 16, 110, 66, 115, 60, -15, 66, -120, 78, 8, 66, 88, -121, 32,\n            66, -99, -58, -35, 66, -123, 62, 110, 66, 120, 71, -84, 66, -87,\n            -45, 90, 66, 100, 14, 122, 66, -113, 121, -39, 66, -98, 93, 80, 66,\n            -118, 70, -48, 66, -100, -73, -31, 66, 97, 1, 125, 66, -94, 31, 15,\n            66, -67, -98, -16, 66, -111, -15, 126, 66, -80, -52, 103, 66, 97,\n            -86, 81, 66, -74, 22, -49, 66, -127, -28, 39, 66, -121, -92, 55, 66,\n            -72, -2, 27, 66, -91, 96, -36, 66, -78, 50, 88, 66, -79, -34, -97,\n            66, -125, -49, 123, 66, -107, 20, 19, 66, 127, 34, -116, 66, 112,\n            85, 73, 66, -103, 36, -110, 66, -110, 57, 62, 66, -122, -92, -71,\n            66, -79, -75, 121, 66, -75, -73, 60, 66, 87, 60, 105, 66, -100, 46,\n            25, 66, 109, -117, -44, 66, -115, -105, 82, 66, 122, -112, 2, 66,\n            84, -111, -7, 66, 91, 14, 55, 66, -95, -108, -77, 66, 111, -89, 49,\n            66, -91, 34, 11, 66, -77, 57, 55, 66, -120, -31, 76, 66, 123, -27,\n            -70, 66, -123, -38, -34, 66, -125, -12, -115, 66, -123, -14, 9, 66,\n            -76, 75, 15, 66, -124, 50, 73, 66, 87, -83, -62, 66, -110, 55, 100,\n            66, 125, -84, 36, 66, -110, 48, -73, 66, 101, -33, 49, 66, 105, 28,\n            19, 66, -107, 100, 19, 66, -121, 55, -109, 66, -86, -115, 84, 66,\n            -104, 67, 24, 66, 118, -3, 67, 66, -91, 73, 121, 66, -123, -13, 91,\n            66, -128, 77, 6, 66, 102, -51, -37, 66, -123, -98, -113, 66, 117,\n            68, -76, 66, -109, 19, -46, 66, -91, 80, -47, 66, 95, 42, 30, 66,\n            86, -35, 90, 66, 125, 93, 71, 66, -76, -51, 20, 66, -65, -36, -69,\n            66, -114, -1, 58, 66, 86, -24, -87, 66, -128, 100, -78, 66, -128,\n            -104, 62, 66, -99, 76, 85, 66, -118, -89, 32, 66, -91, -44, -121,\n            66, 
-67, 20, -61, 66, -98, -92, 111, 66, -116, 115, -74, 66, -123,\n            -97, 38, 66, 126, -99, -115, 66, -128, -77, -27, 66, 120, -73, 94,\n            66, -107, 80, -22, 66, -66, 34, 93, 66, -127, -78, 83, 66, 103, -4,\n            -125, 66, 93, -15, -110, 66, -94, -43, -74, 66, -125, 89, 97, 66,\n            -124, 26, -3, 66, 80, -64, -107, 66, 102, -108, 43, 66, -96, -115,\n            -63, 66, -75, -85, 103, 66, 115, 39, -124, 66, 91, 89, -42, 66, -91,\n            50, -66, 66, -122, 43, 60, 66, -63, 88, -69, 66, -106, 108, 77, 66,\n            -90, -63, 120, 66, -113, 116, 0, 66, 113, 66, 60, 66, -125, -55,\n            -37, 66, -105, -128, 112, 66, 113, 52, 45, 66, -115, 114, 72, 66,\n            125, 29, -110, 66, -91, 61, 17, 66, -109, -127, -117, 66, -100,\n            -111, 30, 0, 0, 0, 0, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 253,\n          \"leftIndex\": [\n            -1, 1, 255, 1119194819, 1160645003, 1162241755, 1118427389,\n            640740667, 969169022, 582754243, 588121196, 987860569, 582961333,\n            968638810, 624243065, 988276516, 394\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1157478227, 1160660555, 759938759, 968738200, 628989106,\n            990013019, 712070797, 588296401, 983668091, 729658975, 767675542,\n            1097927590, 597080605, 368\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 1,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 1,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -2736961492578362267,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        
\"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n          \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 1013443169, 921676590, 129582783, 245296757, 341366730,\n            753698021, 230382562, 353726331, 374681205, 526874929, 871585749,\n            1026469745, 786607786, 1009817027, 91003690, 979301591, 187745486,\n            439143994, 180000978, 749375469, 880502454, 359921235, 191974743,\n            253057958, 916515141, 746270301, 753583687, 984447703, 602649694,\n            880101359, 574072255, 765684939, 234039721, 586603589, 392416711,\n            668261841, 494078249, 719006946, 393916235, 110694101, 757009206,\n            61583357, 311\n          ],\n          \"cutValueData\": [\n            66, -89, 4, -101, 66, -112, 101, 72, 66, -73, 46, 28, 66, -118, 16,\n            -79, 66, -121, -123, -7, 66, 108, 116, 25, 66, -99, -98, 65, 66,\n            113, 36, -93, 66, -108, 55, -4, 66, -93, 44, -62, 66, 124, 83, 126,\n            66, -96, 120, -69, 66, -122, 93, 36, 66, -86, 30, 13, 66, -93, 112,\n            77, 66, -113, 115, -45, 66, -121, -94, 50, 66, -97, -36, -88, 66,\n            -124, 85, -83, 66, -71, -104, -14, 66, -122, 126, 38, 66, -121, 62,\n            117, 66, -112, 95, 8, 66, 120, 79, 107, 66, -112, 58, -119, 66, -82,\n            -19, 80, 66, -119, 75, 102, 66, -126, -112, -40, 66, -127, 70, -19,\n            66, 115, -26, -7, 66, -116, 88, 40, 66, 97, 125, 58, 66, 99, 116,\n            -122, 66, -111, 58, 81, 66, -74, 68, -26, 66, -99, -30, -68, 66,\n            -86, 34, -53, 66, -115, 34, -35, 66, -107, 13, -50, 66, -123, 68,\n            13, 66, 115, -29, 86, 66, -77, -76, 78, 66, -89, -120, 9, 66, -95,\n            13, 102, 66, 127, 19, -14, 66, -124, -80, 99, 66, -68, 20, -56, 66,\n            
-70, 91, -114, 66, -80, -50, -76, 66, -104, 109, 99, 66, -113, -96,\n            66, 66, -104, 27, -74, 66, -94, 29, 13, 66, -90, -116, 51, 66, 87,\n            -26, 92, 66, -102, 13, 2, 66, -59, -83, 10, 66, 81, 83, 60, 66, 100,\n            -90, -102, 66, -86, 36, 63, 66, -73, 84, 78, 66, -82, -120, -22, 66,\n            84, 79, -61, 66, -84, -31, -49, 66, -95, 123, -41, 66, -87, -27, 68,\n            66, -88, -61, 122, 66, 104, 89, 118, 66, -117, -128, -93, 66, -79,\n            52, -51, 66, -73, 112, 38, 66, 93, 122, 77, 66, -111, 80, 103, 66,\n            -99, -102, 80, 66, -121, 44, -11, 66, -104, 3, -100, 66, -90, -85,\n            123, 66, -124, -4, -15, 66, -68, -86, 24, 66, -87, 108, -31, 66,\n            -106, 124, -61, 66, -85, -6, -78, 66, -72, -33, -89, 66, -86, -85,\n            -75, 66, -120, -41, 76, 66, -124, -83, -79, 66, -89, -12, 108, 66,\n            -67, -118, 123, 66, -122, 50, -96, 66, -96, -117, 83, 66, -113, 75,\n            123, 66, 115, -17, -61, 66, 116, 29, -85, 66, -98, -67, -124, 66,\n            115, -84, -12, 66, 94, 5, 85, 66, 116, -79, -34, 66, 102, 11, 27,\n            66, -118, 115, 2, 66, -120, -5, 17, 66, 82, -63, 59, 66, -78, -39,\n            89, 66, -107, -28, 60, 66, -127, -17, -16, 66, 116, 33, -47, 66,\n            -100, -51, -104, 66, 107, -68, 125, 66, -114, 45, 21, 66, -71, 122,\n            1, 66, 118, -64, 16, 66, -69, -120, 47, 66, -127, 125, -25, 66, -96,\n            -16, 26, 66, -61, 108, 113, 66, -58, -84, 13, 66, 101, 84, -9, 66,\n            -126, -69, -78, 66, 68, -29, 25, 66, -74, 24, -90, 66, 88, -72, 48,\n            66, -102, 64, -8, 66, 96, 107, 105, 66, 76, -50, 75, 66, -104, -98,\n            109, 66, 103, -81, -34, 66, 76, -33, 85, 66, -123, -81, 66, 66, -70,\n            85, -19, 66, -73, 39, 87, 66, -77, -36, -91, 66, 100, 14, -5, 66,\n            111, 54, 30, 66, -94, -67, -105, 66, 119, -34, 18, 66, 94, 44, -53,\n            66, 88, 78, 110, 66, -94, 58, 96, 66, -72, 27, -111, 66, -112, 6,\n    
        -69, 66, -111, -108, 16, 66, 95, 32, -56, 66, 114, -47, 115, 66,\n            -121, -114, -89, 66, 102, -56, 119, 66, -93, 7, 98, 66, -94, -115,\n            -119, 66, -84, -55, 47, 66, 83, -40, 87, 66, 80, -123, -37, 66, -72,\n            89, -25, 66, -83, 53, 54, 66, -101, 90, 3, 66, 116, 96, -69, 66,\n            -82, 16, -120, 66, -93, -64, 110, 66, -122, 56, 85, 66, -96, 100,\n            98, 66, -78, 84, 102, 66, -68, -83, -73, 66, 93, -59, -99, 66, -95,\n            14, -44, 66, -112, 110, 78, 66, -119, -45, 0, 66, -94, -45, -96, 66,\n            -119, -102, -113, 66, -94, -99, -10, 66, 86, -115, 115, 66, -79,\n            -115, -128, 66, 96, 12, -51, 66, 112, 56, 13, 66, -99, 80, -125, 66,\n            -109, 91, -48, 66, -78, 17, 25, 66, 109, -87, -20, 66, 91, -123,\n            -128, 66, 85, -104, 125, 66, -92, -75, -67, 66, 72, -60, -7, 66,\n            -91, -65, 57, 66, -98, 87, -54, 66, -118, 125, 15, 66, -102, 3, 81,\n            66, -86, 30, -34, 66, -89, 49, 35, 66, -71, 57, 28, 66, 127, 120,\n            121, 66, -85, 5, 43, 66, -100, 46, 72, 66, -87, 98, 49, 66, -86,\n            106, -4, 66, -99, 119, 114, 66, -108, -114, 66, 66, -127, -88, -27,\n            66, 78, -21, -90, 66, -84, 78, 106, 66, -77, -99, -5, 66, -93, -85,\n            -25, 66, 124, -118, 126, 66, -128, -126, -122, 66, 121, 6, -60, 66,\n            72, 83, -68, 66, 116, 27, 69, 66, -74, 94, -128, 66, -79, -87, 114,\n            66, -96, 29, 112, 66, -85, 48, -62, 66, 86, 5, -122, 66, -109, 34,\n            45, 66, -105, 105, 127, 66, 121, -92, -106, 66, -125, 84, 123, 66,\n            -128, 55, 87, 66, -120, -44, -72, 66, -105, -126, -21, 66, -97, -85,\n            -108, 66, 118, 120, 92, 66, 102, 46, -66, 66, -68, 86, -108, 66,\n            -123, -21, -82, 66, -62, -122, -84, 66, -126, 100, -50, 66, -79,\n            -22, -117, 66, 100, 14, -60, 66, 86, -29, -32, 66, 104, 14, 31, 66,\n            -96, 78, -105, 66, -122, -25, 29, 66, -95, 112, 89, 66, -99, 68, 48,\n     
       66, -122, 20, 74, 66, -97, 67, 75, 66, 89, -111, -36, 66, -122, -80,\n            -114, 66, -99, 121, -101, 66, -61, -103, 30, 66, -67, -6, -81, 66,\n            110, -45, -29, 66, -116, 92, 10, 66, 75, -83, 102, 66, -117, 96,\n            121, 66, -111, 0, 59, 66, -108, 6, 106, 66, 84, 37, 100, 66, -101,\n            -61, -10, 66, -125, 120, 96, 66, -112, 70, 85, 66, 90, 36, -41, 66,\n            123, 29, 29, 66, -122, -28, 35, 66, -108, -63, 86, 66, -105, -37,\n            15, 66, -93, 63, -47, 66, -107, -79, 93, 66, -104, 88, 29, 0, 0, 0,\n            0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 1162261466, 1157419448, 1117082416, 725160833,\n            1097779625, 587573663, 983103602, 710823019, 753384229, 1116832748,\n            985045928, 595683841, 581153359, 1177\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1155707027, 1157417261, 755170369, 1160469332,\n            715080437, 625955849, 600466756, 970680173, 1145606612, 586117202,\n            597096914, 600816650, 968570995, 1094\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": 139913034750054867,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      },\n      {\n        \"version\": \"2.0\",\n        \"root\": 0,\n        \"maxSize\": 256,\n        \"outputAfter\": 32,\n        \"storeSequenceIndexesEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"nodeStoreState\": {\n          \"version\": \"2.0\",\n          \"capacity\": 255,\n        
  \"compressed\": true,\n          \"cutDimension\": [\n            0, 31, 255, 173344421, 926922319, 125094975, 104849077, 996340926,\n            128308961, 716421494, 895929981, 330169982, 45303595, 854808011,\n            120186078, 1042148470, 1052286139, 1072392027, 212793033, 926587978,\n            936551517, 771080187, 922220089, 711932081, 337168875, 204823778,\n            626301163, 668526305, 631039174, 222092901, 1017769562, 366822986,\n            447699281, 221028266, 882048966, 780140150, 707091531, 658438330,\n            925759037, 840869161, 614251182, 502513462, 327059393, 1073313478,\n            354489394, 559\n          ],\n          \"cutValueData\": [\n            66, 99, -36, -127, 66, 81, 109, 108, 66, -106, -119, -66, 66, -124,\n            62, -90, 66, -66, -72, -62, 66, -128, -50, -52, 66, 110, 65, -25,\n            66, 85, -21, 76, 66, -110, -88, -65, 66, -97, -33, 99, 66, -102, 9,\n            -78, 66, -106, -11, -93, 66, -75, 36, -85, 66, -125, 26, 30, 66,\n            -90, 65, -17, 66, -101, -101, 23, 66, -115, -45, -97, 66, -121, 71,\n            96, 66, 127, -74, 124, 66, 101, -92, -36, 66, -110, -116, 122, 66,\n            -67, 127, 57, 66, -85, 47, 35, 66, -124, 27, 37, 66, 102, -70, -73,\n            66, 72, 86, -98, 66, -102, 26, -128, 66, 83, -112, 22, 66, -126,\n            -93, 45, 66, -104, 59, -104, 66, 104, -84, 113, 66, -110, -38, -81,\n            66, -104, -117, -40, 66, -123, -61, -128, 66, -108, -59, 66, 66,\n            -99, -21, 125, 66, 84, 59, 85, 66, -78, -21, -47, 66, -109, -36, 93,\n            66, -112, -45, -19, 66, -73, -90, 106, 66, -99, 90, -63, 66, 126,\n            -81, 57, 66, 110, -19, 39, 66, -98, -20, 40, 66, -84, 33, 13, 66,\n            -123, -22, 30, 66, -108, -48, -20, 66, -68, -106, 41, 66, -114, 10,\n            118, 66, 125, 60, -101, 66, 97, -43, 25, 66, -125, 28, -33, 66, -87,\n            88, 51, 66, -127, -79, -38, 66, -86, 48, 6, 66, -98, 20, 59, 66,\n            -64, 49, 72, 66, 
-104, -91, -93, 66, -89, -5, 74, 66, 86, -13, 18,\n            66, -121, 58, 99, 66, -94, -31, 105, 66, -79, -68, 49, 66, -92, -14,\n            -81, 66, -97, -28, 65, 66, -123, 103, -66, 66, -102, 116, 23, 66,\n            -71, -120, -66, 66, 69, 30, 78, 66, -93, -50, -124, 66, -98, 5, 7,\n            66, -70, 78, -24, 66, 113, 67, -46, 66, -89, -23, -32, 66, 121, 69,\n            16, 66, -117, -108, -42, 66, -64, -53, 12, 66, 117, 10, 53, 66, -82,\n            102, 47, 66, -96, 18, -45, 66, -108, -32, 54, 66, -126, -80, -31,\n            66, -93, 1, 90, 66, -103, 76, -107, 66, -106, 25, 119, 66, -125,\n            -71, 95, 66, -62, 53, -29, 66, -98, -17, 0, 66, -119, 55, 97, 66,\n            -116, 89, -124, 66, 77, -106, -67, 66, -114, -61, -23, 66, 79, 54,\n            -48, 66, -82, 15, -123, 66, -104, 61, -74, 66, 98, 1, -22, 66, -93,\n            -64, -33, 66, -102, 45, 47, 66, -119, -117, -12, 66, -76, 82, 26,\n            66, -78, 16, 110, 66, -94, 25, -6, 66, -96, 63, -32, 66, 109, 23,\n            -105, 66, 83, 76, 31, 66, -69, -47, 55, 66, -110, 26, -90, 66, -104,\n            11, -54, 66, -80, 117, 123, 66, 118, 46, -112, 66, -124, 53, -21,\n            66, 96, 33, 50, 66, -91, 42, -27, 66, -90, -41, 59, 66, -107, 16,\n            85, 66, -74, -80, 111, 66, -108, 42, -88, 66, -64, -107, -82, 66,\n            -89, 12, 28, 66, -69, 81, 90, 66, -109, 76, -102, 66, -75, 62, -59,\n            66, 110, 47, -59, 66, -76, -99, -86, 66, -101, 20, -98, 66, -114,\n            -92, -73, 66, 105, -93, 6, 66, -106, -41, -84, 66, -68, -107, 98,\n            66, -65, -33, 15, 66, -67, 78, 16, 66, -96, 114, -12, 66, -84, 27,\n            -115, 66, -115, 3, 115, 66, -105, 104, -1, 66, -87, 74, 30, 66, -87,\n            -47, -33, 66, -124, 117, 113, 66, -99, -88, -32, 66, -119, -113,\n            -95, 66, -93, -88, 25, 66, 120, 69, -40, 66, -125, 121, 8, 66, -91,\n            120, 18, 66, -93, -50, 108, 66, 124, -11, 52, 66, -97, -37, -115,\n            66, -101, -107, 
109, 66, -122, -49, 114, 66, -75, 98, 50, 66, -105,\n            40, 18, 66, 106, -31, -115, 66, 81, 98, -52, 66, -128, -42, 123, 66,\n            -79, -126, -16, 66, -96, 98, 51, 66, -123, -105, 7, 66, 86, -80, 30,\n            66, -95, -9, -97, 66, -82, 105, -101, 66, -125, -113, 46, 66, 108,\n            76, -61, 66, -97, -22, -21, 66, -62, -114, -113, 66, -117, -19, -6,\n            66, 104, 27, -116, 66, -113, 43, 64, 66, 116, 107, 15, 66, -94, 108,\n            -110, 66, -59, -50, -107, 66, 84, 13, 86, 66, 80, -114, 5, 66, -107,\n            28, 101, 66, 96, 120, -27, 66, -94, -26, 23, 66, -68, -122, -93, 66,\n            -83, -93, -117, 66, -86, 9, 61, 66, -82, -73, -20, 66, -73, 52, -29,\n            66, 119, 92, -3, 66, -124, -73, 29, 66, 105, 91, 88, 66, -69, 19,\n            88, 66, -93, 66, -43, 66, 104, -11, 29, 66, -78, -110, 115, 66, -85,\n            26, 104, 66, -86, -96, 84, 66, -115, 105, 116, 66, -61, 68, -22, 66,\n            102, 9, 123, 66, 86, 86, -73, 66, 121, -36, 85, 66, -101, 126, 51,\n            66, 127, -26, 16, 66, -83, -95, -123, 66, -91, -49, 80, 66, -101,\n            -104, 7, 66, -109, -58, -37, 66, -99, -74, -58, 66, -86, 41, 75, 66,\n            93, 47, -33, 66, 126, -126, 96, 66, -78, -120, 57, 66, -111, -106,\n            -81, 66, 113, 53, 87, 66, -83, -15, -87, 66, -121, -15, -11, 66,\n            -122, 46, 66, 66, -120, -48, 72, 66, -107, -53, -23, 66, 115, 77,\n            24, 66, -112, -47, 105, 66, 125, -23, -56, 66, -103, -23, -92, 66,\n            -106, -59, 126, 66, -92, 78, -12, 66, -106, 112, -96, 66, 117, -41,\n            33, 66, -128, -9, 4, 66, 87, -65, 70, 66, 71, 46, 7, 66, -92, 59,\n            105, 66, -98, -66, -104, 66, 75, 12, -77, 66, -124, 78, 23, 66, -90,\n            -109, -54, 66, 106, -31, -12, 66, -97, 122, -79, 66, -99, -125, -77,\n            66, -110, -34, -123, 66, -87, 32, 49, 66, 120, -71, -77, 66, -126,\n            124, -18, 66, -95, 79, 104, 66, -103, 50, 88, 66, -119, -33, 18, 66,\n       
     114, -13, -120, 66, -104, -69, 105, 66, -81, -121, 58, 66, -91, 50,\n            -88, 66, 116, -102, -103, 66, -107, 50, 69, 66, -94, 42, -57, 66,\n            -110, -67, -27, 66, 98, 1, 122, 66, -115, 5, -4, 66, 111, 16, 83,\n            66, -96, -86, -95, 66, -107, 111, -81, 66, 107, -111, -55, 66, -68,\n            -95, 55, 0, 0, 0, 0\n          ],\n          \"precision\": \"FLOAT_32\",\n          \"root\": 0,\n          \"canonicalAndNotALeaf\": true,\n          \"size\": 254,\n          \"leftIndex\": [\n            -1, 1, 255, 774838787, 974123738, 726990791, 984753580, 1033101512,\n            768198292, 710333195, 1027541941, 970154305, 629494405, 970230928,\n            581724166, 582964438, 1201\n          ],\n          \"rightIndex\": [\n            -1, 1, 255, 1032589862, 644106491, 774755684, 1142951710,\n            1155705935, 1159936414, 1099543865, 581190029, 630616823, 624356896,\n            582784870, 726805696, 595718023, 1102\n          ],\n          \"nodeFreeIndexes\": [],\n          \"nodeFreeIndexPointer\": 0,\n          \"leafFreeIndexes\": [],\n          \"leafFreeIndexPointer\": 0,\n          \"partialTreeStateEnabled\": true\n        },\n        \"boundingBoxCacheFraction\": 0.0,\n        \"partialTreeState\": true,\n        \"seed\": -950767441100490523,\n        \"id\": 0,\n        \"dimensions\": 32,\n        \"staticSeed\": 0,\n        \"weight\": 0.0,\n        \"hasAuxiliaryData\": false\n      }\n    ],\n    \"executionContext\": {\n      \"parallelExecutionEnabled\": false,\n      \"threadPoolSize\": 0\n    },\n    \"saveTreeStateEnabled\": true,\n    \"saveSamplerStateEnabled\": true,\n    \"saveCoordinatorStateEnabled\": true\n  },\n  \"thresholderState\": {\n    \"randomseed\": 0,\n    \"inAnomaly\": false,\n    \"elasticity\": 0.01,\n    \"attributionEnabled\": false,\n    \"count\": 1225,\n    \"minimumScores\": 10,\n    \"primaryDeviationState\": {\n      \"discount\": 0.0050000000000000044,\n      \"weight\": 
199.41879980792027,\n      \"sumSquared\": 135.95690716058067,\n      \"sum\": 163.721886587894,\n      \"count\": 1225\n    },\n    \"secondaryDeviationState\": {\n      \"discount\": 0.0050000000000000044,\n      \"weight\": 199.41879980792027,\n      \"sumSquared\": 135.61793917480634,\n      \"sum\": 163.54696035290627,\n      \"count\": 1225\n    },\n    \"thresholdDeviationState\": {\n      \"discount\": 0.0025000000000000022,\n      \"weight\": 374.76476562578705,\n      \"sumSquared\": 15.531414684382712,\n      \"sum\": 15.531414684382712,\n      \"count\": 1225\n    },\n    \"upperThreshold\": 2.0,\n    \"lowerThreshold\": 1.0,\n    \"absoluteThreshold\": 1.0,\n    \"autoThreshold\": false,\n    \"initialThreshold\": 1.5,\n    \"zFactor\": 2.5,\n    \"upperZfactor\": 5.0,\n    \"absoluteScoreFraction\": 0.5,\n    \"horizon\": 0.5\n  },\n  \"preprocessorStates\": [\n    {\n      \"version\": \"2.1\",\n      \"useImputedFraction\": 0.5,\n      \"imputationMethod\": \"PREVIOUS\",\n      \"forestMode\": \"STANDARD\",\n      \"transformMethod\": \"NONE\",\n      \"weights\": [\n        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,\n        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,\n        1.0, 1.0, 1.0, 1.0, 1.0\n      ],\n      \"lastShingledPoint\": [\n        0.0, 79.5, 83.0, 76.0, 0.0, 94.0, 94.0, 94.0, 0.0, 85.0, 85.0, 85.0,\n        0.0, 53.0, 53.0, 53.0, 0.0, 53.0, 53.0, 53.0, 0.0, 97.0, 97.0, 97.0,\n        0.0, 66.0, 66.0, 66.0, 0.0, 53.0, 53.0, 53.0\n      ],\n      \"lastShingledInput\": [\n        0.0, 79.5, 83.0, 76.0, 0.0, 94.0, 94.0, 94.0, 0.0, 85.0, 85.0, 85.0,\n        0.0, 53.0, 53.0, 53.0, 0.0, 53.0, 53.0, 53.0, 0.0, 97.0, 97.0, 97.0,\n        0.0, 66.0, 66.0, 66.0, 0.0, 53.0, 53.0, 53.0\n      ],\n      \"timeDecay\": 0.0,\n      \"startNormalization\": 10,\n      \"stopNormalization\": 2147483647,\n      \"shingleSize\": 8,\n      \"dimensions\": 32,\n      \"inputLength\": 32,\n      
\"clipFactor\": 10.0,\n      \"normalizeTime\": false,\n      \"previousTimeStamps\": [0, 0, 0, 0, 0, 0, 0, 0],\n      \"valuesSeen\": 1257,\n      \"internalTimeStamp\": 1257,\n      \"dataQualityState\": {\n        \"discount\": 1.0e-4,\n        \"weight\": 629.4992050874407,\n        \"sumSquared\": 629.4992050874407,\n        \"sum\": 629.4992050874407,\n        \"count\": 1257\n      },\n      \"timeStampDeviationState\": {\n        \"discount\": 1.0e-4,\n        \"weight\": 629.4992050874407,\n        \"sumSquared\": 0.0,\n        \"sum\": 0.0,\n        \"count\": 1257\n      }\n    }\n  ],\n  \"ignoreSimilarFactor\": 0.3,\n  \"triggerFactor\": 3.5,\n  \"lastAnomalyTimeStamp\": 1216,\n  \"lastAnomalyScore\": 1.0341965562255886,\n  \"lastAnomalyAttribution\": {\n    \"high\": [\n      0.0, 0.02556739560898381, 0.040413161124477896, 0.014998565390821401, 0.0,\n      0.0, 0.0, 0.0, 0.0, 0.001540388397430486, 0.001540388397430486,\n      0.001540388397430486, 0.0, 0.0010928893489314821, 0.0010928893489314821,\n      0.0010928893489314821, 0.0, 3.4845364418932664e-4, 3.4845364418932664e-4,\n      3.4845364418932664e-4, 0.0, 0.027020193090515413, 0.025163469786736423,\n      0.03163984692871691, 0.0, 0.020971108223835824, 0.018605444907579965,\n      0.023485239185523532, 0.0, 0.03325596329975221, 0.027706520161384685,\n      0.03947519320649786\n    ],\n    \"low\": [\n      0.0, 0.010918632411441403, 0.006208194892003597, 0.01901386776274787, 0.0,\n      0.059629428026460686, 0.06324446015591077, 0.051947604761362835, 0.0,\n      0.05282156107385229, 0.055397573203406596, 0.047853434780376417, 0.0,\n      0.03752281674104219, 0.03939725735101295, 0.034211600213650495, 0.0,\n      0.060902128997274485, 0.061830771305554276, 0.05807699919735293, 0.0,\n      0.008181304113898433, 0.010937794354236228, 0.006927164429921987, 0.0,\n      0.001092991610111269, 0.0016351195525133246, 9.267826113141719e-4, 0.0,\n      0.002423995798281548, 0.003423781997100454, 
0.002423995798281548\n    ]\n  },\n  \"lastScore\": 0.0,\n  \"lastAnomalyPoint\": [\n    0.0, 73.0, 78.0, 68.0, 0.0, 50.0, 50.0, 50.0, 0.0, 52.0, 52.0, 52.0, 0.0,\n    52.0, 52.0, 52.0, 0.0, 50.0, 50.0, 50.0, 0.0, 84.0, 84.0, 84.0, 0.0, 95.0,\n    95.0, 95.0, 0.0, 94.0, 94.0, 94.0\n  ],\n  \"lastExpectedPoint\": [\n    0.0, 73.0, 78.0, 68.0, 0.0, 50.0, 50.0, 50.0, 0.0, 52.0, 52.0, 52.0, 0.0,\n    52.0, 52.0, 52.0, 0.0, 73.0, 73.0, 73.0, 0.0, 84.0, 84.0, 84.0, 0.0, 95.0,\n    95.0, 95.0, 0.0, 94.0, 94.0, 94.0\n  ],\n  \"previousIsPotentialAnomaly\": false,\n  \"inHighScoreRegion\": false,\n  \"ignoreSimilar\": false,\n  \"numberOfAttributors\": 5,\n  \"randomSeed\": 0,\n  \"forestMode\": \"STANDARD\",\n  \"transformMethod\": \"NONE\",\n  \"lastRelativeIndex\": 0,\n  \"lastReset\": 0\n}\n"
  },
  {
    "path": "Java/parkservices/src/test/resources/com/amazon/randomcutforest/parkservices/state/state_2.json",
    "content": "{\n    \"version\": \"2.1\",\n    \"forestState\": {\n        \"version\": \"2.0\",\n        \"totalUpdates\": 505,\n        \"timeDecay\": 1.0E-4,\n        \"numberOfTrees\": 30,\n        \"sampleSize\": 256,\n        \"shingleSize\": 8,\n        \"dimensions\": 32,\n        \"outputAfter\": 32,\n        \"compressed\": true,\n        \"partialTreeState\": true,\n        \"boundingBoxCacheFraction\": 0.0,\n        \"storeSequenceIndexesEnabled\": false,\n        \"compact\": true,\n        \"internalShinglingEnabled\": false,\n        \"centerOfMassEnabled\": false,\n        \"precision\": \"FLOAT_32\",\n        \"pointStoreState\": {\n            \"version\": \"2.0\",\n            \"dimensions\": 32,\n            \"capacity\": 7681,\n            \"shingleSize\": 8,\n            \"precision\": \"FLOAT_32\",\n            \"startOfFreeSegment\": 2048,\n            \"pointData\": [\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -103,\n                -102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                64,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                69,\n                -47,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n             
   66,\n                -119,\n                23,\n                70,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -123,\n                85,\n                85,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -52,\n                -51,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                93,\n                23,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                28,\n                114,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                
66,\n                -104,\n                113,\n                -57,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                -114,\n                57,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -103,\n                -102,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                102,\n                102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                69,\n                -47,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                
66,\n                -111,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                -64,\n                0,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                51,\n                51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                102,\n                102,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                113,\n                -57,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n    
            -113,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                0,\n                0,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                -103,\n                -102,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                69,\n                -47,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                
-106,\n                51,\n                51,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                -120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                69,\n                -47,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                102,\n                102,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                -122,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                -94,\n                -23,\n                66,\n                -56,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                23,\n                70,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n 
               -128,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -103,\n                -102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -114,\n                57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                113,\n                -57,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                23,\n                70,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                -128,\n                0,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n    
            -29,\n                -114,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                -128,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                113,\n                -57,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -127,\n                102,\n                102,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -94,\n                -23,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n    
            -114,\n                57,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                -86,\n                -85,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                124,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                113,\n                -57,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -57,\n                28,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                28,\n                114,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n            
    102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                56,\n                -28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -117,\n                -93,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                67,\n                -5,\n                -26,\n                102,\n                69,\n                -121,\n                -72,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -119,\n                102,\n                102,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -52,\n                -51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -93,\n             
   -103,\n                -102,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -24,\n                -70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -127,\n                -86,\n                -85,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                23,\n                70,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -93,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -86,\n                -85,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                
-29,\n                -114,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -94,\n                -23,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                -103,\n                -102,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                -86,\n                -85,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                -114,\n                57,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                113,\n                -57,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                
46,\n                -116,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -95,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -57,\n                28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                116,\n                93,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -78,\n                0,\n        
        0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                113,\n                -57,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                -24,\n                -70,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -93,\n                64,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                64,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -91,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                69,\n                
-47,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -29,\n                -114,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                120,\n                102,\n                102,\n                66,\n                -104,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -47,\n                116,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -24,\n                -70,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                66,\n                -86,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                102,\n                
102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                93,\n                23,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                46,\n                -116,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                -29,\n                -114,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                42,\n                
-85,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                -86,\n                -85,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -127,\n                102,\n                102,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -94,\n                -23,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                127,\n                -114,\n                
57,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -123,\n                23,\n                70,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                -52,\n                -51,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                56,\n                -28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -90,\n                0,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                -47,\n                116,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                56,\n                -28,\n     
           66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                51,\n                51,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                102,\n                102,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                65,\n                32,\n                0,\n                0,\n                66,\n                2,\n                -52,\n                -51,\n                66,\n                -66,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                51,\n                51,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -128,\n                113,\n                -57,\n             
   66,\n                -102,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                -128,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                51,\n                51,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                42,\n                -85,\n                66,\n                -56,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                -103,\n                -102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                -106,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                102,\n                102,\n                
66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                0,\n                0,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                56,\n                -28,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                0,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                113,\n                -57,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                -108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                102,\n                102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                102,\n                102,\n                66,\n     
           -70,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                -64,\n                0,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                46,\n                -116,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                -24,\n                -70,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -94,\n                -23,\n                66,\n             
   -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                51,\n                51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                -103,\n                -102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                -103,\n                -102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                113,\n                -57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -103,\n                -102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -92,\n                0,\n                0,\n                66,\n              
  -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                51,\n                51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                -103,\n                -102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -52,\n                -51,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -103,\n                -102,\n                66,\n                
-60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -86,\n                -85,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                113,\n                -57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -117,\n                -93,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                51,\n                51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                51,\n                51,\n                66,\n                
-68,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -117,\n                -93,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                23,\n                70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                -29,\n                -114,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -128,\n                0,\n                66,\n                -82,\n  
              0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                -70,\n                47,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                102,\n                102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                119,\n                -103,\n                -102,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -43,\n                85,\n                66,\n                -60,\n          
      0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                46,\n                -116,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -117,\n                -93,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -103,\n                -102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -103,\n                -102,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                126,\n                46,\n                -116,\n                66,\n                -62,\n             
   0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -114,\n                57,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -52,\n                -51,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                56,\n                -28,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -121,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                51,\n                51,\n                66,\n                -64,\n                
0,\n                0,\n                66,\n                -128,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -128,\n                -117,\n                -93,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -94,\n                -23,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                93,\n                23,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -70,\n                47,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                85,\n                85,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -95,\n                -114,\n                57,\n                66,\n                -60,\n                0,\n 
               0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -103,\n                -102,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                -24,\n                -70,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                28,\n                114,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -70,\n                47,\n                66,\n                -74,\n                0,\n       
         0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -103,\n                -102,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                56,\n                -28,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                46,\n                -116,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                0,\n                0,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -86,\n                -85,\n                66,\n                -62,\n                0,\n                
0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                23,\n                70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                0,\n                0,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                69,\n                -47,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                -24,\n                -70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n            
    66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -117,\n                -93,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                65,\n                96,\n                0,\n                0,\n                65,\n                -110,\n                -86,\n                -85,\n                66,\n                -78,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                42,\n                -85,\n                66,\n                -104,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                93,\n                23,\n                66,\n                -58,\n                0,\n                0,\n                66,\n   
             108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -94,\n                -23,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -103,\n                -102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -93,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                85,\n                85,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -29,\n                -114,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                102,\n                102,\n                66,\n                -74,\n                0,\n                0,\n                66,\n      
          96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                46,\n                -116,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                113,\n                -57,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -57,\n                28,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -70,\n                47,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                -103,\n                -102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n           
     88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -114,\n                57,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                -20,\n                79,\n                66,\n                -56,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                -52,\n                -51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                -114,\n                57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -29,\n                -114,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                120,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                46,\n                -116,\n                66,\n                -70,\n                0,\n                0,\n                66,\n             
   68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                51,\n                51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -93,\n                69,\n                -47,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                46,\n                -116,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -114,\n                57,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                
68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -24,\n                -70,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                113,\n                -57,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                51,\n                51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -95,\n                102,\n                102,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                51,\n                51,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                0,\n                0,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n      
          0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -52,\n                -51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -123,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -114,\n                57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                85,\n                85,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                113,\n                -57,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                100,\n              
  0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                51,\n                51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -103,\n                -102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                113,\n                -57,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                0,\n                0,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -24,\n                -70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n   
             0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                56,\n                -28,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                0,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -52,\n                -51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                66,\n                -86,\n                0,\n                0,\n                66,\n                68,\n                0,\n                
0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -86,\n                -85,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -88,\n                -103,\n                -102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                -124,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                0,\n                0,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -94,\n                -23,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -52,\n                -51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -86,\n                -85,\n                66,\n                -86,\n                0,\n                0,\n                66,\n                68,\n                0,\n                
0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -52,\n                -51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                28,\n                114,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -95,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                -128,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -52,\n                -51,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                0,\n                0,\n                66,\n                -96,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n      
          0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                56,\n                -28,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                102,\n                102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -85,\n                -94,\n                -23,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                -124,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                -57,\n                28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                93,\n                23,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                -57,\n                28,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n           
     0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                -70,\n                47,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                64,\n                0,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -70,\n                47,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n      
          0,\n                0,\n                0,\n                66,\n                -105,\n                0,\n                0,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -128,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                51,\n                51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -91,\n                116,\n                93,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                113,\n                -57,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -70,\n                47,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                
0,\n                0,\n                0,\n                66,\n                -104,\n                93,\n                23,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -121,\n                -57,\n                28,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                42,\n                -85,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                93,\n                23,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -29,\n                -114,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                -70,\n                47,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n    
            0,\n                0,\n                66,\n                -112,\n                -117,\n                -93,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                56,\n                -28,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                122,\n                85,\n                85,\n                66,\n                -100,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -86,\n                -85,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n          
      0,\n                0,\n                66,\n                -99,\n                -103,\n                -102,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -95,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -95,\n                -47,\n                116,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                -128,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -100,\n                -24,\n                -70,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                -70,\n                47,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                113,\n                -57,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n           
     0,\n                0,\n                66,\n                -105,\n                42,\n                -85,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                65,\n                32,\n                0,\n                0,\n                66,\n                26,\n                0,\n                0,\n                66,\n                -70,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -29,\n                -114,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -70,\n                47,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                85,\n                85,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                85,\n                85,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n   
             0,\n                66,\n                -112,\n                -70,\n                47,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -114,\n                57,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                69,\n                -47,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                56,\n                -28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                102,\n                102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n            
    0,\n                66,\n                -96,\n                51,\n                51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                -128,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                0,\n                0,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                85,\n                85,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                102,\n                102,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                28,\n                114,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                51,\n                51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n   
             66,\n                -110,\n                -24,\n                -70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -29,\n                -114,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                -70,\n                47,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                -122,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                102,\n                102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                93,\n                23,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                -103,\n                -102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n     
           66,\n                -101,\n                -57,\n                28,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                28,\n                114,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                56,\n                -28,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                64,\n                0,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                85,\n                85,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n              
  66,\n                -121,\n                -103,\n                -102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                64,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                0,\n                0,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                51,\n                51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                122,\n                56,\n                -28,\n                66,\n                -96,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -114,\n                57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n     
           -117,\n                -114,\n                57,\n                66,\n                -98,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                -52,\n                -51,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                113,\n                -57,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                42,\n                -85,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                51,\n                51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n         
       -120,\n                -37,\n                110,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -117,\n                -93,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                69,\n                -47,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                46,\n                -116,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                102,\n                102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -97,\n                -57,\n                28,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n             
   -100,\n                51,\n                51,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                51,\n                51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -52,\n                -51,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                51,\n                51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -86,\n                -85,\n                66,\n                -90,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n 
               -103,\n                -102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                0,\n                0,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                116,\n                93,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                113,\n                -57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                -103,\n                -102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n        
        56,\n                -28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -85,\n                -103,\n                -102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                51,\n                51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                69,\n                -47,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n              
  -114,\n                57,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                69,\n                -47,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                85,\n                85,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                0,\n                0,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -119,\n                -57,\n                28,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                -57,\n                28,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -52,\n   
             -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -24,\n                -70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                0,\n                0,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                118,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -103,\n                -102,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -119,\n                -86,\n            
    -85,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                -52,\n                -51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                69,\n                -47,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -117,\n                -93,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -103,\n                
-102,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -101,\n                51,\n                51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                124,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                0,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                93,\n                23,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -47,\n                116,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -121,\n                0,\n                0,\n      
          66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                0,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                69,\n                -47,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -96,\n                113,\n                -57,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                112,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                51,\n                51,\n               
 66,\n                -62,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                102,\n                102,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                102,\n                102,\n                66,\n                -94,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                126,\n                -86,\n                -85,\n                66,\n                -96,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -79,\n                23,\n                70,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                -128,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                46,\n                -116,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                0,\n                0,\n                66,\n 
               -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -52,\n                -51,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -84,\n                -86,\n                -85,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                42,\n                -85,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                102,\n                102,\n                66,\n                -90,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                -103,\n                -102,\n                66,\n 
               -62,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                113,\n                -57,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                46,\n                -116,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -103,\n                -102,\n                66,\n       
         -84,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                -52,\n                -51,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                -64,\n                0,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -86,\n                -85,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                56,\n                -28,\n                66,\n                -86,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -52,\n                -51,\n                66,\n                
-84,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                102,\n                102,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                116,\n                93,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -121,\n                28,\n                114,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                -94,\n                -23,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -47,\n                116,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -43,\n                85,\n                66,\n                
-62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                116,\n                93,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -103,\n                -102,\n                66,\n                -56,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                -64,\n                0,\n                66,\n                -58,\n      
          0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                125,\n                51,\n                51,\n                66,\n                -86,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -117,\n                -93,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -57,\n                28,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -124,\n                56,\n                -28,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -120,\n                113,\n                -57,\n                66,\n                -58,\n            
    0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -125,\n                23,\n                70,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -123,\n                -57,\n                28,\n                66,\n                -80,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                102,\n                102,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -86,\n                -85,\n                66,\n                -82,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                -117,\n                -93,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                0,\n                0,\n                66,\n                -58,\n                
0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                117,\n                85,\n                85,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -24,\n                -70,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -121,\n                116,\n                93,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                -70,\n                47,\n                66,\n                -74,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -123,\n                28,\n                114,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                -57,\n                28,\n                66,\n                -58,\n                0,\n    
            0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                93,\n                23,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -114,\n                -43,\n                85,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                56,\n                -28,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                56,\n                -28,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                -86,\n                -85,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -117,\n                -93,\n                66,\n                -74,\n                0,\n          
      0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -89,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                123,\n                -128,\n                0,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                -52,\n                -51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -93,\n                28,\n                114,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                -116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                -117,\n                -93,\n                66,\n                -82,\n                0,\n                
0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                42,\n                -85,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                -52,\n                -51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -109,\n                69,\n                -47,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -92,\n                0,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                0,\n                0,\n                66,\n                -66,\n                0,\n                0,\n             
   66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                102,\n                102,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -119,\n                102,\n                102,\n                66,\n                -88,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                -52,\n                -51,\n                66,\n                -76,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -108,\n                113,\n                -57,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                -52,\n                -51,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -117,\n                -93,\n                66,\n                -58,\n                0,\n                0,\n               
 66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                56,\n                -28,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                51,\n                51,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                85,\n                85,\n                66,\n                -90,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -121,\n                -52,\n                -51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                -103,\n                -102,\n                66,\n                -66,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -106,\n                56,\n                -28,\n                66,\n                -72,\n                0,\n                0,\n                
66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                51,\n                51,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                116,\n                93,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                46,\n                -116,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                80,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                -117,\n                -93,\n                66,\n                -56,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                124,\n                0,\n                0,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -127,\n                51,\n                51,\n                66,\n                -90,\n                0,\n                0,\n                66,\n     
           76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -126,\n                102,\n                102,\n                66,\n                -84,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                0,\n                0,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                124,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -107,\n                -47,\n                116,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -99,\n                116,\n                93,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                104,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -98,\n                -117,\n                -93,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                116,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -117,\n                23,\n                70,\n                66,\n                -80,\n                0,\n                0,\n                66,\n             
   76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -110,\n                51,\n                51,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                100,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                102,\n                102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -115,\n                -103,\n                -102,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -103,\n                -114,\n                57,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                124,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -102,\n                0,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                -128,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                
108,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -104,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -92,\n                -128,\n                0,\n                66,\n                -58,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                102,\n                102,\n                66,\n                -68,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -113,\n                -57,\n                28,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                96,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -122,\n                102,\n                102,\n                66,\n                -78,\n                0,\n                0,\n                66,\n                68,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -118,\n                0,\n                0,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                72,\n   
             0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                116,\n                93,\n                66,\n                -70,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -94,\n                51,\n                51,\n                66,\n                -60,\n                0,\n                0,\n                66,\n                88,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -111,\n                -64,\n                0,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -128,\n                -103,\n                -102,\n                66,\n                -88,\n                0,\n                0,\n                66,\n                72,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -87,\n                -52,\n                -51,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                92,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -112,\n                56,\n                -28,\n                66,\n                -72,\n                0,\n                0,\n                66,\n                76,\n           
     0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -116,\n                -70,\n                47,\n                66,\n                -88,\n                0,\n                0,\n                66,\n                76,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -85,\n                85,\n                85,\n                66,\n                -64,\n                0,\n                0,\n                66,\n                -126,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                66,\n                -105,\n                -64,\n                0,\n                66,\n                -62,\n                0,\n                0,\n                66,\n                84,\n                0,\n                0\n            ],\n            \"compressed\": true,\n            \"refCount\": [\n                4,\n                22,\n                505,\n                488645087,\n                627314938,\n                582700772,\n                399780962,\n                393065644,\n                493329789,\n                348405217,\n                399993616,\n                204415313,\n                496738328,\n                681754803,\n                538579750,\n                504435967,\n                497026482,\n                498871798,\n                592916307,\n                452127812,\n                511204333,\n                403138722,\n                491772828,\n                550815193,\n                452278665,\n                733388952,\n                508324222,\n                528467679,\n                449961106,\n                350404130,\n                449138598,\n                466853076,\n                
362626119,\n                439225686,\n                248861557,\n                429062015,\n                624839501,\n                362886465,\n                719423305,\n                219046244,\n                263992202,\n                628055381,\n                499098559,\n                482781665,\n                504168810,\n                501925309,\n                630311957,\n                533598347,\n                556015254,\n                602409978,\n                499090983,\n                578059376,\n                345154108,\n                350733058,\n                588319029,\n                452650544,\n                461820581,\n                501176100,\n                452560325,\n                254595844,\n                533593920,\n                407318270,\n                412155743,\n                503627008,\n                454598108,\n                496752770,\n                449959602,\n                538428890,\n                455440652,\n                461942735,\n                452528503,\n                305537155,\n                360787231,\n                536178278,\n                456428308,\n                10\n            ],\n            \"directLocationMap\": false,\n            \"locationList\": [\n                0,\n                2016,\n                505,\n                8068,\n                24212,\n                40356,\n                56500,\n                72644,\n                88788,\n                104932,\n                121076,\n                137220,\n                153364,\n                169508,\n                185652,\n                201796,\n                217940,\n                234084,\n                250228,\n                266372,\n                282516,\n                298660,\n                314804,\n                330948,\n                347092,\n                363236,\n                379380,\n                395524,\n                
411668,\n                427812,\n                443956,\n                460100,\n                476244,\n                492388,\n                508532,\n                524676,\n                540820,\n                556964,\n                573108,\n                589252,\n                605396,\n                621540,\n                637684,\n                653828,\n                669972,\n                686116,\n                702260,\n                718404,\n                734548,\n                750692,\n                766836,\n                782980,\n                799124,\n                815268,\n                831412,\n                847556,\n                863700,\n                879844,\n                895988,\n                912132,\n                928276,\n                944420,\n                960564,\n                976708,\n                992852,\n                1008996,\n                1025140,\n                1041284,\n                1057428,\n                1073572,\n                1089716,\n                1105860,\n                1122004,\n                1138148,\n                1154292,\n                1170436,\n                1186580,\n                1202724,\n                1218868,\n                1235012,\n                1251156,\n                1267300,\n                1283444,\n                1299588,\n                1315732,\n                1331876,\n                1348020,\n                1364164,\n                1380308,\n                1396452,\n                1412596,\n                1428740,\n                1444884,\n                1461028,\n                1477172,\n                1493316,\n                1509460,\n                1525604,\n                1541748,\n                1557892,\n                1574036,\n                1590180,\n                1606324,\n                1622468,\n                1638612,\n                1654756,\n                
1670900,\n                1687044,\n                1703188,\n                1719332,\n                1735476,\n                1751620,\n                1767764,\n                1783908,\n                1800052,\n                1816196,\n                1832340,\n                1848484,\n                1864628,\n                1880772,\n                1896916,\n                1913060,\n                1929204,\n                1945348,\n                1961492,\n                1977636,\n                1993780,\n                2009924,\n                2026068,\n                2042212,\n                2058356,\n                2074500,\n                2090644,\n                2106788,\n                2122932,\n                2139076,\n                2155220,\n                2171364,\n                2187508,\n                2203652,\n                2219796,\n                2235940,\n                2252084,\n                2268228,\n                2284372,\n                2300516,\n                2316660,\n                2332804,\n                2348948,\n                2365092,\n                2381236,\n                2397380,\n                2413524,\n                2429668,\n                2445812,\n                2461956,\n                2478100,\n                2494244,\n                2510388,\n                2526532,\n                2542676,\n                2558820,\n                2574964,\n                2591108,\n                2607252,\n                2623396,\n                2639540,\n                2655684,\n                2671828,\n                2687972,\n                2704116,\n                2720260,\n                2736404,\n                2752548,\n                2768692,\n                2784836,\n                2800980,\n                2817124,\n                2833268,\n                2849412,\n                2865556,\n                2881700,\n                2897844,\n              
  2913988,\n                2930132,\n                2946276,\n                2962420,\n                2978564,\n                2994708,\n                3010852,\n                3026996,\n                3043140,\n                3059284,\n                3075428,\n                3091572,\n                3107716,\n                3123860,\n                3140004,\n                3156148,\n                3172292,\n                3188436,\n                3204580,\n                3220724,\n                3236868,\n                3253012,\n                3269156,\n                3285300,\n                3301444,\n                3317588,\n                3333732,\n                3349876,\n                3366020,\n                3382164,\n                3398308,\n                3414452,\n                3430596,\n                3446740,\n                3462884,\n                3479028,\n                3495172,\n                3511316,\n                3527460,\n                3543604,\n                3559748,\n                3575892,\n                3592036,\n                3608180,\n                3624324,\n                3640468,\n                3656612,\n                3672756,\n                3688900,\n                3705044,\n                3721188,\n                3737332,\n                3753476,\n                3769620,\n                3785764,\n                3801908,\n                3818052,\n                3834196,\n                3850340,\n                3866484,\n                3882628,\n                3898772,\n                3914916,\n                3931060,\n                3947204,\n                3963348,\n                3979492,\n                3995636,\n                4011780,\n                4027924,\n                4044068,\n                4060212,\n                2016\n            ],\n            \"reverseAvailable\": false,\n            \"internalShinglingEnabled\": false,\n            
\"lastTimeStamp\": 505,\n            \"rotationEnabled\": false,\n            \"dynamicResizingEnabled\": true,\n            \"currentStoreCapacity\": 512,\n            \"indexCapacity\": 512\n        },\n        \"compactSamplerStates\": [\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.56757265,\n                    -0.5781482,\n                    -0.5900879,\n                    -0.6161663,\n                    -0.74962693,\n                    -0.6151643,\n                    -0.61670864,\n                    -0.67350674,\n                    -0.62119406,\n                    -0.7689292,\n                    -0.7968979,\n                    -0.63657176,\n                    -0.6180514,\n                    -0.7882518,\n                    -0.663554,\n                    -0.94079185,\n                    -1.0720736,\n                    -0.6319752,\n                    -0.6278517,\n                    -0.8380472,\n                    -0.9604324,\n                    -0.8651731,\n                    -0.798815,\n                    -0.6567839,\n                    -0.6820692,\n                    -0.9421621,\n                    -0.88186073,\n                    -0.7905466,\n                    -0.8823367,\n                    -0.8736085,\n                    -0.85339946,\n                    -0.9529896,\n                    -1.5253699,\n                    -1.1973315,\n                    -1.3102506,\n                    -0.7358565,\n                    -0.8139095,\n                    -0.7017528,\n                    -1.0430143,\n                    -1.0908111,\n                    -0.89280295,\n                    -1.3095568,\n                    -0.98362166,\n                    -0.8744968,\n                    -0.96791035,\n                    -1.2037274,\n                    -1.0653238,\n                    -0.7173222,\n                    -0.696716,\n                    -0.9508131,\n              
      -0.7733483,\n                    -1.1465819,\n                    -1.3489482,\n                    -1.1664346,\n                    -1.076245,\n                    -0.8089149,\n                    -1.9500605,\n                    -1.164026,\n                    -2.239261,\n                    -1.3196678,\n                    -1.0025091,\n                    -1.0488088,\n                    -1.221525,\n                    -2.491666,\n                    -1.0704556,\n                    -1.9203418,\n                    -1.6465822,\n                    -1.5930494,\n                    -2.1173043,\n                    -1.454771,\n                    -2.3791387,\n                    -1.2175801,\n                    -1.4976561,\n                    -0.97529536,\n                    -0.9776957,\n                    -0.7076328,\n                    -1.9713205,\n                    -1.994005,\n                    -1.4144034,\n                    -1.7856992,\n                    -1.8549204,\n                    -1.3750582,\n                    -1.182027,\n                    -2.3264935,\n                    -3.4964423,\n                    -1.971149,\n                    -1.2945454,\n                    -1.548858,\n                    -1.2041738,\n                    -1.468671,\n                    -1.4650595,\n                    -2.2554448,\n                    -1.3044451,\n                    -1.1853373,\n                    -1.730136,\n                    -1.241813,\n                    -1.1292748,\n                    -0.71116483,\n                    -0.79887444,\n                    -1.2845273,\n                    -1.105379,\n                    -1.2366987,\n                    -0.92921096,\n                    -1.1657026,\n                    -2.6717122,\n                    -1.7439715,\n                    -1.9934863,\n                    -1.2263087,\n                    -1.7673148,\n                    -2.3491075,\n                    -1.0961275,\n           
         -0.89520067,\n                    -1.0248098,\n                    -4.909743,\n                    -4.153232,\n                    -2.1522255,\n                    -1.7650629,\n                    -2.4412453,\n                    -2.7482073,\n                    -3.0675554,\n                    -3.1178305,\n                    -1.8242788,\n                    -1.892877,\n                    -3.210243,\n                    -1.9061444,\n                    -1.2413985,\n                    -1.2598059,\n                    -4.203074,\n                    -2.7223814,\n                    -2.2999434,\n                    -1.3408724,\n                    -7.0299697,\n                    -4.8111978,\n                    -3.3258471,\n                    -2.0125356,\n                    -3.1982787,\n                    -2.8312833,\n                    -2.5334399,\n                    -2.3389008,\n                    -2.9442873,\n                    -1.8930559,\n                    -2.3911982,\n                    -2.5047672,\n                    -4.4145966,\n                    -1.2764541,\n                    -3.9048793,\n                    -3.238948,\n                    -1.5198247,\n                    -1.2750254,\n                    -1.8376037,\n                    -3.6638694,\n                    -1.5739453,\n                    -1.1896442,\n                    -2.0554008,\n                    -3.439994,\n                    -3.6345048,\n                    -4.235151,\n                    -4.323954,\n                    -1.6634188,\n                    -3.0570233,\n                    -2.0862427,\n                    -3.161959,\n                    -3.9133036,\n                    -2.0392869,\n                    -3.2230003,\n                    -1.7840478,\n                    -1.9822911,\n                    -2.66759,\n                    -2.8719387,\n                    -4.63498,\n                    -3.9776332,\n                    -2.2720344,\n           
         -3.0024054,\n                    -3.007087,\n                    -1.3444325,\n                    -5.0787582,\n                    -2.1520054,\n                    -1.8487936,\n                    -1.926888,\n                    -2.18643,\n                    -4.2005286,\n                    -1.6425658,\n                    -1.5144346,\n                    -5.0267115,\n                    -2.5702274,\n                    -2.866389,\n                    -2.01247,\n                    -1.2021661,\n                    -3.4260905,\n                    -1.7879579,\n                    -3.3959882,\n                    -4.2575583,\n                    -2.2555158,\n                    -2.5958145,\n                    -3.0608132,\n                    -1.8771381,\n                    -0.9143291,\n                    -1.0414093,\n                    -1.7113725,\n                    -1.3806139,\n                    -1.5017926,\n                    -1.4232012,\n                    -2.1246378,\n                    -1.4538587,\n                    -1.328111,\n                    -2.9455528,\n                    -1.5375834,\n                    -2.6173167,\n                    -1.3469445,\n                    -2.9239001,\n                    -3.6207416,\n                    -2.3897822,\n                    -4.7668986,\n                    -2.2512138,\n                    -3.4965847,\n                    -2.324559,\n                    -1.2498051,\n                    -2.5054939,\n                    -2.3657448,\n                    -4.1115704,\n                    -2.3869483,\n                    -2.610965,\n                    -6.1069016,\n                    -4.5214825,\n                    -2.4539022,\n                    -1.4635035\n                ],\n                \"pointIndex\": [\n                    4,\n                    504,\n                    226,\n                    35529227,\n                    112927561,\n                    49466190,\n               
     87494404,\n                    14543996,\n                    46779260,\n                    60007490,\n                    107327202,\n                    98951852,\n                    22404378,\n                    16035566,\n                    95163157,\n                    53840153,\n                    109554390,\n                    71544057,\n                    80474669,\n                    91089142,\n                    113143202,\n                    13047000,\n                    38971917,\n                    119024767,\n                    816686,\n                    43233034,\n                    17622851,\n                    2195915,\n                    19686486,\n                    78660725,\n                    110063416,\n                    98516391,\n                    23698955,\n                    75550084,\n                    26028907,\n                    84501483,\n                    88406807,\n                    95864199,\n                    104978001,\n                    115947880,\n                    30120112,\n                    8535367,\n                    33647170,\n                    12618829,\n                    37229842,\n                    6852326,\n                    42331373,\n                    50854248,\n                    43799784,\n                    24507333,\n                    3305776,\n                    74196777,\n                    89702118,\n                    22617836,\n                    26248412,\n                    111735732,\n                    20711699,\n                    57937369,\n                    125458152,\n                    9661186,\n                    67170512,\n                    66812728,\n                    10178588,\n                    23985659,\n                    121632067,\n                    111096052,\n                    1915242,\n                    81124642,\n                    109101203,\n                    116266930,\n                    
226297,\n                    89366943,\n                    94814190,\n                    2307544,\n                    100343308,\n                    106639341,\n                    121462560,\n                    119722949,\n                    402\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 226,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 6231208282143323767\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.6413124,\n                    -0.6741439,\n                    -0.6560893,\n                    -0.6797705,\n                    -0.74844384,\n                    -0.66546565,\n                    -0.67169714,\n                    -0.7227025,\n                    -0.68723875,\n                    -0.7745091,\n                    -0.76826525,\n                    -0.6939699,\n                    -0.7236429,\n                    -0.77351135,\n                    -0.67195445,\n                    -0.80146384,\n                    -0.82743365,\n                    -0.7136329,\n                    -0.7724732,\n                    -0.9194121,\n                    -0.79330975,\n                    -0.88315845,\n                    -0.81832063,\n                    -0.7143264,\n                    -0.94557303,\n                    -0.785616,\n                    -0.8377497,\n                    -0.7925366,\n                    -1.0203317,\n                    -0.9742233,\n                    -0.80757946,\n                    -0.9707234,\n                    -0.90228313,\n                    -0.8993705,\n                    -1.5087696,\n                    -1.0732617,\n     
               -0.8467861,\n                    -0.8320001,\n                    -1.2694417,\n                    -0.9735975,\n                    -1.0536188,\n                    -0.8474321,\n                    -0.9971086,\n                    -0.9269841,\n                    -0.92866796,\n                    -1.0152696,\n                    -0.9399167,\n                    -0.84670246,\n                    -0.7283,\n                    -1.119393,\n                    -1.1513233,\n                    -0.9878237,\n                    -1.0573844,\n                    -0.9405707,\n                    -0.9608169,\n                    -0.93561846,\n                    -2.0413952,\n                    -1.3069793,\n                    -1.4010427,\n                    -1.0850079,\n                    -1.1713014,\n                    -0.8807193,\n                    -1.6299798,\n                    -1.897046,\n                    -1.1469332,\n                    -1.4496559,\n                    -1.254596,\n                    -1.1360562,\n                    -1.3226919,\n                    -2.6269495,\n                    -1.5345647,\n                    -1.209115,\n                    -1.4497056,\n                    -0.9226162,\n                    -1.5420644,\n                    -1.4579161,\n                    -1.810374,\n                    -1.7850882,\n                    -1.4213358,\n                    -2.0186284,\n                    -1.1599022,\n                    -1.2460217,\n                    -1.8482556,\n                    -1.623476,\n                    -1.0834888,\n                    -1.0720153,\n                    -1.5517547,\n                    -1.0411204,\n                    -1.9863597,\n                    -1.015509,\n                    -1.0393355,\n                    -1.1298063,\n                    -1.161867,\n                    -1.8876396,\n                    -1.513813,\n                    -1.0615944,\n                    -1.0518087,\n 
                   -1.0012404,\n                    -0.96941787,\n                    -1.4972963,\n                    -1.5582548,\n                    -1.3225719,\n                    -1.9551991,\n                    -1.137049,\n                    -1.1723896,\n                    -1.282302,\n                    -1.3758812,\n                    -0.9793669,\n                    -1.1025751,\n                    -1.0587844,\n                    -1.3776231,\n                    -0.9507962,\n                    -3.5233085,\n                    -2.147424,\n                    -3.5976098,\n                    -3.1932046,\n                    -1.3534847,\n                    -2.8176162,\n                    -2.3236613,\n                    -2.8634186,\n                    -1.4690521,\n                    -1.3486727,\n                    -1.8834981,\n                    -3.77391,\n                    -1.4108094,\n                    -2.0396273,\n                    -1.6383679,\n                    -3.9983404,\n                    -2.399237,\n                    -1.210579,\n                    -1.3090019,\n                    -1.5072656,\n                    -2.0974455,\n                    -4.0409245,\n                    -2.2359335,\n                    -1.7645096,\n                    -2.3572674,\n                    -1.6028525,\n                    -1.357799,\n                    -3.522606,\n                    -3.2044213,\n                    -1.55917,\n                    -2.6649456,\n                    -1.7507952,\n                    -1.51323,\n                    -4.9820375,\n                    -1.7488083,\n                    -1.6731998,\n                    -3.0470006,\n                    -1.6102738,\n                    -3.8390405,\n                    -2.1085012,\n                    -1.6249218,\n                    -2.0381305,\n                    -3.8072212,\n                    -1.9960753,\n                    -5.4924846,\n                    -2.476714,\n 
                   -2.6750987,\n                    -2.3795562,\n                    -2.590261,\n                    -1.3741866,\n                    -1.1860225,\n                    -2.5269265,\n                    -1.8842105,\n                    -5.061702,\n                    -2.7306504,\n                    -1.7086381,\n                    -2.189826,\n                    -2.2177384,\n                    -2.156229,\n                    -1.7054623,\n                    -1.239881,\n                    -2.8083131,\n                    -4.8517027,\n                    -1.7392544,\n                    -1.1228195,\n                    -3.4175887,\n                    -2.3523648,\n                    -1.8688406,\n                    -1.4306833,\n                    -1.4683838,\n                    -1.6348096,\n                    -1.8269991,\n                    -2.3902066,\n                    -2.7796946,\n                    -4.2938395,\n                    -3.7553287,\n                    -2.3795054,\n                    -1.584336,\n                    -2.9925914,\n                    -2.4059489,\n                    -1.5080501,\n                    -2.1570578,\n                    -1.4014544,\n                    -1.555314,\n                    -1.0247319,\n                    -2.5908108,\n                    -1.0894471,\n                    -2.9602163,\n                    -2.5347006,\n                    -1.7199854,\n                    -2.3623478,\n                    -2.3948119,\n                    -2.9381523,\n                    -4.0739775,\n                    -3.5914814,\n                    -2.88282,\n                    -4.8593273,\n                    -2.0374897,\n                    -3.0119255,\n                    -3.012831,\n                    -3.3967464,\n                    -3.4023802,\n                    -2.5469584,\n                    -1.0278263,\n                    -1.0037334,\n                    -4.807614,\n                    
-1.4685438,\n                    -1.847896,\n                    -1.6382484,\n                    -2.4243822,\n                    -1.3925955,\n                    -2.429869,\n                    -1.9127114\n                ],\n                \"pointIndex\": [\n                    2,\n                    504,\n                    225,\n                    111671647,\n                    118198531,\n                    47366142,\n                    82611799,\n                    15422422,\n                    18950857,\n                    62711760,\n                    5746078,\n                    103679816,\n                    89022947,\n                    43163095,\n                    91680981,\n                    74392388,\n                    35584963,\n                    10637222,\n                    73258702,\n                    93278378,\n                    19939543,\n                    33895632,\n                    119743310,\n                    7157833,\n                    16779238,\n                    87639544,\n                    116476223,\n                    72966754,\n                    106712,\n                    58701826,\n                    4475707,\n                    23909850,\n                    66472368,\n                    68701516,\n                    27495848,\n                    79357249,\n                    91508298,\n                    104685242,\n                    14140464,\n                    79803883,\n                    14234893,\n                    93429684,\n                    15144965,\n                    37198795,\n                    92147146,\n                    100692200,\n                    41931150,\n                    51576948,\n                    20643713,\n                    76949619,\n                    1748312,\n                    32986811,\n                    19136205,\n                    77524577,\n                    28191894,\n                    57981206,\n                 
   21495427,\n                    89797535,\n                    94962464,\n                    63377850,\n                    75190544,\n                    27960305,\n                    116953797,\n                    89193273,\n                    25578096,\n                    29811451,\n                    72161665,\n                    73395529,\n                    82882943,\n                    125153256,\n                    90067599,\n                    96460508,\n                    108002029,\n                    100994745,\n                    2733205,\n                    112295199,\n                    118907112,\n                    127013007\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 225,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 6910102835766708129\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.6361297,\n                    -0.6427886,\n                    -0.64978087,\n                    -0.70756996,\n                    -0.66756016,\n                    -0.6652314,\n                    -0.67686117,\n                    -0.738501,\n                    -0.86563295,\n                    -0.8297991,\n                    -0.7050236,\n                    -0.69653434,\n                    -0.7010046,\n                    -0.8382546,\n                    -1.0409559,\n                    -0.74255246,\n                    -0.9452534,\n                    -0.8861673,\n                    -1.0988894,\n                    -0.86182266,\n                    -0.9004108,\n                    -0.8393799,\n                    -0.7601388,\n                    
-0.8780445,\n                    -0.7044544,\n                    -0.74571294,\n                    -0.8152079,\n                    -1.0929025,\n                    -1.0292293,\n                    -1.3781352,\n                    -1.4376355,\n                    -1.3130229,\n                    -1.0308373,\n                    -0.9641338,\n                    -1.1163272,\n                    -0.89741856,\n                    -1.0924834,\n                    -1.4286077,\n                    -1.2312396,\n                    -1.0095732,\n                    -0.90361285,\n                    -0.96598524,\n                    -0.9863629,\n                    -0.87630546,\n                    -1.1279582,\n                    -1.368041,\n                    -0.8136685,\n                    -1.3092943,\n                    -0.9266263,\n                    -0.74128693,\n                    -0.8960323,\n                    -0.8825871,\n                    -0.79810524,\n                    -0.9010219,\n                    -0.97901225,\n                    -1.4633446,\n                    -1.5441315,\n                    -1.0377563,\n                    -1.0494223,\n                    -2.1532643,\n                    -1.5144418,\n                    -1.4966323,\n                    -1.640983,\n                    -1.9846714,\n                    -1.8514707,\n                    -1.2629799,\n                    -2.4018369,\n                    -1.8355949,\n                    -1.6449745,\n                    -1.356063,\n                    -1.2787979,\n                    -1.4097439,\n                    -1.1225487,\n                    -1.9527433,\n                    -1.655858,\n                    -2.0141113,\n                    -2.74355,\n                    -1.4586512,\n                    -1.5609925,\n                    -2.3343732,\n                    -1.0382442,\n                    -1.4256576,\n                    -1.0880938,\n                    -1.2506218,\n     
               -2.26243,\n                    -1.0773342,\n                    -1.0644708,\n                    -0.9891659,\n                    -0.9573225,\n                    -2.5247407,\n                    -1.1891508,\n                    -1.4889679,\n                    -2.2274039,\n                    -1.5068356,\n                    -1.1923847,\n                    -1.7803392,\n                    -1.8480661,\n                    -1.2104905,\n                    -1.089501,\n                    -0.8458398,\n                    -0.7424034,\n                    -1.1034354,\n                    -0.95535153,\n                    -1.430842,\n                    -1.4128844,\n                    -1.2114067,\n                    -0.8276177,\n                    -0.92598075,\n                    -0.91475046,\n                    -1.0110403,\n                    -3.731133,\n                    -2.5332088,\n                    -1.8363091,\n                    -2.5755098,\n                    -1.9488442,\n                    -1.2307419,\n                    -1.0537357,\n                    -1.9857179,\n                    -2.2417715,\n                    -3.314498,\n                    -3.8412423,\n                    -2.8418424,\n                    -2.4061882,\n                    -1.7295107,\n                    -2.1518376,\n                    -1.7952791,\n                    -2.0380082,\n                    -4.090556,\n                    -2.7759373,\n                    -2.0462887,\n                    -2.7760093,\n                    -3.8271885,\n                    -3.2707825,\n                    -3.4327483,\n                    -2.5401921,\n                    -1.8877523,\n                    -3.4253228,\n                    -1.8831528,\n                    -1.8994313,\n                    -1.6148615,\n                    -5.4198527,\n                    -1.314268,\n                    -1.3719753,\n                    -2.688971,\n                    
-2.585797,\n                    -2.2576668,\n                    -1.1549048,\n                    -5.106076,\n                    -1.954672,\n                    -6.7728157,\n                    -3.7644725,\n                    -3.1551533,\n                    -4.8891025,\n                    -4.727654,\n                    -4.586425,\n                    -1.7163793,\n                    -3.6443462,\n                    -1.732317,\n                    -2.1879315,\n                    -4.6237965,\n                    -2.814348,\n                    -1.2486494,\n                    -2.1599646,\n                    -1.7666686,\n                    -1.4898446,\n                    -1.3215132,\n                    -4.152714,\n                    -1.413151,\n                    -4.8497276,\n                    -2.790615,\n                    -3.3900342,\n                    -2.3118632,\n                    -1.1577141,\n                    -1.6863161,\n                    -3.1471484,\n                    -2.5949144,\n                    -1.1936404,\n                    -5.16345,\n                    -1.0650976,\n                    -3.213641,\n                    -2.6836355,\n                    -3.2522056,\n                    -1.2563653,\n                    -2.3604238,\n                    -2.864063,\n                    -2.9894278,\n                    -4.0283117,\n                    -1.9091228,\n                    -2.1098876,\n                    -1.6736158,\n                    -1.4758114,\n                    -1.9855922,\n                    -2.9283683,\n                    -3.017182,\n                    -2.0328176,\n                    -3.7195306,\n                    -1.5892555,\n                    -3.5429056,\n                    -2.7333112,\n                    -1.3380169,\n                    -0.9410527,\n                    -2.6401072,\n                    -0.9119615,\n                    -2.025092,\n                    -3.1671853,\n                    
-1.0942268,\n                    -3.079749,\n                    -3.9915621,\n                    -2.765839,\n                    -1.9798393,\n                    -1.5078577,\n                    -3.4867995,\n                    -2.388034,\n                    -3.0964606,\n                    -2.8108213,\n                    -2.1528425,\n                    -1.8067331,\n                    -1.5127798,\n                    -1.1197842\n                ],\n                \"pointIndex\": [\n                    1,\n                    503,\n                    219,\n                    7019155,\n                    86646336,\n                    120179378,\n                    91734547,\n                    17320238,\n                    96151162,\n                    105435557,\n                    36079528,\n                    127178896,\n                    60140693,\n                    42538739,\n                    83950375,\n                    55457344,\n                    12546722,\n                    20888350,\n                    81270008,\n                    112022852,\n                    32868989,\n                    35156862,\n                    98160653,\n                    68597093,\n                    7927708,\n                    46520646,\n                    1883114,\n                    112105826,\n                    124338430,\n                    93506926,\n                    63630244,\n                    37324365,\n                    72408637,\n                    76885895,\n                    80009297,\n                    109326382,\n                    101401805,\n                    115582259,\n                    123203273,\n                    33876601,\n                    4654804,\n                    35893754,\n                    37607557,\n                    38489150,\n                    103329392,\n                    40243586,\n                    17148944,\n                    63002618,\n                    
8336288,\n                    67760943,\n                    50594140,\n                    95952481,\n                    52730153,\n                    84864861,\n                    28444654,\n                    59823071,\n                    67866944,\n                    61754053,\n                    10357866,\n                    66015676,\n                    67433952,\n                    91270347,\n                    109234443,\n                    74395631,\n                    81912440,\n                    78081117,\n                    122867865,\n                    106319468,\n                    91502132,\n                    93797791,\n                    102667445,\n                    104529340,\n                    109866779,\n                    116915765,\n                    124217325,\n                    32637650\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 219,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -8153191400930165074\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.48931122,\n                    -0.49723798,\n                    -0.49012178,\n                    -0.623267,\n                    -0.6048579,\n                    -0.5018685,\n                    -0.493251,\n                    -0.6810186,\n                    -0.62533385,\n                    -0.611521,\n                    -0.6176875,\n                    -0.5930478,\n                    -0.57753223,\n                    -0.52694714,\n                    -0.6676626,\n                    -0.7207007,\n                    -0.7123615,\n                    -0.6789895,\n         
           -0.745815,\n                    -0.80155474,\n                    -0.8633884,\n                    -0.6581785,\n                    -0.6463497,\n                    -0.62930876,\n                    -0.6465614,\n                    -0.64662224,\n                    -0.63142097,\n                    -0.5293907,\n                    -1.0326748,\n                    -0.7933024,\n                    -1.0779095,\n                    -0.77808034,\n                    -1.0023601,\n                    -0.74175876,\n                    -0.9819902,\n                    -0.9997212,\n                    -0.953454,\n                    -0.9269736,\n                    -0.77646047,\n                    -0.88533807,\n                    -1.0808892,\n                    -0.911517,\n                    -1.1113745,\n                    -1.2881032,\n                    -0.6981712,\n                    -1.048881,\n                    -0.82391083,\n                    -0.7688017,\n                    -0.8863816,\n                    -0.67885447,\n                    -0.7831837,\n                    -0.73852074,\n                    -0.75421745,\n                    -0.6855453,\n                    -0.6836569,\n                    -1.1590838,\n                    -0.6129658,\n                    -1.337165,\n                    -1.1796199,\n                    -0.91301686,\n                    -1.7960355,\n                    -1.1085035,\n                    -2.0368264,\n                    -1.0979167,\n                    -1.5597198,\n                    -2.3726456,\n                    -1.6531286,\n                    -0.7789359,\n                    -0.9969333,\n                    -1.3209299,\n                    -1.2398386,\n                    -1.0943171,\n                    -1.8565011,\n                    -1.2082129,\n                    -2.0254157,\n                    -1.1507211,\n                    -1.0858723,\n                    -0.9761007,\n                    
-1.6726984,\n                    -1.3714278,\n                    -1.1769344,\n                    -1.2964422,\n                    -1.3787014,\n                    -1.0774325,\n                    -1.2595168,\n                    -1.5023205,\n                    -1.7526349,\n                    -2.0585146,\n                    -1.3601263,\n                    -1.2020801,\n                    -0.76575804,\n                    -2.5528324,\n                    -1.1102475,\n                    -0.977003,\n                    -0.85832506,\n                    -1.0666026,\n                    -0.96740466,\n                    -1.0918508,\n                    -2.1194818,\n                    -0.7036158,\n                    -1.0782622,\n                    -1.2990319,\n                    -1.7673608,\n                    -1.3664817,\n                    -1.101482,\n                    -0.86120254,\n                    -1.0904459,\n                    -0.82473683,\n                    -1.0382458,\n                    -0.7014238,\n                    -1.1469412,\n                    -1.5819608,\n                    -2.0776114,\n                    -0.8331923,\n                    -0.74183834,\n                    -2.0028663,\n                    -1.5265291,\n                    -2.3708613,\n                    -1.6433748,\n                    -2.1847932,\n                    -2.2085743,\n                    -3.0770442,\n                    -4.579025,\n                    -1.9978226,\n                    -2.0566041,\n                    -3.6105764,\n                    -3.1895306,\n                    -1.7005802,\n                    -1.2436558,\n                    -1.7310462,\n                    -3.3567033,\n                    -2.6761308,\n                    -2.8754559,\n                    -2.2890341,\n                    -2.090929,\n                    -1.3141505,\n                    -1.9325581,\n                    -4.143366,\n                    -1.8410652,\n      
              -1.6836159,\n                    -1.4787452,\n                    -1.5515575,\n                    -1.5433555,\n                    -3.4945877,\n                    -2.9452395,\n                    -2.9747286,\n                    -3.8249507,\n                    -4.712332,\n                    -2.8805053,\n                    -2.1511056,\n                    -2.2605977,\n                    -1.9543904,\n                    -1.7370383,\n                    -2.8557973,\n                    -2.027881,\n                    -1.4113812,\n                    -1.6777797,\n                    -3.6317258,\n                    -2.9565845,\n                    -1.5805341,\n                    -2.0916042,\n                    -1.8768723,\n                    -1.4425573,\n                    -1.6574762,\n                    -5.694653,\n                    -2.0396917,\n                    -1.9360616,\n                    -2.9818797,\n                    -1.3613492,\n                    -1.7539183,\n                    -5.2183347,\n                    -5.3925138,\n                    -3.69513,\n                    -7.285011,\n                    -2.0467029,\n                    -4.6431193,\n                    -3.5842743,\n                    -2.338261,\n                    -1.6631751,\n                    -2.7470868,\n                    -1.5942484,\n                    -1.2338036,\n                    -1.5301003,\n                    -3.2176993,\n                    -2.6857648,\n                    -1.5410172,\n                    -2.2258418,\n                    -1.2013426,\n                    -1.2898548,\n                    -4.8211923,\n                    -5.0544915,\n                    -1.8151265,\n                    -1.3625166,\n                    -3.4235814,\n                    -2.0515249,\n                    -1.1709322,\n                    -1.9283103,\n                    -2.2851682,\n                    -2.3307283,\n                    
-2.4420938,\n                    -1.9790992,\n                    -1.7172705,\n                    -2.305028,\n                    -1.4039015,\n                    -2.8155386,\n                    -3.0897534,\n                    -3.5799522,\n                    -2.252491,\n                    -1.934329,\n                    -6.9373517,\n                    -2.9747658,\n                    -2.2050023,\n                    -0.9547009,\n                    -2.517102,\n                    -1.4843043,\n                    -2.2253036,\n                    -1.3988092,\n                    -2.5037396,\n                    -3.1673834,\n                    -0.8072798,\n                    -1.4884133,\n                    -1.7703872,\n                    -2.2750254,\n                    -1.9589535,\n                    -2.5536778,\n                    -2.4836578,\n                    -3.569326,\n                    -1.6090255,\n                    -1.1384836,\n                    -0.7648988\n                ],\n                \"pointIndex\": [\n                    2,\n                    502,\n                    230,\n                    111339292,\n                    85613591,\n                    58324908,\n                    87013905,\n                    37649672,\n                    8623547,\n                    65372694,\n                    68426884,\n                    115159395,\n                    23409781,\n                    16687454,\n                    114499214,\n                    52593671,\n                    62770336,\n                    22136302,\n                    11966504,\n                    24715439,\n                    102196100,\n                    124284645,\n                    63854009,\n                    37351203,\n                    11321662,\n                    17604151,\n                    59108384,\n                    53958592,\n                    53080528,\n                    30732930,\n                    
84649389,\n                    65556598,\n                    70498491,\n                    24491168,\n                    68752529,\n                    31292774,\n                    92071582,\n                    2570990,\n                    13264593,\n                    116182818,\n                    117957071,\n                    33697231,\n                    3043710,\n                    82913134,\n                    19169290,\n                    72793028,\n                    39782858,\n                    42751492,\n                    96796374,\n                    8120527,\n                    41760334,\n                    78600692,\n                    49909194,\n                    51222082,\n                    65924290,\n                    59617474,\n                    21024187,\n                    105381725,\n                    60317624,\n                    63447385,\n                    45559957,\n                    82210418,\n                    11007062,\n                    2150073,\n                    73502844,\n                    114103554,\n                    25757433,\n                    79620234,\n                    124966752,\n                    27534372,\n                    56910093,\n                    90668857,\n                    94056352,\n                    98591664,\n                    101355199,\n                    108140259,\n                    68998662,\n                    121992128,\n                    120269931,\n                    245471\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 230,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 4045460986159022609\n            },\n            
{\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.4681739,\n                    -0.48007405,\n                    -0.49789903,\n                    -0.5071157,\n                    -0.5392349,\n                    -0.49877673,\n                    -0.5173413,\n                    -0.55151767,\n                    -0.56650174,\n                    -0.5500999,\n                    -0.5561777,\n                    -0.5550377,\n                    -0.5654364,\n                    -0.55124235,\n                    -0.6208702,\n                    -0.59977484,\n                    -0.7121681,\n                    -0.5872569,\n                    -0.57015973,\n                    -0.60092586,\n                    -0.5919172,\n                    -0.57651204,\n                    -0.599501,\n                    -0.84314567,\n                    -0.71985126,\n                    -0.61190027,\n                    -0.607676,\n                    -0.61616707,\n                    -0.87906057,\n                    -0.88150966,\n                    -1.2262063,\n                    -0.6288519,\n                    -0.90566653,\n                    -0.74081695,\n                    -0.78718567,\n                    -0.6843046,\n                    -0.5967524,\n                    -0.97782314,\n                    -0.9634063,\n                    -1.1774516,\n                    -0.6383265,\n                    -1.3105494,\n                    -0.61028373,\n                    -0.71040356,\n                    -0.9213385,\n                    -0.8167878,\n                    -0.65758455,\n                    -0.8537457,\n                    -1.0144536,\n                    -1.1103842,\n                    -0.73576075,\n                    -0.65716136,\n                    -0.7518719,\n                    -0.65931726,\n                    -0.6404282,\n                    -0.6632596,\n                    -1.0990821,\n                    
-1.1170622,\n                    -1.5678968,\n                    -1.3625112,\n                    -1.085547,\n                    -1.5416995,\n                    -1.5436345,\n                    -0.9560163,\n                    -1.1595485,\n                    -1.1146106,\n                    -0.9999493,\n                    -0.9884563,\n                    -1.445272,\n                    -1.1867231,\n                    -0.94520855,\n                    -1.1810359,\n                    -1.6496556,\n                    -1.3205277,\n                    -0.87346005,\n                    -1.2355236,\n                    -1.2484058,\n                    -1.0776839,\n                    -1.8207626,\n                    -2.0081627,\n                    -2.1486614,\n                    -1.8236513,\n                    -1.2014854,\n                    -1.3567494,\n                    -1.7071117,\n                    -1.3450822,\n                    -1.1705921,\n                    -1.4246175,\n                    -1.3039186,\n                    -1.0957643,\n                    -0.9249299,\n                    -1.356931,\n                    -0.8482996,\n                    -0.675301,\n                    -0.84562135,\n                    -1.1149482,\n                    -1.6455381,\n                    -1.2188505,\n                    -1.4722234,\n                    -1.828188,\n                    -1.5412449,\n                    -0.8473445,\n                    -0.92642033,\n                    -0.8599541,\n                    -1.0990387,\n                    -0.83823735,\n                    -1.4376352,\n                    -0.8778756,\n                    -0.66725403,\n                    -1.1006153,\n                    -0.7513238,\n                    -0.7345586,\n                    -0.66469234,\n                    -1.3874843,\n                    -2.4865832,\n                    -2.9973478,\n                    -2.306394,\n                    -2.673725,\n       
             -3.3250866,\n                    -2.1260333,\n                    -2.1913078,\n                    -1.8699329,\n                    -1.7517766,\n                    -1.6185035,\n                    -1.6238472,\n                    -2.273655,\n                    -5.387515,\n                    -4.84232,\n                    -3.1442783,\n                    -3.4106052,\n                    -1.8919024,\n                    -1.4764744,\n                    -1.1394618,\n                    -4.4950013,\n                    -1.0300931,\n                    -4.261308,\n                    -1.3040004,\n                    -1.6697409,\n                    -2.3310206,\n                    -1.9800646,\n                    -2.5040128,\n                    -0.9702708,\n                    -1.6211085,\n                    -2.4045038,\n                    -2.6541598,\n                    -1.83864,\n                    -3.9018402,\n                    -1.9505028,\n                    -3.0774148,\n                    -1.2140405,\n                    -1.0377613,\n                    -2.676499,\n                    -2.0348115,\n                    -2.026429,\n                    -1.398141,\n                    -1.1595172,\n                    -2.031862,\n                    -1.9018892,\n                    -1.907572,\n                    -3.091949,\n                    -4.789639,\n                    -2.4214916,\n                    -2.600685,\n                    -3.3066297,\n                    -3.311136,\n                    -1.543751,\n                    -4.41873,\n                    -1.721474,\n                    -2.1571455,\n                    -2.8674223,\n                    -1.771004,\n                    -3.224615,\n                    -1.9476156,\n                    -2.4125326,\n                    -2.195531,\n                    -2.5996487,\n                    -1.4842075,\n                    -2.531535,\n                    -2.6502254,\n                  
  -1.8549097,\n                    -2.5735629,\n                    -1.4825006,\n                    -2.5768104,\n                    -2.2504616,\n                    -1.3600206,\n                    -2.5184896,\n                    -1.0576596,\n                    -3.8762393,\n                    -2.7213616,\n                    -0.9605226,\n                    -1.5147715,\n                    -2.870183,\n                    -1.8479155,\n                    -3.3377554,\n                    -1.9698049,\n                    -1.9063296,\n                    -1.5209743,\n                    -3.7611253,\n                    -1.6307871,\n                    -2.031272,\n                    -4.151252,\n                    -1.6233153,\n                    -1.9489214,\n                    -6.4323297,\n                    -1.1256434,\n                    -1.313254,\n                    -2.584117,\n                    -4.35672,\n                    -1.3673528,\n                    -1.216091,\n                    -2.888076,\n                    -1.3286314,\n                    -0.9383582,\n                    -3.279279,\n                    -4.4881206,\n                    -0.95496076,\n                    -1.2240533,\n                    -1.0146334,\n                    -2.168913,\n                    -1.5143352,\n                    -1.1024603,\n                    -2.4158843,\n                    -2.5341992,\n                    -1.4697461,\n                    -0.8430382,\n                    -1.953946,\n                    -1.1216923,\n                    -3.570344,\n                    -1.8596222\n                ],\n                \"pointIndex\": [\n                    0,\n                    503,\n                    229,\n                    97308902,\n                    90546316,\n                    60531530,\n                    25415445,\n                    20053538,\n                    44909203,\n                    59035730,\n                    85880856,\n  
                  114508858,\n                    32430183,\n                    38734551,\n                    45130336,\n                    24225300,\n                    57518001,\n                    88017364,\n                    81431559,\n                    95179440,\n                    109730194,\n                    30224840,\n                    32023550,\n                    34779612,\n                    37411646,\n                    59797001,\n                    78525232,\n                    93423098,\n                    48613000,\n                    20423406,\n                    125847509,\n                    78012884,\n                    92848752,\n                    90257220,\n                    79655512,\n                    87435428,\n                    93908915,\n                    116022841,\n                    104712945,\n                    111236741,\n                    14469842,\n                    38373353,\n                    7382757,\n                    45236435,\n                    28703822,\n                    117425585,\n                    36904534,\n                    29186408,\n                    17099365,\n                    20913138,\n                    5216832,\n                    125480067,\n                    90774671,\n                    4649981,\n                    29814811,\n                    21945113,\n                    71558967,\n                    54887893,\n                    101510858,\n                    88783696,\n                    64133978,\n                    64525352,\n                    113952641,\n                    115717676,\n                    73090102,\n                    74664864,\n                    79148335,\n                    84019128,\n                    74340922,\n                    86377870,\n                    26855188,\n                    96460424,\n                    3240594,\n                    98755278,\n                    105951844,\n          
          110712086,\n                    118850697,\n                    123183109,\n                    127513496,\n                    503\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 229,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -7062140655107411726\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.53298694,\n                    -0.5370285,\n                    -0.53302014,\n                    -0.5869225,\n                    -0.5386616,\n                    -0.53426373,\n                    -0.54588836,\n                    -0.6144738,\n                    -0.6666626,\n                    -0.55950403,\n                    -0.7248929,\n                    -0.5607794,\n                    -0.5471208,\n                    -0.57251537,\n                    -0.56006366,\n                    -0.6167674,\n                    -0.61541486,\n                    -0.817127,\n                    -0.6857935,\n                    -0.5771496,\n                    -0.6130053,\n                    -0.7751004,\n                    -0.80550486,\n                    -0.64279586,\n                    -0.67447525,\n                    -0.67872816,\n                    -0.5948092,\n                    -0.6225593,\n                    -0.7412373,\n                    -0.63900876,\n                    -0.6328384,\n                    -0.777648,\n                    -0.76546454,\n                    -0.74156725,\n                    -0.82844687,\n                    -0.8392029,\n                    -1.044082,\n                    -0.7329778,\n                    -0.99735993,\n                
    -0.7560849,\n                    -0.7613802,\n                    -0.85758793,\n                    -0.62227815,\n                    -0.7803536,\n                    -0.88435996,\n                    -0.9483788,\n                    -0.8897424,\n                    -0.84419423,\n                    -0.76181144,\n                    -1.5871549,\n                    -0.9929117,\n                    -0.75543576,\n                    -0.7979167,\n                    -0.735058,\n                    -0.6214327,\n                    -0.7944632,\n                    -0.7902639,\n                    -0.83461565,\n                    -1.4979233,\n                    -0.7796874,\n                    -1.0374615,\n                    -0.8105296,\n                    -1.8733882,\n                    -0.8792849,\n                    -0.9162315,\n                    -1.1558676,\n                    -1.2506269,\n                    -1.1495136,\n                    -1.5213449,\n                    -2.2541447,\n                    -1.1921371,\n                    -1.3501981,\n                    -0.9838202,\n                    -1.0595492,\n                    -1.3465357,\n                    -0.9763687,\n                    -1.0712547,\n                    -1.4653171,\n                    -1.2853703,\n                    -2.0836577,\n                    -0.9410978,\n                    -0.96253556,\n                    -0.81756365,\n                    -1.3851339,\n                    -1.179112,\n                    -0.6709964,\n                    -0.76329947,\n                    -1.1769576,\n                    -1.4954052,\n                    -0.960525,\n                    -1.0389729,\n                    -1.2045083,\n                    -1.8349122,\n                    -1.461973,\n                    -1.300356,\n                    -0.95158595,\n                    -1.0056021,\n                    -2.020996,\n                    -1.2476102,\n                    
-2.0049386,\n                    -1.8346143,\n                    -1.1478754,\n                    -1.8596087,\n                    -0.98516494,\n                    -0.99060625,\n                    -0.9482102,\n                    -0.8543665,\n                    -0.9581024,\n                    -0.94102937,\n                    -0.6761013,\n                    -0.95146936,\n                    -1.0973043,\n                    -0.88721204,\n                    -0.8015781,\n                    -1.2003738,\n                    -1.0721217,\n                    -0.8620344,\n                    -1.6259612,\n                    -3.8667881,\n                    -0.93236417,\n                    -0.8828107,\n                    -1.493874,\n                    -1.5099301,\n                    -2.5947087,\n                    -3.358613,\n                    -4.7279215,\n                    -2.1498632,\n                    -0.8850714,\n                    -0.9867246,\n                    -5.2405734,\n                    -1.5883927,\n                    -1.1818252,\n                    -1.1974761,\n                    -1.7496891,\n                    -4.0008407,\n                    -1.476634,\n                    -1.3821882,\n                    -1.875585,\n                    -1.6439567,\n                    -5.709731,\n                    -3.068541,\n                    -2.4429452,\n                    -1.776948,\n                    -2.922071,\n                    -1.7195975,\n                    -1.1998038,\n                    -1.2773302,\n                    -3.1672099,\n                    -1.4617462,\n                    -1.7898146,\n                    -3.2232401,\n                    -1.1349226,\n                    -1.3632929,\n                    -1.8463753,\n                    -4.629285,\n                    -1.6867559,\n                    -1.8657526,\n                    -1.5905323,\n                    -2.1952798,\n                    -3.075461,\n           
         -2.5903156,\n                    -1.5011766,\n                    -1.6764424,\n                    -2.241862,\n                    -1.2524456,\n                    -2.0902004,\n                    -1.5791374,\n                    -1.4672202,\n                    -1.4199411,\n                    -1.2729433,\n                    -1.6486052,\n                    -1.5432082,\n                    -3.741604,\n                    -4.3343306,\n                    -3.1124523,\n                    -1.3878208,\n                    -1.808157,\n                    -2.4290116,\n                    -2.043054,\n                    -1.5419066,\n                    -1.6230438,\n                    -2.0333831,\n                    -1.5723906,\n                    -2.873767,\n                    -1.4621615,\n                    -2.5015512,\n                    -3.94249,\n                    -1.5258577,\n                    -1.8479875,\n                    -2.240764,\n                    -1.8996323,\n                    -1.2670697,\n                    -1.111706,\n                    -3.5020432,\n                    -2.166442,\n                    -2.0529845,\n                    -3.032947,\n                    -1.8734413,\n                    -1.9137822,\n                    -5.747706,\n                    -6.5535336,\n                    -1.8551661,\n                    -2.2223253,\n                    -3.246324,\n                    -2.2672668,\n                    -3.214827,\n                    -1.8734428,\n                    -1.175713,\n                    -1.2449504,\n                    -5.0981402,\n                    -1.1101719,\n                    -3.6191673,\n                    -1.1534905,\n                    -2.1083336,\n                    -0.95823085,\n                    -4.0686164,\n                    -1.2263622,\n                    -3.3285706,\n                    -1.6448618,\n                    -1.0919825,\n                    -2.5784686,\n            
        -3.4817815,\n                    -1.2479918,\n                    -3.134906,\n                    -1.1962954,\n                    -1.6323433,\n                    -2.1114237,\n                    -0.8559951\n                ],\n                \"pointIndex\": [\n                    0,\n                    503,\n                    228,\n                    93009322,\n                    32391791,\n                    6112720,\n                    29612775,\n                    49771418,\n                    18883514,\n                    16121665,\n                    86770664,\n                    115266187,\n                    34589577,\n                    59516381,\n                    18350350,\n                    57759317,\n                    39037044,\n                    70805195,\n                    26572791,\n                    94168921,\n                    109425411,\n                    127831981,\n                    15016203,\n                    110821980,\n                    85958860,\n                    41693101,\n                    72741503,\n                    59203443,\n                    116104014,\n                    58329384,\n                    62243579,\n                    64305944,\n                    121055088,\n                    24881265,\n                    111164926,\n                    90454212,\n                    93322042,\n                    126810352,\n                    106388250,\n                    117830318,\n                    126742863,\n                    65345741,\n                    3622619,\n                    116568751,\n                    73871814,\n                    95430527,\n                    114891825,\n                    23449004,\n                    108834772,\n                    83860728,\n                    850823,\n                    61508947,\n                    61397268,\n                    103677019,\n                    83368800,\n                    
70259113,\n                    77359180,\n                    66256039,\n                    5203535,\n                    64136609,\n                    3428296,\n                    127212426,\n                    70500267,\n                    8683920,\n                    114219368,\n                    78898989,\n                    42583776,\n                    84243156,\n                    88572891,\n                    1987234,\n                    31681933,\n                    53278978,\n                    98360665,\n                    117555367,\n                    6829606,\n                    109949533,\n                    43916474,\n                    120383937,\n                    14474349\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 228,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 8531256490203702939\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5294597,\n                    -0.5576223,\n                    -0.5410471,\n                    -0.5649329,\n                    -0.58059883,\n                    -0.59872955,\n                    -0.59176224,\n                    -0.6084886,\n                    -0.7097818,\n                    -0.68805045,\n                    -0.585696,\n                    -0.6078537,\n                    -0.63380736,\n                    -0.59650224,\n                    -0.6027426,\n                    -0.61310583,\n                    -0.61372364,\n                    -0.7964455,\n                    -0.87056744,\n                    -0.7759881,\n                    -1.0372149,\n                    -0.65877354,\n   
                 -0.61011463,\n                    -0.72138506,\n                    -0.70972306,\n                    -0.66512847,\n                    -0.6612416,\n                    -0.5998607,\n                    -0.99996316,\n                    -0.9549136,\n                    -0.61410373,\n                    -0.8476963,\n                    -0.71036553,\n                    -1.085258,\n                    -0.96238244,\n                    -0.910108,\n                    -1.0033214,\n                    -1.1844999,\n                    -1.3858411,\n                    -0.8568252,\n                    -0.7952146,\n                    -1.0685524,\n                    -1.4095951,\n                    -0.66511136,\n                    -1.051108,\n                    -1.0629483,\n                    -0.8007286,\n                    -0.73972476,\n                    -0.78043693,\n                    -1.4360243,\n                    -0.74637586,\n                    -0.67966276,\n                    -0.74322665,\n                    -1.1533587,\n                    -0.8437019,\n                    -0.607716,\n                    -0.9872282,\n                    -1.437123,\n                    -1.2489737,\n                    -1.6970297,\n                    -1.6194949,\n                    -1.318194,\n                    -1.1215625,\n                    -1.0581961,\n                    -0.970132,\n                    -0.95887625,\n                    -0.8250626,\n                    -1.3179755,\n                    -1.2062294,\n                    -1.3604482,\n                    -0.9787357,\n                    -1.2869112,\n                    -1.631505,\n                    -1.0290068,\n                    -1.0062145,\n                    -1.8089368,\n                    -1.9163202,\n                    -1.7504554,\n                    -2.6891043,\n                    -0.92384803,\n                    -1.4413592,\n                    -1.8508644,\n               
     -1.0570983,\n                    -1.1877431,\n                    -1.1719575,\n                    -1.9445239,\n                    -2.1189156,\n                    -1.4971766,\n                    -1.1050333,\n                    -1.5659459,\n                    -1.3083023,\n                    -1.3377016,\n                    -1.8765005,\n                    -1.0915056,\n                    -0.85164684,\n                    -0.8110967,\n                    -1.1261318,\n                    -0.7932666,\n                    -1.508754,\n                    -1.6883552,\n                    -1.631404,\n                    -1.2834219,\n                    -1.4985572,\n                    -0.74091995,\n                    -0.8910905,\n                    -0.76595217,\n                    -0.93938935,\n                    -1.5934051,\n                    -1.5612308,\n                    -1.3203666,\n                    -1.582214,\n                    -0.61047065,\n                    -0.9571381,\n                    -2.2664058,\n                    -4.521733,\n                    -1.4690771,\n                    -2.5001209,\n                    -1.6353573,\n                    -1.7051553,\n                    -4.229778,\n                    -2.168278,\n                    -2.514496,\n                    -2.2526808,\n                    -3.2943108,\n                    -2.1315482,\n                    -1.8274347,\n                    -4.216294,\n                    -1.5538868,\n                    -1.3124804,\n                    -1.8857366,\n                    -1.1115198,\n                    -3.7827122,\n                    -1.3951958,\n                    -2.5932121,\n                    -1.3097647,\n                    -1.6431139,\n                    -3.0072067,\n                    -2.1572142,\n                    -3.3944185,\n                    -2.2084262,\n                    -1.4533173,\n                    -1.2706913,\n                    -1.0741483,\n     
               -1.6193907,\n                    -2.244457,\n                    -2.4748313,\n                    -2.0723145,\n                    -1.7034297,\n                    -1.3629124,\n                    -2.333396,\n                    -2.0661693,\n                    -3.5874333,\n                    -2.2120068,\n                    -2.2768779,\n                    -3.0420141,\n                    -1.7942852,\n                    -2.5977976,\n                    -3.8865256,\n                    -3.352928,\n                    -2.1355355,\n                    -0.93818676,\n                    -2.0091407,\n                    -2.7621565,\n                    -3.4339957,\n                    -2.5531046,\n                    -1.8565471,\n                    -1.6133701,\n                    -1.9098023,\n                    -3.2153764,\n                    -2.1790311,\n                    -2.8811343,\n                    -1.9575641,\n                    -2.8316908,\n                    -3.0212128,\n                    -2.1982098,\n                    -2.3480704,\n                    -1.7099223,\n                    -2.6372912,\n                    -1.7484987,\n                    -2.5965567,\n                    -2.7568269,\n                    -3.3684866,\n                    -1.8365451,\n                    -4.9798565,\n                    -2.7800128,\n                    -2.0807607,\n                    -4.2281303,\n                    -3.197505,\n                    -2.5512059,\n                    -4.6846085,\n                    -1.9073757,\n                    -5.087554,\n                    -1.0783356,\n                    -1.7837888,\n                    -3.4602354,\n                    -0.9112789,\n                    -1.9554101,\n                    -2.2751474,\n                    -2.8101778,\n                    -4.778091,\n                    -4.4575386,\n                    -2.9218266,\n                    -2.4827263,\n                    
-2.644842,\n                    -1.5498966,\n                    -1.7656147,\n                    -1.668538,\n                    -1.9522663,\n                    -2.826835,\n                    -2.6780727,\n                    -0.95048386,\n                    -0.98014146,\n                    -2.483194,\n                    -3.975097,\n                    -1.8736228,\n                    -2.7426178,\n                    -1.6407981,\n                    -2.2753592,\n                    -2.0097847,\n                    -1.4792215,\n                    -1.7406603,\n                    -1.9646327,\n                    -2.2271302,\n                    -3.3145914,\n                    -1.0689687\n                ],\n                \"pointIndex\": [\n                    0,\n                    504,\n                    225,\n                    13403184,\n                    111495485,\n                    110075349,\n                    95286179,\n                    9082452,\n                    105107231,\n                    68503793,\n                    92229860,\n                    115741825,\n                    7426527,\n                    45483998,\n                    54101854,\n                    60557798,\n                    69292652,\n                    100886814,\n                    28223841,\n                    102230303,\n                    35674633,\n                    37336130,\n                    3777454,\n                    42419097,\n                    45683484,\n                    50447787,\n                    51025294,\n                    26013912,\n                    101107105,\n                    22461981,\n                    30742553,\n                    77687452,\n                    78700000,\n                    61768436,\n                    35877561,\n                    92404425,\n                    98119292,\n                    111182215,\n                    227696,\n                    3552227,\n                  
  100345500,\n                    38328637,\n                    107160848,\n                    41221529,\n                    109380134,\n                    52167691,\n                    97181098,\n                    51851055,\n                    21633223,\n                    125705045,\n                    41500469,\n                    53916530,\n                    56440578,\n                    33704439,\n                    36584834,\n                    19904467,\n                    23136818,\n                    86675926,\n                    23851670,\n                    71295809,\n                    74103306,\n                    75498188,\n                    96884548,\n                    6794077,\n                    84836487,\n                    89715102,\n                    71070215,\n                    91733595,\n                    53489144,\n                    29386823,\n                    106303479,\n                    20601240,\n                    110642482,\n                    33376894,\n                    116785738,\n                    2786544,\n                    122652855,\n                    128784580\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 225,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 86567694310489782\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.548254,\n                    -0.5607937,\n                    -0.57866526,\n                    -0.5624324,\n                    -0.5818964,\n                    -0.6089347,\n                    -0.6727932,\n                    -0.562878,\n                    -0.5639682,\n 
                   -0.6208581,\n                    -0.59887177,\n                    -0.72257894,\n                    -0.620159,\n                    -0.68043953,\n                    -0.7551201,\n                    -0.73569727,\n                    -0.6401625,\n                    -0.84797335,\n                    -0.6484228,\n                    -0.6759319,\n                    -0.8306258,\n                    -0.6635486,\n                    -0.64221096,\n                    -0.7404873,\n                    -0.7288819,\n                    -0.62848085,\n                    -0.76633126,\n                    -0.7984979,\n                    -0.9023936,\n                    -1.33871,\n                    -1.1491896,\n                    -0.7609833,\n                    -0.9475599,\n                    -0.7572917,\n                    -1.2838012,\n                    -1.1049031,\n                    -1.1746876,\n                    -0.6899676,\n                    -0.88186604,\n                    -0.8181633,\n                    -0.680215,\n                    -0.8640347,\n                    -1.1342883,\n                    -0.7576444,\n                    -0.8179945,\n                    -0.6939694,\n                    -0.8686436,\n                    -0.8221472,\n                    -0.84974766,\n                    -0.73687667,\n                    -1.1154488,\n                    -0.7298011,\n                    -0.76152563,\n                    -1.2177804,\n                    -0.9150309,\n                    -0.96158725,\n                    -0.8462979,\n                    -1.1513401,\n                    -0.9344139,\n                    -1.6353195,\n                    -1.3437028,\n                    -1.4996282,\n                    -1.3432597,\n                    -2.301112,\n                    -0.8197572,\n                    -1.2027588,\n                    -1.1205764,\n                    -0.87856936,\n                    -1.5738864,\n            
        -2.232956,\n                    -3.8641906,\n                    -1.4949442,\n                    -1.4932362,\n                    -1.2822062,\n                    -1.7245877,\n                    -0.9485146,\n                    -1.4527066,\n                    -0.93614507,\n                    -1.469078,\n                    -0.85976386,\n                    -1.3209335,\n                    -0.7144693,\n                    -0.68033046,\n                    -0.9082166,\n                    -1.9164375,\n                    -1.7172593,\n                    -1.4276229,\n                    -1.3900268,\n                    -1.3021207,\n                    -2.0078642,\n                    -1.8958111,\n                    -0.9743969,\n                    -1.3751584,\n                    -1.2233984,\n                    -1.2176247,\n                    -0.9189738,\n                    -0.91843736,\n                    -0.9539633,\n                    -0.8822781,\n                    -1.2415289,\n                    -1.7231786,\n                    -1.578393,\n                    -1.3576747,\n                    -1.1325957,\n                    -1.0613661,\n                    -1.2737293,\n                    -1.2220944,\n                    -1.7856517,\n                    -1.8723731,\n                    -1.3435957,\n                    -1.2633096,\n                    -1.1389157,\n                    -1.1266003,\n                    -0.8861719,\n                    -0.85776955,\n                    -1.6702726,\n                    -4.3272867,\n                    -1.3503591,\n                    -1.742206,\n                    -2.5929413,\n                    -2.5635533,\n                    -1.3737376,\n                    -1.3955579,\n                    -1.5927967,\n                    -4.3128505,\n                    -4.2600355,\n                    -2.501178,\n                    -3.3870676,\n                    -3.9008584,\n                    
-4.1824965,\n                    -3.6452065,\n                    -1.3949506,\n                    -1.9210273,\n                    -1.967188,\n                    -1.536652,\n                    -3.3252275,\n                    -2.2511718,\n                    -1.9874296,\n                    -1.7335365,\n                    -2.4373407,\n                    -2.9089544,\n                    -3.9528072,\n                    -4.4839187,\n                    -1.9656321,\n                    -1.8781765,\n                    -3.0906434,\n                    -5.065713,\n                    -4.014744,\n                    -3.8616838,\n                    -1.7609141,\n                    -2.0917294,\n                    -1.860987,\n                    -2.1770885,\n                    -3.6821332,\n                    -3.2795827,\n                    -1.9278725,\n                    -2.6800344,\n                    -5.78242,\n                    -1.4867266,\n                    -3.017378,\n                    -1.3494822,\n                    -2.7585256,\n                    -1.5266467,\n                    -7.224387,\n                    -1.0098683,\n                    -1.9480876,\n                    -1.9703789,\n                    -3.3225334,\n                    -1.389721,\n                    -1.9282464,\n                    -2.6578705,\n                    -2.8685904,\n                    -2.8504508,\n                    -6.6986213,\n                    -1.6179367,\n                    -1.5962142,\n                    -1.4038852,\n                    -1.6786649,\n                    -2.472646,\n                    -3.1394675,\n                    -2.1794803,\n                    -2.0659873,\n                    -3.158665,\n                    -3.1141348,\n                    -2.071478,\n                    -4.5385494,\n                    -3.6139581,\n                    -1.9906989,\n                    -3.6913047,\n                    -3.636456,\n                    
-1.5532333,\n                    -5.768992,\n                    -4.82517,\n                    -1.4060777,\n                    -2.8837552,\n                    -2.9799287,\n                    -1.0938119,\n                    -1.9560072,\n                    -1.0404767,\n                    -1.7204325,\n                    -2.9698248,\n                    -3.0281875,\n                    -2.0829344,\n                    -2.106153,\n                    -1.9701356,\n                    -3.1649125,\n                    -5.150452,\n                    -1.2197741,\n                    -2.5101361,\n                    -2.1056838,\n                    -2.6430736,\n                    -2.108021,\n                    -4.4657874,\n                    -3.626636,\n                    -1.2784696,\n                    -2.9364944,\n                    -1.8963417,\n                    -2.7525795,\n                    -3.784847,\n                    -2.3326762,\n                    -4.461001,\n                    -1.4812615,\n                    -1.3003786,\n                    -1.3552235,\n                    -1.7875011,\n                    -3.5720217,\n                    -1.2042733,\n                    -1.1229393,\n                    -2.3410068,\n                    -1.7073672,\n                    -1.6788995,\n                    -3.0060105\n                ],\n                \"pointIndex\": [\n                    1,\n                    499,\n                    232,\n                    35644102,\n                    95620906,\n                    18805749,\n                    81317621,\n                    34604771,\n                    47007327,\n                    62586261,\n                    93525848,\n                    105772867,\n                    1836297,\n                    36431055,\n                    52379127,\n                    52079831,\n                    61376238,\n                    85044418,\n                    41659805,\n                 
   90062343,\n                    31183374,\n                    117254947,\n                    107884975,\n                    30201518,\n                    39283927,\n                    70478305,\n                    27333242,\n                    49959075,\n                    52821453,\n                    57541686,\n                    61873590,\n                    66612382,\n                    45591007,\n                    26443451,\n                    49454191,\n                    82171151,\n                    99534729,\n                    96420884,\n                    101045942,\n                    30411828,\n                    59671855,\n                    37696441,\n                    3305404,\n                    35179351,\n                    67193486,\n                    79432577,\n                    115617457,\n                    55276385,\n                    21088404,\n                    113881020,\n                    8555817,\n                    47485812,\n                    75662933,\n                    9315052,\n                    74555298,\n                    57313488,\n                    114520082,\n                    59780069,\n                    10015857,\n                    112869350,\n                    82799122,\n                    116286262,\n                    106461743,\n                    112690456,\n                    55923115,\n                    104375627,\n                    76843016,\n                    80456232,\n                    111216669,\n                    84333327,\n                    88958071,\n                    90820851,\n                    94593823,\n                    98821972,\n                    16298556,\n                    103044415,\n                    104144207,\n                    111522983,\n                    116762199,\n                    123031926,\n                    498\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n          
      \"size\": 232,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 6252646384454912542\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.48593023,\n                    -0.49811634,\n                    -0.5051228,\n                    -0.52852297,\n                    -0.4998905,\n                    -0.50856495,\n                    -0.56792754,\n                    -0.58318883,\n                    -0.5442299,\n                    -0.57571816,\n                    -0.5104713,\n                    -0.5443268,\n                    -0.5515343,\n                    -0.60608447,\n                    -0.6757374,\n                    -0.6830418,\n                    -0.6755409,\n                    -0.59016246,\n                    -0.5454576,\n                    -0.7096835,\n                    -0.61583054,\n                    -0.5266673,\n                    -0.51681995,\n                    -0.56019515,\n                    -0.5993234,\n                    -0.6495098,\n                    -0.6438212,\n                    -0.61164343,\n                    -0.65656334,\n                    -0.72602475,\n                    -0.8520472,\n                    -0.8298695,\n                    -0.68893903,\n                    -0.80443794,\n                    -0.8151453,\n                    -0.6248744,\n                    -0.7932084,\n                    -0.5832469,\n                    -0.59996295,\n                    -0.7268257,\n                    -0.8978948,\n                    -0.628976,\n                    -0.703311,\n                    -0.9101348,\n                    -0.7230928,\n                    -0.7509946,\n              
      -0.81770027,\n                    -1.0394619,\n                    -0.5726214,\n                    -0.9880306,\n                    -0.7951019,\n                    -0.8072624,\n                    -0.8245114,\n                    -0.7996366,\n                    -0.64646524,\n                    -0.6428294,\n                    -0.62054175,\n                    -0.7876593,\n                    -0.95186293,\n                    -1.3845879,\n                    -1.0309538,\n                    -1.4924188,\n                    -0.8735874,\n                    -0.9024334,\n                    -1.887193,\n                    -0.73925686,\n                    -0.7173001,\n                    -0.89899385,\n                    -1.2966541,\n                    -2.1132953,\n                    -2.0209932,\n                    -0.92569464,\n                    -0.66766036,\n                    -1.3062632,\n                    -1.2196876,\n                    -1.1793561,\n                    -0.70380116,\n                    -2.359043,\n                    -1.2396822,\n                    -1.0203435,\n                    -0.9604665,\n                    -1.758841,\n                    -1.2739277,\n                    -0.69405615,\n                    -1.3159864,\n                    -0.9570915,\n                    -1.4841802,\n                    -1.0174404,\n                    -1.4231768,\n                    -0.9732836,\n                    -0.95332456,\n                    -0.87739766,\n                    -1.1183963,\n                    -1.3045577,\n                    -2.6690967,\n                    -1.071694,\n                    -1.3986906,\n                    -0.6189145,\n                    -0.8985403,\n                    -1.2814728,\n                    -1.7861696,\n                    -1.1929153,\n                    -0.8898458,\n                    -0.86690474,\n                    -0.80744475,\n                    -1.1774896,\n                    
-1.108945,\n                    -2.056511,\n                    -2.0676262,\n                    -0.7846663,\n                    -0.81444526,\n                    -0.6766059,\n                    -0.9041935,\n                    -0.73590857,\n                    -0.6264732,\n                    -0.81851256,\n                    -2.6052191,\n                    -1.8843722,\n                    -1.044584,\n                    -1.6357023,\n                    -1.6401404,\n                    -1.5270939,\n                    -2.2869985,\n                    -1.7568169,\n                    -1.7991313,\n                    -3.001768,\n                    -5.336341,\n                    -1.6224685,\n                    -1.0219606,\n                    -4.603571,\n                    -4.1932673,\n                    -3.042347,\n                    -1.9126862,\n                    -0.7629551,\n                    -3.189313,\n                    -2.7016556,\n                    -1.118586,\n                    -2.1607187,\n                    -2.0055864,\n                    -2.7484715,\n                    -3.1684074,\n                    -3.83769,\n                    -2.0820234,\n                    -1.0559535,\n                    -1.1772017,\n                    -1.330181,\n                    -1.6856858,\n                    -1.6979719,\n                    -2.530777,\n                    -2.7738214,\n                    -2.2110868,\n                    -1.4996016,\n                    -1.2965591,\n                    -4.6498537,\n                    -1.8551095,\n                    -2.5414307,\n                    -3.570963,\n                    -1.2399164,\n                    -2.8060694,\n                    -2.1512334,\n                    -1.8322636,\n                    -4.8060484,\n                    -1.9000674,\n                    -2.0965776,\n                    -2.3850784,\n                    -4.606346,\n                    -2.907361,\n                    
-3.545009,\n                    -1.0164293,\n                    -2.7142386,\n                    -3.3618717,\n                    -1.5112023,\n                    -0.9955293,\n                    -2.7666876,\n                    -2.1661737,\n                    -1.4990155,\n                    -1.3076473,\n                    -2.456527,\n                    -1.6418482,\n                    -2.2069795,\n                    -1.9657013,\n                    -3.255251,\n                    -1.8241007,\n                    -1.5874321,\n                    -3.0413682,\n                    -2.5288503,\n                    -1.122015,\n                    -3.3506832,\n                    -1.7865183,\n                    -3.0265481,\n                    -3.1617188,\n                    -2.043597,\n                    -1.6243017,\n                    -1.6693872,\n                    -1.8202813,\n                    -6.2588425,\n                    -0.658507,\n                    -1.9051749,\n                    -1.0312359,\n                    -1.6516533,\n                    -2.6110055,\n                    -2.2804751,\n                    -2.2768223,\n                    -2.914403,\n                    -3.3968577,\n                    -1.5329814,\n                    -4.7806764,\n                    -1.8757861,\n                    -2.1339443,\n                    -1.2909119,\n                    -3.560735,\n                    -1.4998031,\n                    -1.5749999,\n                    -1.4923608,\n                    -1.1537538,\n                    -4.7296,\n                    -2.440965,\n                    -4.2241488,\n                    -2.3822827,\n                    -2.3952546,\n                    -1.0642797,\n                    -1.7057232,\n                    -1.2805316,\n                    -1.9372311,\n                    -1.3188204,\n                    -4.5041175,\n                    -2.726005,\n                    -0.8250985,\n                    
-1.1002651,\n                    -1.376234,\n                    -0.9265303\n                ],\n                \"pointIndex\": [\n                    1,\n                    498,\n                    231,\n                    121903249,\n                    82609045,\n                    53657487,\n                    83469870,\n                    10321709,\n                    50627244,\n                    59885212,\n                    77692286,\n                    98141695,\n                    87402624,\n                    42160830,\n                    8468170,\n                    77416666,\n                    34591308,\n                    12532761,\n                    26185935,\n                    91499350,\n                    75331836,\n                    123483511,\n                    15452988,\n                    8325712,\n                    21363363,\n                    118013789,\n                    86219309,\n                    88635229,\n                    28187500,\n                    91925545,\n                    114866803,\n                    64359771,\n                    1014791,\n                    70322355,\n                    116900125,\n                    101094304,\n                    84425561,\n                    104223812,\n                    97907689,\n                    110328330,\n                    121265951,\n                    35245592,\n                    15199966,\n                    38695244,\n                    37808698,\n                    4744599,\n                    87014766,\n                    41946632,\n                    10751397,\n                    44730152,\n                    21616370,\n                    52919691,\n                    114313344,\n                    12902561,\n                    117409059,\n                    113520849,\n                    57750588,\n                    58573017,\n                    114622407,\n                    72330758,\n                
    23440117,\n                    26354919,\n                    68648928,\n                    68002172,\n                    25188338,\n                    71817058,\n                    87196418,\n                    12687349,\n                    25351371,\n                    82891487,\n                    84141569,\n                    88845534,\n                    90204855,\n                    95439583,\n                    117258970,\n                    6922222,\n                    110083821,\n                    115550392,\n                    14129703,\n                    84815374\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 231,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -3921434996928360891\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.59787774,\n                    -0.60145414,\n                    -0.6037318,\n                    -0.60338205,\n                    -0.6137846,\n                    -0.6112629,\n                    -0.62412167,\n                    -0.62555826,\n                    -0.692133,\n                    -0.703087,\n                    -0.62788475,\n                    -0.6285306,\n                    -0.6236274,\n                    -0.73515266,\n                    -0.738029,\n                    -0.7654694,\n                    -0.6870156,\n                    -0.73044413,\n                    -0.7317749,\n                    -0.7283749,\n                    -0.9046549,\n                    -0.63757515,\n                    -0.90129393,\n                    -0.63576126,\n                    -0.65300924,\n                    
-0.6393257,\n                    -0.624173,\n                    -1.0669246,\n                    -0.8360199,\n                    -0.88732535,\n                    -0.9085496,\n                    -0.8281918,\n                    -1.0321151,\n                    -0.69639766,\n                    -1.056624,\n                    -0.7568813,\n                    -0.83730763,\n                    -1.3531302,\n                    -0.79367673,\n                    -0.746458,\n                    -1.0654767,\n                    -0.9303029,\n                    -1.0939734,\n                    -0.8892214,\n                    -0.6607892,\n                    -1.0712175,\n                    -0.97386837,\n                    -1.092996,\n                    -0.82881105,\n                    -0.70330036,\n                    -0.80185133,\n                    -0.8514173,\n                    -0.69828564,\n                    -0.70050687,\n                    -0.6848632,\n                    -1.1613108,\n                    -1.4207476,\n                    -0.8566658,\n                    -1.1068753,\n                    -1.0668347,\n                    -1.0640814,\n                    -1.2468433,\n                    -1.1292615,\n                    -1.0954584,\n                    -1.9068737,\n                    -1.4202605,\n                    -2.034829,\n                    -0.73155093,\n                    -0.86568165,\n                    -2.4108014,\n                    -1.6762967,\n                    -1.0485967,\n                    -1.0873181,\n                    -0.904395,\n                    -1.1199968,\n                    -2.1598327,\n                    -1.4283174,\n                    -1.0384696,\n                    -0.84582597,\n                    -2.4886782,\n                    -0.7779316,\n                    -1.0970025,\n                    -1.1856172,\n                    -1.9569024,\n                    -3.981397,\n                    -1.3820696,\n 
                   -1.8495582,\n                    -1.303945,\n                    -1.7128863,\n                    -0.851952,\n                    -1.3258555,\n                    -2.0642643,\n                    -1.0822942,\n                    -1.0181471,\n                    -1.0585626,\n                    -1.5800385,\n                    -1.1739537,\n                    -0.9560621,\n                    -1.034065,\n                    -0.7476173,\n                    -1.2199632,\n                    -0.8563682,\n                    -1.7596116,\n                    -0.9308764,\n                    -0.9294558,\n                    -0.700496,\n                    -0.9780324,\n                    -1.3246034,\n                    -0.9090146,\n                    -1.4302996,\n                    -0.76940614,\n                    -3.9980063,\n                    -2.1296775,\n                    -1.9396099,\n                    -2.2312093,\n                    -1.1239054,\n                    -3.0561671,\n                    -1.9985267,\n                    -1.5552648,\n                    -1.8671099,\n                    -1.8968765,\n                    -1.5244303,\n                    -1.5921745,\n                    -1.4288629,\n                    -1.7421844,\n                    -2.1218357,\n                    -1.7104133,\n                    -2.6510508,\n                    -1.3104323,\n                    -3.057764,\n                    -2.031021,\n                    -1.4565883,\n                    -2.0348382,\n                    -2.2601967,\n                    -2.920707,\n                    -1.2729836,\n                    -3.1982338,\n                    -0.99266785,\n                    -1.7109475,\n                    -2.6560607,\n                    -2.4753838,\n                    -2.1877108,\n                    -2.064123,\n                    -1.1081226,\n                    -3.0854433,\n                    -5.3121643,\n                    
-1.3218622,\n                    -2.3300076,\n                    -3.1619358,\n                    -1.2176294,\n                    -1.3897915,\n                    -4.1942477,\n                    -2.4851782,\n                    -2.5883253,\n                    -2.5248022,\n                    -1.5893021,\n                    -3.42884,\n                    -1.4305729,\n                    -1.9520689,\n                    -3.5295646,\n                    -2.5297673,\n                    -2.0775554,\n                    -1.0084162,\n                    -1.5056899,\n                    -1.9147689,\n                    -1.6083453,\n                    -1.1871455,\n                    -2.0726132,\n                    -2.1473198,\n                    -4.442302,\n                    -4.670319,\n                    -3.5520089,\n                    -1.6042694,\n                    -3.1528566,\n                    -4.5269594,\n                    -2.1927118,\n                    -1.8097452,\n                    -2.7539496,\n                    -4.4474444,\n                    -1.2035335,\n                    -1.1168379,\n                    -1.3293927,\n                    -2.0163567,\n                    -2.2053635,\n                    -3.6734867,\n                    -2.1019855,\n                    -1.3774385,\n                    -1.8108878,\n                    -1.7328309,\n                    -4.0557847,\n                    -6.4212666,\n                    -2.2095923,\n                    -5.359125,\n                    -1.4784194,\n                    -2.1430738,\n                    -3.644639,\n                    -1.284663,\n                    -3.362314,\n                    -1.093306,\n                    -2.4446,\n                    -1.1570675,\n                    -1.4576575,\n                    -2.2837992,\n                    -4.856039,\n                    -1.7351286,\n                    -2.236084,\n                    -2.0651164,\n                    
-1.4864243,\n                    -1.4940932,\n                    -1.6566368,\n                    -1.8886653,\n                    -0.74667937,\n                    -2.0818756,\n                    -1.1767255,\n                    -1.7952884,\n                    -1.5501437,\n                    -1.3899502,\n                    -1.6304728,\n                    -1.4943737,\n                    -2.758686,\n                    -2.8887265,\n                    -2.2842896,\n                    -1.0909864\n                ],\n                \"pointIndex\": [\n                    4,\n                    503,\n                    223,\n                    84054172,\n                    109366651,\n                    51339656,\n                    88791098,\n                    4072988,\n                    54556737,\n                    75121037,\n                    86904619,\n                    102956360,\n                    91321010,\n                    118585526,\n                    78577257,\n                    63311712,\n                    67181236,\n                    52778,\n                    82908703,\n                    94184616,\n                    106563399,\n                    78881456,\n                    66679141,\n                    41331958,\n                    7050563,\n                    48465170,\n                    96787920,\n                    37268129,\n                    82110402,\n                    59620455,\n                    119453249,\n                    71640439,\n                    75552290,\n                    77771305,\n                    46659868,\n                    28981394,\n                    62183364,\n                    12446746,\n                    13219385,\n                    115972942,\n                    52801009,\n                    61792140,\n                    572087,\n                    39266840,\n                    2980561,\n                    32084687,\n                    
45569065,\n                    39983677,\n                    47592681,\n                    3347835,\n                    123538200,\n                    52604221,\n                    20106550,\n                    54042751,\n                    22117839,\n                    58149627,\n                    116452996,\n                    61076830,\n                    66799983,\n                    69136378,\n                    40642821,\n                    22894287,\n                    123149293,\n                    33336803,\n                    77739806,\n                    81458609,\n                    81692046,\n                    85920934,\n                    87924055,\n                    30059320,\n                    92925472,\n                    95823877,\n                    99698152,\n                    121454403,\n                    59960944,\n                    116220425,\n                    117475741,\n                    499\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 223,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -7058296641388226677\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.54927164,\n                    -0.55799776,\n                    -0.56177443,\n                    -0.5784826,\n                    -0.5607427,\n                    -0.5943863,\n                    -0.56446606,\n                    -0.63234544,\n                    -0.6444895,\n                    -0.6149962,\n                    -0.5726348,\n                    -0.6144211,\n                    -0.5976512,\n                    -0.9134983,\n                    
-0.5969545,\n                    -0.6978777,\n                    -0.662781,\n                    -0.6462334,\n                    -0.82320976,\n                    -0.62632024,\n                    -0.78237295,\n                    -0.6009164,\n                    -0.71237713,\n                    -0.63300943,\n                    -0.68879,\n                    -0.8736334,\n                    -0.6628779,\n                    -1.0856007,\n                    -0.92329025,\n                    -1.0283417,\n                    -0.772051,\n                    -0.78350765,\n                    -0.8919709,\n                    -0.7718917,\n                    -1.0579466,\n                    -0.7923724,\n                    -0.69287163,\n                    -0.8952094,\n                    -0.85189104,\n                    -0.65595937,\n                    -0.7328316,\n                    -1.1698253,\n                    -0.95717037,\n                    -0.8587923,\n                    -0.64600074,\n                    -0.73141485,\n                    -0.87741977,\n                    -1.103363,\n                    -0.63726646,\n                    -0.8150458,\n                    -0.74368244,\n                    -0.8759372,\n                    -1.081512,\n                    -0.71728975,\n                    -0.688069,\n                    -1.2152156,\n                    -1.1525712,\n                    -1.2191999,\n                    -1.0493709,\n                    -1.5191622,\n                    -1.5661151,\n                    -1.6331143,\n                    -1.2799627,\n                    -1.7562692,\n                    -1.150978,\n                    -1.178991,\n                    -0.9250156,\n                    -0.86997193,\n                    -1.4348046,\n                    -1.4207041,\n                    -1.1504325,\n                    -0.81934685,\n                    -1.3765908,\n                    -1.1598345,\n                    
-0.76282966,\n                    -1.2485176,\n                    -0.9627283,\n                    -1.0175,\n                    -1.3302476,\n                    -1.4262129,\n                    -0.73058355,\n                    -0.7768302,\n                    -1.0742236,\n                    -1.2367499,\n                    -1.4979956,\n                    -1.3816236,\n                    -1.5363247,\n                    -1.4642798,\n                    -1.3516219,\n                    -0.72886014,\n                    -0.84243894,\n                    -1.1391242,\n                    -0.816415,\n                    -1.2249358,\n                    -1.1653829,\n                    -1.1568464,\n                    -1.409465,\n                    -0.7573363,\n                    -1.8553492,\n                    -0.9267509,\n                    -1.277529,\n                    -0.84015083,\n                    -1.4367083,\n                    -1.1704016,\n                    -1.0822623,\n                    -1.9117612,\n                    -1.4373354,\n                    -1.1935893,\n                    -0.72218204,\n                    -0.9773071,\n                    -2.307135,\n                    -1.6119989,\n                    -1.4311005,\n                    -1.6152966,\n                    -3.8505075,\n                    -2.5684729,\n                    -1.4745631,\n                    -2.283275,\n                    -1.9480804,\n                    -1.5828394,\n                    -2.2038364,\n                    -1.8381954,\n                    -2.7674043,\n                    -3.1706169,\n                    -1.6801208,\n                    -1.4045823,\n                    -3.458629,\n                    -5.5322,\n                    -2.0492415,\n                    -1.3397441,\n                    -1.2684066,\n                    -1.728175,\n                    -4.826077,\n                    -1.3611041,\n                    -1.0132012,\n               
     -2.3263867,\n                    -1.668477,\n                    -5.789353,\n                    -1.8627526,\n                    -3.0538514,\n                    -4.4253926,\n                    -1.650529,\n                    -2.3139496,\n                    -2.480115,\n                    -3.4141963,\n                    -2.1213675,\n                    -1.5043126,\n                    -1.4033012,\n                    -3.1311145,\n                    -0.9516954,\n                    -2.1204515,\n                    -3.598058,\n                    -1.4977916,\n                    -1.0788697,\n                    -1.3113519,\n                    -3.0442638,\n                    -4.627087,\n                    -2.0146139,\n                    -1.3869731,\n                    -2.342794,\n                    -1.5375688,\n                    -1.2935922,\n                    -4.4602623,\n                    -2.0731196,\n                    -4.6459804,\n                    -3.7177625,\n                    -3.0546892,\n                    -2.069629,\n                    -1.2756584,\n                    -3.0119967,\n                    -1.6468534,\n                    -1.5275029,\n                    -2.2072217,\n                    -2.840689,\n                    -1.5935591,\n                    -1.7980634,\n                    -2.1122496,\n                    -3.8701308,\n                    -2.2193303,\n                    -3.620466,\n                    -1.0340712,\n                    -1.5489097,\n                    -2.2800333,\n                    -1.4681567,\n                    -4.295331,\n                    -0.97169226,\n                    -1.1701701,\n                    -1.4693524,\n                    -1.9215053,\n                    -1.4616832,\n                    -2.4838266,\n                    -1.2926298,\n                    -1.2791344,\n                    -1.4447894,\n                    -2.3319173,\n                    -2.0730073,\n            
        -2.3841429,\n                    -1.9727724,\n                    -1.9029855,\n                    -3.8191602,\n                    -3.4143884,\n                    -1.6520147,\n                    -3.1560063,\n                    -1.1492236,\n                    -1.8619269,\n                    -2.8565183,\n                    -1.4807147,\n                    -2.1151505,\n                    -2.7476447,\n                    -1.7275034,\n                    -2.7608826,\n                    -1.9752641,\n                    -2.1960561,\n                    -1.7852536,\n                    -1.7566513,\n                    -1.6960467,\n                    -1.3483127,\n                    -1.7708883,\n                    -2.5635688,\n                    -1.6335524,\n                    -1.4366835,\n                    -3.7009428,\n                    -2.626339,\n                    -2.0646389,\n                    -2.1659148,\n                    -1.484712,\n                    -3.0297914,\n                    -2.1841908\n                ],\n                \"pointIndex\": [\n                    0,\n                    504,\n                    228,\n                    97198270,\n                    59470988,\n                    65381995,\n                    79952849,\n                    36792402,\n                    51276755,\n                    81752643,\n                    80230109,\n                    118281718,\n                    109930419,\n                    80413679,\n                    71166889,\n                    21318653,\n                    24855703,\n                    67265095,\n                    120795767,\n                    29750424,\n                    113188732,\n                    33589531,\n                    31694451,\n                    112099069,\n                    13232518,\n                    45225703,\n                    49060933,\n                    51105009,\n                    20662083,\n                 
   57749492,\n                    63758790,\n                    24384450,\n                    110219280,\n                    70322865,\n                    77580408,\n                    53260142,\n                    91836161,\n                    96841947,\n                    6839269,\n                    120096698,\n                    105828287,\n                    108808445,\n                    19274606,\n                    119428017,\n                    44259364,\n                    8363462,\n                    41650447,\n                    68138301,\n                    125558726,\n                    47495101,\n                    4398592,\n                    49768486,\n                    11787755,\n                    53595347,\n                    76192272,\n                    56214662,\n                    31031249,\n                    58698945,\n                    22561727,\n                    61418247,\n                    62348906,\n                    64188278,\n                    66035284,\n                    86677956,\n                    26292697,\n                    28510053,\n                    6018892,\n                    126800998,\n                    83629329,\n                    90448353,\n                    29246911,\n                    99134894,\n                    119796088,\n                    45558469,\n                    108328045,\n                    60162587,\n                    121171674,\n                    124007771,\n                    85941694\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 228,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 1617599988107384674\n            },\n       
     {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.49086484,\n                    -0.49165273,\n                    -0.49508175,\n                    -0.6431985,\n                    -0.5937614,\n                    -0.50219905,\n                    -0.573946,\n                    -0.6606333,\n                    -0.67695975,\n                    -0.66005003,\n                    -0.63093185,\n                    -0.5548935,\n                    -0.5702439,\n                    -0.71829444,\n                    -0.60590047,\n                    -0.7012481,\n                    -0.7132402,\n                    -0.729588,\n                    -0.697213,\n                    -0.69351006,\n                    -0.72366875,\n                    -0.6569782,\n                    -0.7441715,\n                    -0.58781534,\n                    -0.5558926,\n                    -0.67732096,\n                    -0.5800482,\n                    -0.7528965,\n                    -0.81201863,\n                    -0.7412216,\n                    -0.6288245,\n                    -1.2486026,\n                    -0.75286245,\n                    -0.72401667,\n                    -0.71564144,\n                    -0.76534486,\n                    -0.9099728,\n                    -1.0183343,\n                    -0.70542717,\n                    -0.70135754,\n                    -0.91260487,\n                    -0.8092986,\n                    -0.7336066,\n                    -0.7567574,\n                    -0.7708158,\n                    -0.84383863,\n                    -0.81836164,\n                    -0.68041724,\n                    -0.6287525,\n                    -0.6440457,\n                    -0.7410818,\n                    -0.7212312,\n                    -0.71242625,\n                    -0.67231476,\n                    -0.63468635,\n                    -0.7622313,\n                    -0.8208822,\n                 
   -1.1445706,\n                    -0.94941014,\n                    -0.8612036,\n                    -1.707661,\n                    -0.9243854,\n                    -1.9626482,\n                    -1.5958457,\n                    -1.3077794,\n                    -1.1861492,\n                    -1.1635512,\n                    -2.1751227,\n                    -0.8469007,\n                    -1.4496542,\n                    -1.5342058,\n                    -0.96538514,\n                    -1.3460073,\n                    -1.419018,\n                    -1.0653356,\n                    -1.2852613,\n                    -1.2012281,\n                    -0.8473538,\n                    -0.8943203,\n                    -0.8048798,\n                    -1.167146,\n                    -1.3002725,\n                    -1.0605211,\n                    -0.92645526,\n                    -1.69568,\n                    -0.7741866,\n                    -1.0942045,\n                    -1.6122143,\n                    -0.9987477,\n                    -1.2992978,\n                    -1.3553501,\n                    -1.4368896,\n                    -1.2315027,\n                    -1.1274419,\n                    -1.8796052,\n                    -0.9360587,\n                    -1.1173197,\n                    -1.2018306,\n                    -1.2060379,\n                    -0.7550845,\n                    -1.3723761,\n                    -0.92328745,\n                    -0.9710938,\n                    -1.2305354,\n                    -1.0390061,\n                    -1.0124133,\n                    -0.734787,\n                    -0.6868354,\n                    -1.0925033,\n                    -1.7397844,\n                    -0.75704724,\n                    -0.93412423,\n                    -1.0038755,\n                    -0.8340533,\n                    -1.0903175,\n                    -1.6320931,\n                    -5.0747957,\n                    -1.2733006,\n    
                -2.6070294,\n                    -2.010462,\n                    -1.7844871,\n                    -3.348369,\n                    -2.3121226,\n                    -1.5482043,\n                    -0.97831064,\n                    -2.7482018,\n                    -2.2871487,\n                    -2.119465,\n                    -2.905391,\n                    -2.1168177,\n                    -3.059811,\n                    -1.3497925,\n                    -2.189309,\n                    -1.8456689,\n                    -2.1114528,\n                    -2.5084443,\n                    -2.2272942,\n                    -1.8464441,\n                    -2.7921028,\n                    -4.0985055,\n                    -1.874381,\n                    -4.627585,\n                    -2.7565491,\n                    -1.4172932,\n                    -1.0251176,\n                    -2.5863686,\n                    -2.255447,\n                    -5.3265395,\n                    -1.7419072,\n                    -1.784064,\n                    -1.4473236,\n                    -2.1382859,\n                    -1.6768821,\n                    -2.059615,\n                    -2.2160065,\n                    -0.94110626,\n                    -2.160445,\n                    -1.3646688,\n                    -1.2020136,\n                    -1.2484066,\n                    -2.6778018,\n                    -2.6019099,\n                    -2.7626283,\n                    -1.5040901,\n                    -2.4336689,\n                    -1.3975806,\n                    -5.2075863,\n                    -1.9458374,\n                    -1.9337393,\n                    -2.611955,\n                    -2.93051,\n                    -2.350819,\n                    -1.2660667,\n                    -1.3496511,\n                    -1.3058685,\n                    -3.110164,\n                    -3.809094,\n                    -3.3404458,\n                    -1.8760198,\n       
             -1.8080118,\n                    -1.6777174,\n                    -2.7298841,\n                    -1.9431881,\n                    -2.405735,\n                    -1.4490008,\n                    -1.3650403,\n                    -1.4648993,\n                    -1.7826031,\n                    -1.1778947,\n                    -1.9751103,\n                    -2.531089,\n                    -2.5696335,\n                    -6.8766155,\n                    -3.561331,\n                    -1.7344441,\n                    -2.2454484,\n                    -1.7390339,\n                    -1.3827872,\n                    -1.3778417,\n                    -1.3080233,\n                    -1.2370479,\n                    -4.110881,\n                    -2.2114515,\n                    -2.4566734,\n                    -1.7254595,\n                    -1.7614281,\n                    -0.99026966,\n                    -2.1424553,\n                    -1.7575904,\n                    -4.0985904,\n                    -1.8833361,\n                    -1.3063549,\n                    -1.2607809,\n                    -0.82764417,\n                    -0.8102777,\n                    -2.1919854,\n                    -1.403665,\n                    -1.3009878,\n                    -2.792962,\n                    -3.3374386,\n                    -2.8087707,\n                    -2.2060149,\n                    -0.84837985,\n                    -2.9999259,\n                    -1.3997679,\n                    -2.5588124,\n                    -2.0231915,\n                    -1.9132456,\n                    -0.84803754,\n                    -1.3362615\n                ],\n                \"pointIndex\": [\n                    0,\n                    504,\n                    230,\n                    110086351,\n                    101424723,\n                    52573038,\n                    95008443,\n                    15560022,\n                    19982898,\n        
            64057438,\n                    79464555,\n                    107052269,\n                    93002383,\n                    42159278,\n                    49496892,\n                    119779632,\n                    111113351,\n                    26064893,\n                    12176857,\n                    6657738,\n                    108593992,\n                    126506494,\n                    72496706,\n                    42621811,\n                    18129767,\n                    4097229,\n                    1824239,\n                    9341177,\n                    109152575,\n                    22442466,\n                    82928928,\n                    91678956,\n                    79276268,\n                    118732690,\n                    78958311,\n                    6067895,\n                    97094175,\n                    103002712,\n                    107319471,\n                    120864622,\n                    2795147,\n                    34753592,\n                    3642607,\n                    37322672,\n                    25049365,\n                    75391554,\n                    12835239,\n                    106173089,\n                    43755132,\n                    117583954,\n                    46761671,\n                    8965966,\n                    50974275,\n                    95736136,\n                    97367112,\n                    59029487,\n                    86855671,\n                    126257878,\n                    123292235,\n                    65843424,\n                    64392139,\n                    65590301,\n                    72222837,\n                    118300295,\n                    76401247,\n                    77680057,\n                    27451418,\n                    81917883,\n                    28498094,\n                    33457347,\n                    99015206,\n                    98379937,\n                    100477882,\n                
    105021239,\n                    106810149,\n                    116519442,\n                    74534426,\n                    122909395,\n                    125209175,\n                    255019\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 230,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -8928687457762026561\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.56832755,\n                    -0.5909885,\n                    -0.5748976,\n                    -0.60423553,\n                    -0.60178524,\n                    -0.60157627,\n                    -0.587748,\n                    -0.7172052,\n                    -0.6241009,\n                    -0.60629946,\n                    -0.6160531,\n                    -0.63146806,\n                    -0.608597,\n                    -0.59321743,\n                    -0.6879986,\n                    -0.783219,\n                    -0.8522558,\n                    -0.6727227,\n                    -0.8268062,\n                    -0.62091374,\n                    -0.61043733,\n                    -0.74049455,\n                    -0.7057798,\n                    -0.796559,\n                    -0.87044466,\n                    -0.70782137,\n                    -0.7786673,\n                    -0.6604127,\n                    -0.8732423,\n                    -1.4029797,\n                    -0.7945969,\n                    -0.8747885,\n                    -0.8368457,\n                    -0.95190823,\n                    -0.9993142,\n                    -0.8970017,\n                    -0.93625695,\n                    
-0.84799105,\n                    -0.8354849,\n                    -0.79125404,\n                    -0.799949,\n                    -0.67009306,\n                    -0.7891741,\n                    -1.4105603,\n                    -0.8896512,\n                    -1.0135485,\n                    -0.929001,\n                    -0.886674,\n                    -0.8006716,\n                    -1.4901229,\n                    -1.2618842,\n                    -0.90933096,\n                    -0.8073693,\n                    -0.8621052,\n                    -1.0471725,\n                    -0.7722495,\n                    -1.3036199,\n                    -0.87458843,\n                    -1.4554368,\n                    -1.5855098,\n                    -1.9130875,\n                    -0.94120586,\n                    -1.0607948,\n                    -0.9088619,\n                    -1.4801517,\n                    -0.99234706,\n                    -1.3816286,\n                    -1.2490814,\n                    -1.1207249,\n                    -1.8138934,\n                    -1.0425998,\n                    -1.4596838,\n                    -1.6276789,\n                    -1.2308221,\n                    -1.0301083,\n                    -1.1402522,\n                    -1.4639021,\n                    -0.9087255,\n                    -1.0546715,\n                    -0.9879512,\n                    -1.1541631,\n                    -0.82525426,\n                    -1.0790806,\n                    -0.9471458,\n                    -0.85101986,\n                    -1.233984,\n                    -2.3894157,\n                    -2.9223077,\n                    -1.5428509,\n                    -1.5711381,\n                    -1.0467322,\n                    -1.8167585,\n                    -1.3438088,\n                    -1.2582407,\n                    -1.2467229,\n                    -1.6579001,\n                    -1.990238,\n                    -1.5339531,\n   
                 -1.0503933,\n                    -1.6122134,\n                    -1.8513618,\n                    -1.6831448,\n                    -1.2689091,\n                    -1.0429959,\n                    -1.3003299,\n                    -0.8789561,\n                    -0.94471043,\n                    -1.4198842,\n                    -2.455136,\n                    -1.2004322,\n                    -1.2269163,\n                    -1.12566,\n                    -0.7759718,\n                    -1.4541564,\n                    -4.4826355,\n                    -1.3858997,\n                    -1.4449863,\n                    -1.9620763,\n                    -1.6177365,\n                    -1.7349641,\n                    -1.9133793,\n                    -3.974498,\n                    -2.1262734,\n                    -1.7611657,\n                    -1.681921,\n                    -2.7635946,\n                    -3.0031888,\n                    -5.914061,\n                    -1.1342934,\n                    -2.911582,\n                    -1.8979477,\n                    -1.1300302,\n                    -2.3564131,\n                    -2.2195525,\n                    -1.4680218,\n                    -1.3823087,\n                    -3.8461316,\n                    -2.432869,\n                    -1.3012831,\n                    -3.8942091,\n                    -2.4783866,\n                    -1.3676889,\n                    -1.2583317,\n                    -2.443607,\n                    -3.2685487,\n                    -2.3278441,\n                    -4.7823668,\n                    -1.8398348,\n                    -2.6126237,\n                    -2.504085,\n                    -1.9899489,\n                    -4.835528,\n                    -2.721473,\n                    -2.3454478,\n                    -3.7778504,\n                    -1.8538367,\n                    -1.1873996,\n                    -1.3453918,\n                    -1.1090251,\n 
                   -5.224681,\n                    -1.4397637,\n                    -1.180208,\n                    -1.9746033,\n                    -1.0095749,\n                    -1.1477605,\n                    -4.054302,\n                    -1.5409443,\n                    -1.0316113,\n                    -5.2108626,\n                    -3.1235392,\n                    -1.0136346,\n                    -4.280433,\n                    -1.3601183,\n                    -3.8717458,\n                    -2.4440918,\n                    -4.286065,\n                    -5.50645,\n                    -2.420915,\n                    -1.6485515,\n                    -2.066975,\n                    -3.7196562,\n                    -2.0036898,\n                    -1.4285997,\n                    -3.475406,\n                    -3.0394547,\n                    -2.7931194,\n                    -1.4155692,\n                    -3.79178,\n                    -5.0270667,\n                    -2.0082972,\n                    -1.690726,\n                    -2.5187201,\n                    -1.95259,\n                    -3.773377,\n                    -2.530886,\n                    -2.3352246,\n                    -2.2920215,\n                    -2.7772734,\n                    -1.3019248,\n                    -2.7455304,\n                    -3.8654735,\n                    -4.183722,\n                    -1.9184648,\n                    -2.168456,\n                    -2.1282635,\n                    -3.9138813,\n                    -1.7724588,\n                    -1.7226524,\n                    -1.1068506,\n                    -1.533566,\n                    -2.2625072,\n                    -1.0837358,\n                    -2.4536111,\n                    -1.4014043,\n                    -1.1351576,\n                    -1.6732528,\n                    -1.6587623,\n                    -3.43352,\n                    -3.4383984,\n                    -3.6564918,\n          
          -1.3637918,\n                    -2.2445352,\n                    -5.1128774,\n                    -3.1354551,\n                    -2.1848104,\n                    -1.7619399,\n                    -1.5470188\n                ],\n                \"pointIndex\": [\n                    0,\n                    503,\n                    227,\n                    90623523,\n                    26277715,\n                    80852668,\n                    96154707,\n                    16002266,\n                    21880427,\n                    115108,\n                    28454336,\n                    115537322,\n                    58754191,\n                    101519352,\n                    45299020,\n                    20168772,\n                    63804658,\n                    71557155,\n                    80167543,\n                    935797,\n                    115024005,\n                    32761132,\n                    34830558,\n                    16272046,\n                    19323507,\n                    94634636,\n                    3442190,\n                    49227875,\n                    52614915,\n                    87997601,\n                    47361516,\n                    71328471,\n                    11621999,\n                    21571484,\n                    126664324,\n                    29477886,\n                    91369324,\n                    97478496,\n                    112493209,\n                    118859279,\n                    14477878,\n                    103491542,\n                    100150288,\n                    121802827,\n                    1599154,\n                    37090009,\n                    39107810,\n                    40467407,\n                    17810789,\n                    42202527,\n                    43379152,\n                    74173352,\n                    22488157,\n                    60973194,\n                    77940776,\n                    75755120,\n   
                 54860923,\n                    72404857,\n                    52348407,\n                    69212122,\n                    65407855,\n                    100722638,\n                    116220869,\n                    24021941,\n                    75225950,\n                    105001139,\n                    68756504,\n                    82152011,\n                    6260319,\n                    93904377,\n                    27998054,\n                    112965178,\n                    106109504,\n                    105443757,\n                    48993265,\n                    116075177,\n                    119873334,\n                    126231308,\n                    253639\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 227,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -1388995391054092274\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.31804442,\n                    -0.43489292,\n                    -0.42907506,\n                    -0.4363616,\n                    -0.45597282,\n                    -0.46680713,\n                    -0.46190825,\n                    -0.5677014,\n                    -0.4563058,\n                    -0.6166242,\n                    -0.5039146,\n                    -0.6246354,\n                    -0.4707326,\n                    -0.47779357,\n                    -0.49204805,\n                    -0.64231163,\n                    -0.6764985,\n                    -0.46582282,\n                    -0.51274574,\n                    -0.6255991,\n                    -0.6194547,\n                    -0.5232377,\n        
            -0.63157463,\n                    -0.6706966,\n                    -0.63934845,\n                    -0.5277276,\n                    -0.48863217,\n                    -0.49394917,\n                    -0.4862815,\n                    -1.0154939,\n                    -0.64515084,\n                    -0.7793933,\n                    -0.6908473,\n                    -0.7567321,\n                    -0.7171721,\n                    -0.46691778,\n                    -0.8643776,\n                    -0.51696473,\n                    -0.84084165,\n                    -0.6723087,\n                    -1.7153779,\n                    -0.62704635,\n                    -0.9834292,\n                    -0.6648248,\n                    -0.6621618,\n                    -0.82280827,\n                    -0.82331115,\n                    -0.679924,\n                    -1.0636495,\n                    -0.6468792,\n                    -0.8472379,\n                    -0.5683926,\n                    -0.61848426,\n                    -0.9037221,\n                    -0.49113053,\n                    -0.638287,\n                    -0.5134129,\n                    -0.88270605,\n                    -0.5657373,\n                    -1.0913377,\n                    -1.5473504,\n                    -0.8016271,\n                    -0.75376856,\n                    -1.2727239,\n                    -1.094562,\n                    -0.7468163,\n                    -0.72751355,\n                    -0.81397045,\n                    -0.93953127,\n                    -0.94911826,\n                    -1.8311458,\n                    -1.4125476,\n                    -0.5176985,\n                    -0.87438446,\n                    -1.5910777,\n                    -0.9631172,\n                    -0.62825584,\n                    -0.92554176,\n                    -1.268599,\n                    -1.4888319,\n                    -1.1603714,\n                    -1.9172816,\n          
          -1.8976915,\n                    -0.7566482,\n                    -0.62849605,\n                    -1.0634012,\n                    -1.5461874,\n                    -2.2122335,\n                    -0.96318877,\n                    -0.95302933,\n                    -0.94853485,\n                    -0.9550196,\n                    -1.105756,\n                    -0.8768027,\n                    -1.3013245,\n                    -0.70743793,\n                    -1.454751,\n                    -1.20427,\n                    -1.0981193,\n                    -0.9563045,\n                    -0.75259453,\n                    -1.0261632,\n                    -0.9617375,\n                    -0.5910247,\n                    -0.9938949,\n                    -0.99083114,\n                    -0.6719092,\n                    -0.9257738,\n                    -1.8758082,\n                    -1.348569,\n                    -0.65144736,\n                    -0.71926266,\n                    -0.78749126,\n                    -1.4659787,\n                    -0.6161229,\n                    -0.9727178,\n                    -0.8877861,\n                    -1.0888059,\n                    -3.7083657,\n                    -1.3748552,\n                    -2.400187,\n                    -2.0805056,\n                    -3.7978256,\n                    -0.9706547,\n                    -1.2985879,\n                    -1.1572261,\n                    -1.219806,\n                    -1.541234,\n                    -1.8615499,\n                    -1.7287426,\n                    -1.1255622,\n                    -2.70784,\n                    -1.9170482,\n                    -1.4877199,\n                    -0.9863343,\n                    -2.0110765,\n                    -4.1267276,\n                    -1.8231001,\n                    -1.3878483,\n                    -2.3702366,\n                    -1.6555223,\n                    -1.9235884,\n                    
-1.8364646,\n                    -1.6715032,\n                    -2.3337338,\n                    -1.5348499,\n                    -1.0097706,\n                    -2.8215172,\n                    -3.0130804,\n                    -1.6238242,\n                    -2.696633,\n                    -2.2997658,\n                    -4.003807,\n                    -1.4826674,\n                    -2.679388,\n                    -1.0749161,\n                    -1.9095988,\n                    -1.6551454,\n                    -4.2207875,\n                    -3.1003954,\n                    -1.5809857,\n                    -1.9965923,\n                    -4.100097,\n                    -3.2477396,\n                    -3.553958,\n                    -2.510146,\n                    -2.1376574,\n                    -4.343759,\n                    -2.1196232,\n                    -1.479722,\n                    -1.9746994,\n                    -1.1078998,\n                    -1.1593446,\n                    -2.383345,\n                    -2.3589444,\n                    -3.51402,\n                    -2.245927,\n                    -3.295619,\n                    -1.8285325,\n                    -1.9398905,\n                    -1.5956552,\n                    -3.3970203,\n                    -1.0234499,\n                    -2.2899134,\n                    -3.5824833,\n                    -7.324093,\n                    -1.2620987,\n                    -1.6717616,\n                    -1.4615635,\n                    -1.7714953,\n                    -4.0805354,\n                    -1.5767369,\n                    -0.87090784,\n                    -3.132244,\n                    -3.5286536,\n                    -1.3311883,\n                    -1.6633921,\n                    -1.9951661,\n                    -1.3551797,\n                    -1.4291936,\n                    -1.3036424,\n                    -1.4725003,\n                    -1.4442756,\n                    
-2.8375487,\n                    -3.914979,\n                    -1.1315118,\n                    -3.6979558,\n                    -1.0114822,\n                    -0.91020817,\n                    -2.0747359,\n                    -2.1834383,\n                    -4.4955087,\n                    -1.40393,\n                    -1.6697918,\n                    -0.9387991,\n                    -1.2710674,\n                    -1.8251337,\n                    -2.156713,\n                    -3.0485265,\n                    -1.4951202,\n                    -2.7489276,\n                    -3.3636158,\n                    -1.2925293,\n                    -1.7190237,\n                    -1.1842874,\n                    -1.8158754,\n                    -1.7622224,\n                    -2.1716104,\n                    -2.079569,\n                    -3.835428,\n                    -1.0511314,\n                    -2.020286,\n                    -1.0010629,\n                    -1.6464177,\n                    -2.954303,\n                    -3.444575\n                ],\n                \"pointIndex\": [\n                    0,\n                    504,\n                    236,\n                    93894141,\n                    57235403,\n                    51292171,\n                    29464013,\n                    37198160,\n                    20519697,\n                    54427788,\n                    84068462,\n                    106255160,\n                    1774023,\n                    40074331,\n                    4427759,\n                    3079778,\n                    85663388,\n                    63804879,\n                    12519718,\n                    114932833,\n                    98378907,\n                    15180335,\n                    36213183,\n                    37758740,\n                    119174445,\n                    105153785,\n                    46290997,\n                    62143965,\n                    41946504,\n  
                  52128933,\n                    23058516,\n                    59278899,\n                    2045518,\n                    69088865,\n                    72568272,\n                    81874749,\n                    123341018,\n                    96074422,\n                    31051790,\n                    105784766,\n                    60160276,\n                    127761926,\n                    2368005,\n                    117854012,\n                    38401570,\n                    73269329,\n                    24952653,\n                    63326851,\n                    106430149,\n                    47568139,\n                    19470288,\n                    1875865,\n                    48137184,\n                    47189058,\n                    109022512,\n                    54731519,\n                    11246436,\n                    124472607,\n                    101606598,\n                    56372736,\n                    57755438,\n                    58921712,\n                    105448807,\n                    64277816,\n                    86411819,\n                    26917341,\n                    652065,\n                    115585707,\n                    86867912,\n                    28333852,\n                    115424301,\n                    91242285,\n                    6613002,\n                    96842457,\n                    2992981,\n                    116744267,\n                    32198787,\n                    112944186,\n                    117543238,\n                    123927971,\n                    18612777,\n                    207553\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 236,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n        
        \"compressed\": true,\n                \"randomSeed\": -1996687561581955494\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5846756,\n                    -0.58689344,\n                    -0.60667974,\n                    -0.6030738,\n                    -0.5918051,\n                    -0.6205509,\n                    -0.61719275,\n                    -0.6469364,\n                    -0.6475224,\n                    -0.65886086,\n                    -0.618924,\n                    -0.7007284,\n                    -0.6607422,\n                    -0.6618274,\n                    -0.82475907,\n                    -0.73422694,\n                    -0.6774076,\n                    -0.75282896,\n                    -1.0639298,\n                    -0.8307625,\n                    -0.88619107,\n                    -0.65084374,\n                    -0.7746913,\n                    -0.8276962,\n                    -0.73104393,\n                    -0.7153365,\n                    -0.6641185,\n                    -0.69109845,\n                    -0.78577524,\n                    -0.8427952,\n                    -1.0567325,\n                    -1.8381795,\n                    -0.73889744,\n                    -0.92306626,\n                    -0.73201686,\n                    -0.80058616,\n                    -0.9315472,\n                    -1.1340989,\n                    -1.1253422,\n                    -1.049055,\n                    -1.1059141,\n                    -1.3064846,\n                    -1.0086405,\n                    -0.92371744,\n                    -0.6526815,\n                    -0.7937004,\n                    -0.8038962,\n                    -1.2402302,\n                    -0.8927977,\n                    -0.7664857,\n                    -0.8206795,\n                    -1.4141115,\n                    -0.7470262,\n                    -0.717384,\n                  
  -0.7282268,\n                    -0.7670583,\n                    -0.71265644,\n                    -1.056092,\n                    -0.94361395,\n                    -0.95633894,\n                    -1.4659557,\n                    -1.0672771,\n                    -3.3476443,\n                    -3.6351054,\n                    -1.8766991,\n                    -1.0499516,\n                    -0.8640289,\n                    -1.3418787,\n                    -1.6710471,\n                    -1.0518789,\n                    -1.3284807,\n                    -1.0672903,\n                    -1.2984877,\n                    -1.4317946,\n                    -1.2819399,\n                    -1.1776006,\n                    -2.0117435,\n                    -1.1909988,\n                    -1.6325643,\n                    -1.3755174,\n                    -1.8467388,\n                    -1.1327515,\n                    -1.5188427,\n                    -1.3899642,\n                    -1.6420921,\n                    -1.043407,\n                    -1.6026639,\n                    -1.138417,\n                    -0.97101223,\n                    -1.271242,\n                    -0.7115588,\n                    -1.3566486,\n                    -0.9234316,\n                    -0.85723275,\n                    -0.8918241,\n                    -1.5019754,\n                    -1.6031859,\n                    -1.1208738,\n                    -1.133076,\n                    -1.1482856,\n                    -0.82539636,\n                    -0.97691697,\n                    -0.90151674,\n                    -1.9680749,\n                    -1.5732255,\n                    -0.97369736,\n                    -1.610725,\n                    -1.5830237,\n                    -1.2785949,\n                    -1.0099097,\n                    -0.8909851,\n                    -0.77351177,\n                    -1.0141395,\n                    -0.7269804,\n                    -1.4642861,\n 
                   -2.7214887,\n                    -1.4997181,\n                    -14.602708,\n                    -1.403811,\n                    -1.4172581,\n                    -1.310325,\n                    -1.5547988,\n                    -2.1956336,\n                    -3.7234614,\n                    -1.7775134,\n                    -3.4160905,\n                    -4.9316273,\n                    -4.792233,\n                    -4.167746,\n                    -2.1372902,\n                    -2.0618155,\n                    -1.0708857,\n                    -2.642648,\n                    -1.2613289,\n                    -1.8870069,\n                    -1.3915552,\n                    -4.009344,\n                    -1.8726304,\n                    -1.9056902,\n                    -2.2972603,\n                    -2.2165756,\n                    -2.9336991,\n                    -2.7637823,\n                    -1.2901676,\n                    -1.5869136,\n                    -1.9698185,\n                    -2.1646268,\n                    -2.960697,\n                    -2.566998,\n                    -1.4355865,\n                    -2.5717368,\n                    -2.0413668,\n                    -1.290169,\n                    -2.516158,\n                    -2.759129,\n                    -2.0104089,\n                    -3.2819536,\n                    -1.6685455,\n                    -2.3512256,\n                    -2.0406234,\n                    -1.4104943,\n                    -3.3107772,\n                    -5.470009,\n                    -3.6898227,\n                    -2.2685525,\n                    -2.7050624,\n                    -3.1616924,\n                    -2.3806925,\n                    -2.7841287,\n                    -1.9129984,\n                    -3.2932487,\n                    -1.6767087,\n                    -1.1507784,\n                    -2.3390803,\n                    -4.5349946,\n                    
-3.1582544,\n                    -1.7180965,\n                    -5.6086135,\n                    -2.3687649,\n                    -2.3038986,\n                    -1.8158605,\n                    -3.0817351,\n                    -2.2010934,\n                    -1.9180917,\n                    -1.4120785,\n                    -1.3320959,\n                    -3.7883961,\n                    -1.5549222,\n                    -0.9354667,\n                    -1.0546124,\n                    -1.2253877,\n                    -2.1193779,\n                    -1.5984664,\n                    -4.7134676,\n                    -1.7268164,\n                    -2.1174202,\n                    -1.297949,\n                    -1.8061957,\n                    -1.2423681,\n                    -2.1758392,\n                    -1.4506269,\n                    -2.4918299,\n                    -0.90987223,\n                    -2.2017498,\n                    -1.0510653,\n                    -1.9787481,\n                    -2.1033976,\n                    -2.4857621,\n                    -3.798583,\n                    -2.7556937,\n                    -2.8681157,\n                    -3.3335586,\n                    -1.1036271,\n                    -2.1468067,\n                    -1.7182089,\n                    -9.625127,\n                    -1.7497753,\n                    -1.3092595,\n                    -1.6229994,\n                    -1.8266094,\n                    -1.1855419,\n                    -1.1022376,\n                    -2.5157192,\n                    -1.8676955,\n                    -3.8111587,\n                    -2.3706863,\n                    -1.3407114,\n                    -1.2293477\n                ],\n                \"pointIndex\": [\n                    0,\n                    502,\n                    228,\n                    85417915,\n                    87177656,\n                    52405554,\n                    24939584,\n                   
 81211067,\n                    63171332,\n                    68876814,\n                    88857761,\n                    107085546,\n                    50678915,\n                    43230496,\n                    51144720,\n                    9727721,\n                    73492326,\n                    27971076,\n                    84051766,\n                    100252284,\n                    33351039,\n                    122947737,\n                    10429343,\n                    109197430,\n                    8155808,\n                    46643364,\n                    2120434,\n                    112945328,\n                    84785397,\n                    88789216,\n                    64828394,\n                    23919421,\n                    11277540,\n                    77949708,\n                    26985267,\n                    94800518,\n                    99115340,\n                    102070153,\n                    6282878,\n                    50070055,\n                    37438927,\n                    35424417,\n                    62523043,\n                    68161840,\n                    4127142,\n                    5313773,\n                    43325589,\n                    38238130,\n                    47910376,\n                    66853413,\n                    28640695,\n                    22449973,\n                    65688760,\n                    55518842,\n                    10740481,\n                    58487096,\n                    96556615,\n                    23345688,\n                    5188696,\n                    11772972,\n                    95503364,\n                    115673175,\n                    125130528,\n                    14571204,\n                    79344496,\n                    82642712,\n                    54565943,\n                    98714084,\n                    22037788,\n                    92278340,\n                    97731258,\n                    85795585,\n   
                 98369318,\n                    103507242,\n                    107488494,\n                    110471801,\n                    116862439,\n                    123458410,\n                    127258485\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 228,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -128508364556705108\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.630898,\n                    -0.6477811,\n                    -0.63280094,\n                    -0.6948641,\n                    -0.6490363,\n                    -0.6381175,\n                    -0.70697945,\n                    -0.7485131,\n                    -0.7632588,\n                    -0.8179374,\n                    -0.6676431,\n                    -0.6843537,\n                    -0.6571992,\n                    -0.7652821,\n                    -0.7276874,\n                    -0.7827385,\n                    -0.77456915,\n                    -0.7726149,\n                    -0.77507704,\n                    -0.9687915,\n                    -1.0675865,\n                    -0.7493418,\n                    -0.88513535,\n                    -0.6983723,\n                    -0.8357457,\n                    -0.7440659,\n                    -0.69447714,\n                    -0.77738136,\n                    -1.0133171,\n                    -0.87642634,\n                    -0.857812,\n                    -1.0985663,\n                    -1.9370906,\n                    -1.2371792,\n                    -1.0567256,\n                    -1.0878961,\n                    -0.80589247,\n              
      -0.7802553,\n                    -0.9365398,\n                    -0.9947,\n                    -1.1050628,\n                    -1.1739084,\n                    -1.0834794,\n                    -1.295832,\n                    -0.79162145,\n                    -0.94784623,\n                    -0.90933746,\n                    -0.8757789,\n                    -0.9412581,\n                    -0.9697154,\n                    -1.4111537,\n                    -0.77407503,\n                    -0.76541466,\n                    -0.7894012,\n                    -0.7536822,\n                    -0.8625097,\n                    -1.4588612,\n                    -1.042716,\n                    -1.2608161,\n                    -1.3634186,\n                    -1.0306897,\n                    -1.1214379,\n                    -0.93017733,\n                    -1.5569626,\n                    -1.2675877,\n                    -2.3300815,\n                    -2.037261,\n                    -1.3787063,\n                    -2.078333,\n                    -1.4332216,\n                    -2.1206706,\n                    -1.2226236,\n                    -1.1829647,\n                    -0.8473785,\n                    -1.4364246,\n                    -1.0885451,\n                    -1.7373513,\n                    -1.0789704,\n                    -1.4983333,\n                    -1.2914145,\n                    -1.1032754,\n                    -1.6687708,\n                    -2.7030573,\n                    -1.1990012,\n                    -1.7063648,\n                    -2.1159866,\n                    -2.342888,\n                    -1.6727512,\n                    -1.564358,\n                    -1.372533,\n                    -0.9340407,\n                    -0.99187064,\n                    -1.2056274,\n                    -1.0800625,\n                    -1.6576957,\n                    -1.3551583,\n                    -1.0228239,\n                    -1.7431831,\n    
                -1.286001,\n                    -1.1640606,\n                    -1.6876549,\n                    -1.7610623,\n                    -1.6307021,\n                    -0.8457055,\n                    -0.8424614,\n                    -1.2389152,\n                    -1.2091107,\n                    -1.1098007,\n                    -1.0474402,\n                    -0.86699677,\n                    -1.1676563,\n                    -0.87737286,\n                    -3.3185194,\n                    -1.7179166,\n                    -1.6006385,\n                    -2.3141143,\n                    -3.4334073,\n                    -2.3130846,\n                    -2.0194337,\n                    -1.8243642,\n                    -1.720521,\n                    -1.9310031,\n                    -1.4816298,\n                    -2.4737897,\n                    -1.7918673,\n                    -1.5567455,\n                    -2.0214505,\n                    -2.3170557,\n                    -1.7804987,\n                    -2.2804224,\n                    -3.51206,\n                    -2.5557177,\n                    -3.2283506,\n                    -2.9984992,\n                    -2.7410605,\n                    -1.5036118,\n                    -2.0997918,\n                    -2.5839639,\n                    -2.6720161,\n                    -1.7679182,\n                    -1.5598592,\n                    -2.9722614,\n                    -2.615845,\n                    -2.8201096,\n                    -2.10161,\n                    -4.986848,\n                    -2.4506004,\n                    -1.3532511,\n                    -1.1694721,\n                    -1.619396,\n                    -1.6742814,\n                    -2.174175,\n                    -4.5054655,\n                    -2.4169295,\n                    -2.1824694,\n                    -1.1939554,\n                    -3.3943212,\n                    -2.303942,\n                    
-2.0812685,\n                    -3.2173884,\n                    -2.0529165,\n                    -2.0515125,\n                    -2.9678133,\n                    -1.671063,\n                    -1.9606991,\n                    -2.8123155,\n                    -5.5252957,\n                    -2.4556065,\n                    -1.4970556,\n                    -2.6249,\n                    -2.0755422,\n                    -3.0332034,\n                    -3.7715864,\n                    -2.7577121,\n                    -3.0361726,\n                    -2.2537212,\n                    -3.0970392,\n                    -1.8873868,\n                    -2.558867,\n                    -3.7742202,\n                    -2.937425,\n                    -2.6971843,\n                    -1.9580126,\n                    -2.1035142,\n                    -1.6116444,\n                    -1.6787056,\n                    -4.688275,\n                    -2.2149107,\n                    -1.08943,\n                    -1.8120431,\n                    -1.8196094,\n                    -2.3866353,\n                    -2.839746,\n                    -2.0566387,\n                    -1.9034048,\n                    -4.365361,\n                    -2.675564,\n                    -1.5361451,\n                    -4.6711106,\n                    -3.8471017,\n                    -1.9594274,\n                    -2.5981467,\n                    -3.6338663,\n                    -4.5456915,\n                    -2.0403075,\n                    -2.2516203,\n                    -2.0036118,\n                    -3.7112646,\n                    -0.85280895,\n                    -4.2003527,\n                    -1.4760265,\n                    -2.7323143,\n                    -1.7102116,\n                    -2.1479855,\n                    -1.9858385,\n                    -2.095076,\n                    -3.0762653,\n                    -2.08245,\n                    -2.035378,\n                    
-1.4645298,\n                    -2.5723915,\n                    -2.4020085,\n                    -2.0320628,\n                    -3.3489451,\n                    -0.9732658\n                ],\n                \"pointIndex\": [\n                    0,\n                    501,\n                    225,\n                    92365140,\n                    12468747,\n                    20753240,\n                    92141865,\n                    95005479,\n                    68640148,\n                    62615683,\n                    103838480,\n                    66742972,\n                    116439537,\n                    104268231,\n                    8858295,\n                    54224740,\n                    65521261,\n                    74614563,\n                    105769197,\n                    104643027,\n                    115637626,\n                    111636229,\n                    1331364,\n                    64087795,\n                    96602030,\n                    98980111,\n                    65777520,\n                    89080983,\n                    58892339,\n                    22644552,\n                    94256497,\n                    69757173,\n                    2334587,\n                    79822833,\n                    50828536,\n                    109548796,\n                    101251341,\n                    30702731,\n                    114884835,\n                    122206416,\n                    86253129,\n                    35635296,\n                    36615887,\n                    29815484,\n                    17668545,\n                    41848385,\n                    33060378,\n                    118682512,\n                    94592436,\n                    9165700,\n                    48507835,\n                    49743418,\n                    114459716,\n                    24705761,\n                    53719230,\n                    57642889,\n                    72129372,\n         
           118372695,\n                    23655649,\n                    123107717,\n                    69213579,\n                    75295761,\n                    6699983,\n                    116981543,\n                    26403286,\n                    80827840,\n                    126197891,\n                    88762477,\n                    91658553,\n                    107618253,\n                    103016808,\n                    105082782,\n                    103281533,\n                    110847554,\n                    113373220,\n                    32790595,\n                    121450923,\n                    124577307\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 225,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 7222263662094774283\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.4580384,\n                    -0.46075743,\n                    -0.46211502,\n                    -0.4929041,\n                    -0.4730218,\n                    -0.4747222,\n                    -0.46597216,\n                    -0.66657495,\n                    -0.56379455,\n                    -0.47808835,\n                    -0.5287002,\n                    -0.50128,\n                    -0.5075093,\n                    -0.4686614,\n                    -0.73368955,\n                    -0.708327,\n                    -0.673227,\n                    -0.57508063,\n                    -0.7542673,\n                    -0.5368351,\n                    -0.47845197,\n                    -0.5562689,\n                    -0.72655267,\n                    -0.5083273,\n           
         -0.503371,\n                    -0.5842113,\n                    -0.6419362,\n                    -0.5932558,\n                    -0.67286736,\n                    -0.7619549,\n                    -0.8213134,\n                    -0.90004736,\n                    -0.7194303,\n                    -0.87810415,\n                    -0.9601538,\n                    -0.6465579,\n                    -0.9196468,\n                    -0.89380914,\n                    -0.8300188,\n                    -1.2643598,\n                    -0.76223713,\n                    -0.858839,\n                    -0.5496779,\n                    -0.59502965,\n                    -0.6356277,\n                    -0.864928,\n                    -0.9490388,\n                    -0.8488594,\n                    -0.65086263,\n                    -0.7511573,\n                    -0.6592746,\n                    -0.79392016,\n                    -0.7518688,\n                    -0.646706,\n                    -0.7839151,\n                    -0.64437765,\n                    -1.0888321,\n                    -3.038991,\n                    -1.1950477,\n                    -0.9051793,\n                    -1.2800106,\n                    -0.8435553,\n                    -1.2381986,\n                    -1.7287918,\n                    -1.0798482,\n                    -0.764986,\n                    -1.1250908,\n                    -1.7044045,\n                    -2.1042469,\n                    -1.9712648,\n                    -1.036632,\n                    -2.0903056,\n                    -1.3683456,\n                    -1.0508043,\n                    -1.4783676,\n                    -1.0530821,\n                    -0.934944,\n                    -1.250601,\n                    -2.1660426,\n                    -1.6160846,\n                    -2.042257,\n                    -0.9940596,\n                    -1.1671561,\n                    -1.3778867,\n                    
-1.1717991,\n                    -0.7084441,\n                    -0.69722295,\n                    -1.2867434,\n                    -0.65041363,\n                    -1.3847234,\n                    -0.75861377,\n                    -0.8957585,\n                    -1.3514508,\n                    -0.9746915,\n                    -1.086659,\n                    -1.1088036,\n                    -1.0807667,\n                    -0.77314854,\n                    -1.0591305,\n                    -0.9778022,\n                    -1.0392236,\n                    -1.8265591,\n                    -0.77029204,\n                    -0.8249063,\n                    -1.866333,\n                    -0.9302765,\n                    -1.1593188,\n                    -0.84130716,\n                    -0.7519833,\n                    -1.070171,\n                    -1.1213996,\n                    -0.8770907,\n                    -0.66014594,\n                    -1.7014613,\n                    -1.1222742,\n                    -3.6753109,\n                    -3.2820745,\n                    -2.1890109,\n                    -2.7490132,\n                    -2.0823503,\n                    -2.1744382,\n                    -2.1111338,\n                    -1.7354052,\n                    -2.6427343,\n                    -1.5783647,\n                    -2.3191943,\n                    -2.2987525,\n                    -4.551017,\n                    -2.6797552,\n                    -1.1783116,\n                    -1.1015216,\n                    -2.0225565,\n                    -1.0180598,\n                    -1.4789,\n                    -1.1881534,\n                    -2.7123027,\n                    -2.309295,\n                    -2.6841495,\n                    -2.2632222,\n                    -4.495556,\n                    -4.797015,\n                    -2.9508445,\n                    -2.761931,\n                    -2.3967311,\n                    -3.2577622,\n           
         -2.7722323,\n                    -4.411268,\n                    -1.7627251,\n                    -1.7279713,\n                    -2.6112216,\n                    -4.2717066,\n                    -2.8829312,\n                    -1.2101817,\n                    -3.985402,\n                    -2.2695396,\n                    -2.272051,\n                    -1.633945,\n                    -5.034854,\n                    -3.8012376,\n                    -3.3353143,\n                    -2.1721678,\n                    -2.0492704,\n                    -2.2347207,\n                    -2.3234985,\n                    -1.1148325,\n                    -2.0454772,\n                    -2.0794945,\n                    -2.824807,\n                    -1.6930101,\n                    -2.8448656,\n                    -1.2729511,\n                    -1.3998069,\n                    -0.7758805,\n                    -1.3747265,\n                    -0.7600807,\n                    -1.4784952,\n                    -2.373021,\n                    -4.0120835,\n                    -1.2613412,\n                    -2.7648678,\n                    -4.0832334,\n                    -2.46164,\n                    -1.9434028,\n                    -0.9132536,\n                    -1.0332861,\n                    -2.0537302,\n                    -4.2940426,\n                    -1.770343,\n                    -0.9790258,\n                    -4.511891,\n                    -2.8161838,\n                    -1.1884514,\n                    -1.6424551,\n                    -1.822277,\n                    -2.8266153,\n                    -1.1383984,\n                    -0.80848265,\n                    -2.7638896,\n                    -1.6277307,\n                    -1.055982,\n                    -2.2933555,\n                    -1.3826586,\n                    -2.1459916,\n                    -2.1754184,\n                    -3.1542444,\n                    -2.6393998,\n          
          -3.4686902,\n                    -2.8534982,\n                    -1.0647641,\n                    -3.4518557,\n                    -3.361874,\n                    -1.1547184,\n                    -2.4933126,\n                    -2.2956147,\n                    -1.3746315,\n                    -0.8468485,\n                    -1.3102388,\n                    -2.1647186,\n                    -2.7558634,\n                    -1.21418,\n                    -3.544629,\n                    -2.6128135,\n                    -1.7413558,\n                    -2.2080815,\n                    -1.8403822,\n                    -4.518899,\n                    -1.2705089,\n                    -2.5194619\n                ],\n                \"pointIndex\": [\n                    1,\n                    503,\n                    228,\n                    109434413,\n                    108902718,\n                    45375688,\n                    86832624,\n                    61475015,\n                    46389286,\n                    22559253,\n                    79620416,\n                    99865252,\n                    3332832,\n                    76968655,\n                    45642799,\n                    26412718,\n                    124991254,\n                    75975371,\n                    87432558,\n                    84846355,\n                    101770610,\n                    14411874,\n                    35292438,\n                    117281552,\n                    8002730,\n                    17329378,\n                    18437826,\n                    18993252,\n                    106181391,\n                    54209581,\n                    4957861,\n                    108094925,\n                    23961322,\n                    86705572,\n                    25729239,\n                    115529408,\n                    77332542,\n                    103448472,\n                    119617281,\n                    115078757,\n    
                127247486,\n                    46567368,\n                    116666879,\n                    52781221,\n                    37853085,\n                    73956241,\n                    113961039,\n                    41589895,\n                    105665277,\n                    75178451,\n                    1978930,\n                    118698118,\n                    2148348,\n                    81224634,\n                    20280151,\n                    73821485,\n                    21476629,\n                    58807825,\n                    9382088,\n                    57233256,\n                    72226598,\n                    99300621,\n                    111957991,\n                    80990229,\n                    69990459,\n                    71743878,\n                    2732081,\n                    78838502,\n                    27695186,\n                    98383303,\n                    62160363,\n                    119084073,\n                    93542763,\n                    98105998,\n                    99641668,\n                    103936523,\n                    110020651,\n                    116195474,\n                    110564406\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 228,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 7872595278026300545\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5318544,\n                    -0.5526347,\n                    -0.5349425,\n                    -0.648295,\n                    -0.5679767,\n                    -0.53884137,\n                    -0.61517274,\n                  
  -0.69902545,\n                    -0.6633728,\n                    -0.57753766,\n                    -0.64800483,\n                    -0.548599,\n                    -0.54938823,\n                    -0.6605117,\n                    -0.86284494,\n                    -0.71828693,\n                    -0.81339324,\n                    -0.81776756,\n                    -0.931348,\n                    -0.6920306,\n                    -0.6435287,\n                    -0.71530217,\n                    -0.6555444,\n                    -0.98792934,\n                    -0.5638099,\n                    -0.6063291,\n                    -0.71361756,\n                    -0.8526455,\n                    -0.94601935,\n                    -0.9070212,\n                    -1.2568408,\n                    -1.6926986,\n                    -0.94654936,\n                    -1.230797,\n                    -0.9508877,\n                    -1.2342145,\n                    -1.5927098,\n                    -1.0119296,\n                    -1.0349593,\n                    -0.77720755,\n                    -0.9708845,\n                    -0.8177801,\n                    -0.7369408,\n                    -0.925699,\n                    -1.3079426,\n                    -0.6662252,\n                    -0.6679798,\n                    -1.0225967,\n                    -1.0462278,\n                    -1.093949,\n                    -0.61489826,\n                    -0.7670343,\n                    -0.6418647,\n                    -0.8002868,\n                    -0.7372775,\n                    -0.8960303,\n                    -1.1364943,\n                    -0.9798754,\n                    -1.1735563,\n                    -1.3999515,\n                    -1.0121135,\n                    -1.5662241,\n                    -1.4446868,\n                    -1.7188863,\n                    -1.9484583,\n                    -1.6996074,\n                    -1.1428131,\n                    
-1.5619653,\n                    -1.2767718,\n                    -1.7581125,\n                    -2.8999724,\n                    -1.5626452,\n                    -1.7994468,\n                    -2.3420005,\n                    -1.8132316,\n                    -1.3271576,\n                    -1.1727203,\n                    -1.6932975,\n                    -1.093756,\n                    -0.8861891,\n                    -1.9243885,\n                    -1.7915244,\n                    -1.0730273,\n                    -1.5481496,\n                    -1.1812528,\n                    -0.95917827,\n                    -1.4939461,\n                    -1.3916106,\n                    -1.5121248,\n                    -1.7567997,\n                    -1.620493,\n                    -0.7535965,\n                    -0.9707958,\n                    -1.8692814,\n                    -0.7235233,\n                    -1.0412691,\n                    -1.3920215,\n                    -1.1896292,\n                    -1.3844485,\n                    -1.7028971,\n                    -1.2931386,\n                    -0.7415553,\n                    -1.7628093,\n                    -0.8847007,\n                    -0.9979066,\n                    -0.6709767,\n                    -1.7224689,\n                    -0.8326914,\n                    -0.8062148,\n                    -0.87184334,\n                    -0.7651507,\n                    -0.99336547,\n                    -1.0146102,\n                    -2.5240374,\n                    -1.830741,\n                    -1.0107177,\n                    -1.7176067,\n                    -1.2612122,\n                    -1.3724835,\n                    -3.8971236,\n                    -1.6958439,\n                    -1.310176,\n                    -1.26883,\n                    -3.2852666,\n                    -2.4117398,\n                    -5.1487803,\n                    -1.9918764,\n                    -4.3635464,\n          
          -2.1841486,\n                    -3.4227571,\n                    -3.6512878,\n                    -5.3473387,\n                    -3.6426513,\n                    -1.1749694,\n                    -3.8084428,\n                    -1.797575,\n                    -3.917274,\n                    -1.3197335,\n                    -3.105423,\n                    -1.9836197,\n                    -3.3455346,\n                    -3.5547278,\n                    -2.9551923,\n                    -2.1559243,\n                    -3.201745,\n                    -3.1743422,\n                    -1.8750439,\n                    -2.8562143,\n                    -4.809888,\n                    -2.040142,\n                    -2.576594,\n                    -2.1615236,\n                    -2.6725488,\n                    -3.3076863,\n                    -1.9927764,\n                    -3.0091994,\n                    -2.5479405,\n                    -1.5206244,\n                    -1.3348264,\n                    -1.2347344,\n                    -1.0797039,\n                    -2.975576,\n                    -1.9721434,\n                    -2.4176564,\n                    -2.7389612,\n                    -2.7175198,\n                    -1.5420492,\n                    -1.8847944,\n                    -1.6513671,\n                    -2.5460448,\n                    -2.1374488,\n                    -2.0714803,\n                    -3.0631979,\n                    -2.056451,\n                    -2.042578,\n                    -1.50944,\n                    -1.6670105,\n                    -4.353,\n                    -1.9820719,\n                    -2.9284163,\n                    -1.8892688,\n                    -4.1990786,\n                    -2.1729066,\n                    -1.5507696,\n                    -2.7340138,\n                    -1.0775504,\n                    -1.3502195,\n                    -2.0504923,\n                    -5.231181,\n              
      -1.5180875,\n                    -1.3710163,\n                    -1.2219802,\n                    -1.7035313,\n                    -1.8204783,\n                    -2.402197,\n                    -1.8848188,\n                    -1.4722091,\n                    -1.6576673,\n                    -1.3964651,\n                    -6.033809,\n                    -3.7256346,\n                    -1.6946881,\n                    -2.1770961,\n                    -1.2699507,\n                    -1.0702896,\n                    -2.541309,\n                    -5.0751104,\n                    -2.4699755,\n                    -2.5016696,\n                    -1.0325359,\n                    -2.0575097,\n                    -1.0528655,\n                    -0.79915005,\n                    -3.4902232,\n                    -2.510019,\n                    -2.2903266,\n                    -1.1804458,\n                    -0.82398766,\n                    -6.061527,\n                    -2.3614256,\n                    -0.8843897,\n                    -1.9906607,\n                    -5.030266,\n                    -5.0068526,\n                    -1.2827667,\n                    -1.1882246,\n                    -4.0000052,\n                    -2.9999995,\n                    -2.5603006,\n                    -2.2915864\n                ],\n                \"pointIndex\": [\n                    3,\n                    504,\n                    230,\n                    115347595,\n                    114781716,\n                    88385758,\n                    82551603,\n                    81082979,\n                    43380974,\n                    10194344,\n                    46512063,\n                    110078896,\n                    94415135,\n                    17090150,\n                    1091252,\n                    8607563,\n                    93121216,\n                    66403817,\n                    74732784,\n                    88677344,\n       
             108310888,\n                    81147254,\n                    53245757,\n                    37363390,\n                    19078022,\n                    95335853,\n                    69450267,\n                    47217789,\n                    99847234,\n                    96872649,\n                    76836844,\n                    105708382,\n                    64047755,\n                    68216567,\n                    106743551,\n                    79846973,\n                    86907060,\n                    96454124,\n                    104808962,\n                    116530184,\n                    243950,\n                    110187486,\n                    8357889,\n                    69643767,\n                    47947051,\n                    34809077,\n                    25305060,\n                    39642089,\n                    97101121,\n                    41094885,\n                    42168413,\n                    44000776,\n                    19255895,\n                    1606917,\n                    100649211,\n                    61085956,\n                    53954022,\n                    36935615,\n                    4489663,\n                    58836135,\n                    23305165,\n                    64421908,\n                    96644721,\n                    88863796,\n                    45998859,\n                    84197218,\n                    74033379,\n                    77317836,\n                    20708815,\n                    81693082,\n                    85518041,\n                    13072922,\n                    109596508,\n                    98983742,\n                    6003317,\n                    108464322,\n                    115642690,\n                    120441323,\n                    73830126,\n                    252001\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 230,\n                \"capacity\": 
256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -7386051442361988795\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5108797,\n                    -0.53291243,\n                    -0.5210462,\n                    -0.54066086,\n                    -0.62747353,\n                    -0.5228788,\n                    -0.53056675,\n                    -0.857135,\n                    -0.5431626,\n                    -0.6728934,\n                    -0.83623904,\n                    -0.5277067,\n                    -0.54691744,\n                    -0.5597804,\n                    -0.5415357,\n                    -1.0465266,\n                    -0.9172645,\n                    -0.6498583,\n                    -0.5838935,\n                    -0.7679667,\n                    -0.83578706,\n                    -0.86347896,\n                    -0.8489478,\n                    -0.58355063,\n                    -0.5913145,\n                    -0.67941654,\n                    -0.5532097,\n                    -0.57800925,\n                    -0.6950984,\n                    -1.0079625,\n                    -0.93897146,\n                    -1.0548553,\n                    -1.0986328,\n                    -0.990683,\n                    -0.95704466,\n                    -0.927313,\n                    -0.707812,\n                    -0.8876795,\n                    -0.58693117,\n                    -0.87326914,\n                    -0.78118306,\n                    -1.0092329,\n                    -0.90121645,\n                    -0.96073735,\n                    -1.1489044,\n                    -0.85746175,\n                    -1.0691592,\n                    
-0.9054018,\n                    -0.94487154,\n                    -0.5967019,\n                    -0.7073035,\n                    -1.0018818,\n                    -1.0238538,\n                    -0.6573265,\n                    -0.65281594,\n                    -0.60852957,\n                    -0.8033723,\n                    -1.239034,\n                    -1.9295261,\n                    -3.1623762,\n                    -1.8403192,\n                    -1.3301795,\n                    -2.7431705,\n                    -1.4577744,\n                    -1.1961795,\n                    -1.1853992,\n                    -2.1535192,\n                    -1.0238626,\n                    -2.1560311,\n                    -1.0290521,\n                    -0.9725088,\n                    -1.5267935,\n                    -1.254052,\n                    -0.9736978,\n                    -1.2086835,\n                    -1.1516668,\n                    -1.6799083,\n                    -0.89718825,\n                    -1.7337049,\n                    -1.0380706,\n                    -1.2615322,\n                    -1.9453869,\n                    -0.9431378,\n                    -1.3229971,\n                    -1.113262,\n                    -0.9120092,\n                    -0.940582,\n                    -1.1862113,\n                    -1.2428428,\n                    -1.4028096,\n                    -1.1804905,\n                    -1.2219838,\n                    -1.0842413,\n                    -1.2987602,\n                    -1.3231869,\n                    -1.1256894,\n                    -1.4694376,\n                    -1.778964,\n                    -1.2206122,\n                    -0.7018415,\n                    -0.7977892,\n                    -1.0083444,\n                    -0.80514693,\n                    -1.1089602,\n                    -1.1285571,\n                    -1.4524838,\n                    -1.9886453,\n                    -1.9686787,\n       
             -0.8539721,\n                    -1.1108284,\n                    -1.7369735,\n                    -0.75138754,\n                    -1.2929288,\n                    -2.8029969,\n                    -1.8970212,\n                    -2.9346006,\n                    -3.1239185,\n                    -2.3111954,\n                    -4.161659,\n                    -4.740618,\n                    -3.930361,\n                    -3.1946766,\n                    -2.6492808,\n                    -3.8204699,\n                    -1.512944,\n                    -3.6797109,\n                    -3.3561983,\n                    -2.0975733,\n                    -2.804475,\n                    -1.4382597,\n                    -1.8145785,\n                    -1.6634225,\n                    -4.7425866,\n                    -3.160053,\n                    -3.949805,\n                    -1.6202605,\n                    -1.3382463,\n                    -5.6329947,\n                    -3.1294072,\n                    -1.9707998,\n                    -1.1553085,\n                    -3.8239818,\n                    -4.8079767,\n                    -2.4293022,\n                    -1.5714104,\n                    -3.515261,\n                    -2.4722693,\n                    -1.2639914,\n                    -1.5367815,\n                    -2.372899,\n                    -2.0205681,\n                    -3.5453362,\n                    -1.3925744,\n                    -4.381768,\n                    -2.2124214,\n                    -2.1457138,\n                    -1.6154038,\n                    -2.1108005,\n                    -8.22557,\n                    -4.411373,\n                    -1.0580919,\n                    -1.2821277,\n                    -1.3998395,\n                    -2.4272609,\n                    -2.3948019,\n                    -0.96729654,\n                    -1.3134754,\n                    -1.676334,\n                    -1.6012802,\n      
              -1.3334838,\n                    -1.2328417,\n                    -3.944815,\n                    -2.678497,\n                    -3.3102927,\n                    -1.9348183,\n                    -2.8151054,\n                    -1.7924582,\n                    -2.1709836,\n                    -2.1577468,\n                    -2.7874053,\n                    -1.9171277,\n                    -1.4504448,\n                    -2.6907806,\n                    -2.2421236,\n                    -1.2558291,\n                    -1.3776932,\n                    -1.1472483,\n                    -1.5318475,\n                    -5.8697314,\n                    -1.6595546,\n                    -2.01325,\n                    -1.4753066,\n                    -3.3051827,\n                    -2.9482744,\n                    -1.6691211,\n                    -3.6826656,\n                    -2.018957,\n                    -2.9914162,\n                    -2.479594,\n                    -2.6773448,\n                    -0.7891612,\n                    -1.6135672,\n                    -1.5358614,\n                    -1.0630399,\n                    -2.6844327,\n                    -2.140832,\n                    -1.4059803,\n                    -1.8687615,\n                    -1.5211347,\n                    -1.1563559,\n                    -1.8074015,\n                    -1.585874,\n                    -2.905799,\n                    -2.0970418,\n                    -2.6970625,\n                    -3.638173,\n                    -2.7245076,\n                    -1.0986912,\n                    -1.3670276,\n                    -2.4939995,\n                    -1.2994958,\n                    -3.5241516,\n                    -1.7636971,\n                    -1.6964252,\n                    -1.2399389,\n                    -1.4650214\n                ],\n                \"pointIndex\": [\n                    0,\n                    502,\n                    226,\n     
               32292610,\n                    109869361,\n                    49676440,\n                    38860639,\n                    62592793,\n                    600790,\n                    23911739,\n                    83647733,\n                    116832183,\n                    95541343,\n                    17576862,\n                    29948653,\n                    57543406,\n                    23404328,\n                    71784850,\n                    83143492,\n                    92528201,\n                    110518113,\n                    14668457,\n                    92928200,\n                    332742,\n                    103153798,\n                    18665463,\n                    121031463,\n                    56018456,\n                    1132292,\n                    61525538,\n                    66163859,\n                    5447769,\n                    89783462,\n                    118557716,\n                    6231473,\n                    87963967,\n                    114288691,\n                    31571420,\n                    6534378,\n                    14235721,\n                    35168244,\n                    21282808,\n                    37519649,\n                    4883690,\n                    102255530,\n                    39341605,\n                    8200693,\n                    50840927,\n                    45610825,\n                    45886354,\n                    48352851,\n                    6983081,\n                    53828108,\n                    24397969,\n                    3149000,\n                    60837572,\n                    59532798,\n                    87757933,\n                    65933620,\n                    66841196,\n                    24895074,\n                    69969079,\n                    109326827,\n                    74783313,\n                    77159216,\n                    61306945,\n                    82896750,\n                    
117164763,\n                    110986551,\n                    30674296,\n                    91982112,\n                    96851510,\n                    29073451,\n                    103960445,\n                    13886321,\n                    111283137,\n                    33622972,\n                    34148650,\n                    425\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 226,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 8161128184677511835\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.40340668,\n                    -0.529459,\n                    -0.5161869,\n                    -0.53543,\n                    -0.5572265,\n                    -0.559943,\n                    -0.5261264,\n                    -0.65634567,\n                    -0.8289195,\n                    -0.5882762,\n                    -0.6718383,\n                    -0.5602667,\n                    -0.59636503,\n                    -0.52978766,\n                    -0.5676633,\n                    -0.66157055,\n                    -0.67849123,\n                    -0.8783929,\n                    -0.87770337,\n                    -0.63287824,\n                    -0.7088296,\n                    -0.730397,\n                    -0.6751855,\n                    -0.67252594,\n                    -0.61771494,\n                    -0.62258834,\n                    -0.8742302,\n                    -0.54998684,\n                    -0.53258646,\n                    -0.5697075,\n                    -0.6338611,\n                    -0.7282499,\n                    -0.71893036,\n             
       -0.69252646,\n                    -0.6978233,\n                    -1.083951,\n                    -1.0749961,\n                    -0.9492825,\n                    -1.0267339,\n                    -0.8309118,\n                    -1.0968392,\n                    -1.0346222,\n                    -0.71960473,\n                    -0.753241,\n                    -0.82036847,\n                    -0.68786734,\n                    -0.95036894,\n                    -0.7919302,\n                    -0.7842412,\n                    -0.78362364,\n                    -0.69734085,\n                    -0.7361724,\n                    -0.63951087,\n                    -0.9256614,\n                    -1.3222324,\n                    -0.557387,\n                    -0.5765443,\n                    -0.8541396,\n                    -1.5424199,\n                    -1.239235,\n                    -0.6581758,\n                    -0.6957259,\n                    -0.71216434,\n                    -1.2122377,\n                    -1.3424256,\n                    -0.8595487,\n                    -0.93395984,\n                    -0.91291904,\n                    -0.99467903,\n                    -1.0259969,\n                    -0.83747417,\n                    -1.1415584,\n                    -1.844381,\n                    -2.6260114,\n                    -1.8964473,\n                    -1.1886573,\n                    -0.95166403,\n                    -1.3016888,\n                    -2.1763248,\n                    -0.8594845,\n                    -1.7636291,\n                    -1.5953441,\n                    -1.2344376,\n                    -2.9971352,\n                    -1.2809831,\n                    -0.7776844,\n                    -1.5833745,\n                    -1.1377445,\n                    -0.81377697,\n                    -1.4259038,\n                    -1.5685266,\n                    -1.2828376,\n                    -1.2463381,\n                    
-1.035027,\n                    -1.023178,\n                    -0.9225322,\n                    -1.334532,\n                    -1.2539599,\n                    -0.8611026,\n                    -1.0418408,\n                    -1.2660669,\n                    -1.0434586,\n                    -0.82252276,\n                    -0.9484263,\n                    -0.7822511,\n                    -0.7963581,\n                    -0.94546413,\n                    -1.4416524,\n                    -1.972239,\n                    -1.3854128,\n                    -1.7213302,\n                    -0.88175184,\n                    -0.660204,\n                    -1.0088569,\n                    -0.6626971,\n                    -1.4806819,\n                    -3.8850646,\n                    -3.4880378,\n                    -2.0370426,\n                    -1.7129023,\n                    -2.4480736,\n                    -0.9842973,\n                    -1.1321272,\n                    -1.3175576,\n                    -1.7688314,\n                    -0.99098766,\n                    -0.8212509,\n                    -2.069529,\n                    -2.7128298,\n                    -2.104214,\n                    -1.9494729,\n                    -1.5712934,\n                    -1.9657266,\n                    -2.8847177,\n                    -2.0214329,\n                    -1.1168336,\n                    -1.2752433,\n                    -1.5474607,\n                    -2.0712857,\n                    -1.228839,\n                    -2.3795173,\n                    -2.8112884,\n                    -0.84520334,\n                    -1.9762503,\n                    -1.7711192,\n                    -2.0245588,\n                    -2.4114013,\n                    -4.225437,\n                    -5.1449366,\n                    -3.5052965,\n                    -3.2005363,\n                    -1.7473437,\n                    -4.5288105,\n                    -1.3647547,\n           
         -1.6030688,\n                    -3.399047,\n                    -3.9606524,\n                    -2.4988217,\n                    -2.2541208,\n                    -1.6598576,\n                    -2.0318375,\n                    -3.440064,\n                    -2.4472992,\n                    -2.583677,\n                    -1.9249152,\n                    -2.901985,\n                    -2.3829575,\n                    -3.055308,\n                    -3.5783951,\n                    -1.7583847,\n                    -1.3392538,\n                    -1.5853678,\n                    -1.7856835,\n                    -4.027946,\n                    -2.5141406,\n                    -2.4793181,\n                    -1.6539936,\n                    -1.8417726,\n                    -1.4020761,\n                    -1.6960694,\n                    -1.6217222,\n                    -1.7380404,\n                    -1.8801169,\n                    -2.5759354,\n                    -2.688679,\n                    -1.4906636,\n                    -2.4896166,\n                    -2.0299616,\n                    -1.1732154,\n                    -2.218882,\n                    -1.5876522,\n                    -2.8258562,\n                    -2.2313547,\n                    -1.8463587,\n                    -3.1047606,\n                    -1.3781078,\n                    -1.4334736,\n                    -2.1798456,\n                    -2.3027706,\n                    -2.6381938,\n                    -2.3633974,\n                    -1.6888646,\n                    -2.6922355,\n                    -3.9937582,\n                    -1.8913554,\n                    -3.3459861,\n                    -1.9445951,\n                    -2.7985828,\n                    -1.6192544,\n                    -2.2790425,\n                    -1.6679081,\n                    -2.0862288,\n                    -1.6981969,\n                    -2.1887875,\n                    -1.3072184,\n      
              -1.5203125,\n                    -2.3198156,\n                    -4.1349745,\n                    -3.2535048,\n                    -4.6283674,\n                    -2.5832548,\n                    -3.1003091,\n                    -1.7998267,\n                    -1.9538168,\n                    -0.9321133,\n                    -1.3000219,\n                    -0.6603427,\n                    -1.417374,\n                    -4.4221253,\n                    -1.0785013,\n                    -1.8408641,\n                    -2.0598917\n                ],\n                \"pointIndex\": [\n                    1,\n                    504,\n                    232,\n                    93371543,\n                    83193340,\n                    51609995,\n                    89399250,\n                    58411525,\n                    8723328,\n                    20440944,\n                    46893266,\n                    122901962,\n                    7356524,\n                    43010919,\n                    19179752,\n                    70365646,\n                    119511225,\n                    76853214,\n                    84102788,\n                    95436273,\n                    1216784,\n                    124196634,\n                    4094230,\n                    39663861,\n                    41923224,\n                    21344652,\n                    21685201,\n                    43458670,\n                    15386476,\n                    60096538,\n                    103046171,\n                    23183276,\n                    11319123,\n                    45373015,\n                    12355583,\n                    77390034,\n                    94445921,\n                    99768692,\n                    30435166,\n                    117326285,\n                    123597904,\n                    3120255,\n                    36522499,\n                    16413966,\n                    23712342,\n             
       50831074,\n                    41869953,\n                    31582697,\n                    18123249,\n                    45993707,\n                    98907237,\n                    50027373,\n                    52003281,\n                    76222963,\n                    97052839,\n                    55995355,\n                    59301019,\n                    41017833,\n                    63884255,\n                    71897870,\n                    24273408,\n                    71362132,\n                    35199141,\n                    46381904,\n                    97006198,\n                    81449733,\n                    2703585,\n                    123730816,\n                    88570270,\n                    28538466,\n                    94720610,\n                    88963368,\n                    73814843,\n                    102316085,\n                    88128342,\n                    110050359,\n                    116313747,\n                    118351755,\n                    65273013,\n                    127514189,\n                    27\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 232,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 5911960361261524437\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.39453644,\n                    -0.4091319,\n                    -0.40276408,\n                    -0.42154148,\n                    -0.43336,\n                    -0.40930772,\n                    -0.49056512,\n                    -0.4553714,\n                    -0.6343011,\n                    -0.43776572,\n                    
-0.46448955,\n                    -0.49443203,\n                    -0.49125093,\n                    -0.55794704,\n                    -0.52942365,\n                    -0.5564705,\n                    -0.65847504,\n                    -0.69913536,\n                    -0.75695574,\n                    -0.71217597,\n                    -0.60467654,\n                    -0.49103096,\n                    -0.57176715,\n                    -0.5623783,\n                    -0.5702394,\n                    -0.5680598,\n                    -0.52803683,\n                    -0.5789208,\n                    -0.62406635,\n                    -0.5716927,\n                    -1.0776572,\n                    -0.74342746,\n                    -0.7486334,\n                    -0.69127804,\n                    -0.6608637,\n                    -0.9031974,\n                    -0.72085214,\n                    -0.769788,\n                    -0.77804446,\n                    -0.7316041,\n                    -0.95129424,\n                    -0.7048584,\n                    -0.68752366,\n                    -0.6274702,\n                    -0.78758854,\n                    -0.57621306,\n                    -0.980954,\n                    -0.7966236,\n                    -0.70016164,\n                    -0.9555895,\n                    -0.79694474,\n                    -0.8355656,\n                    -0.6856815,\n                    -0.567936,\n                    -0.8354194,\n                    -0.6369338,\n                    -0.81904274,\n                    -0.7119923,\n                    -0.86386496,\n                    -0.7636028,\n                    -1.0274584,\n                    -1.2511095,\n                    -1.3548596,\n                    -1.2078041,\n                    -1.0715795,\n                    -1.3628885,\n                    -0.895939,\n                    -0.79696196,\n                    -1.0516008,\n                    -0.7873496,\n                 
   -0.7411404,\n                    -1.0266874,\n                    -1.6383885,\n                    -0.7790249,\n                    -0.7705543,\n                    -1.0220681,\n                    -1.0377313,\n                    -0.95866215,\n                    -1.0646464,\n                    -1.2318766,\n                    -0.8416069,\n                    -1.0857205,\n                    -1.3508493,\n                    -1.0250691,\n                    -0.7554236,\n                    -1.2693433,\n                    -0.7089957,\n                    -1.5169703,\n                    -0.68931425,\n                    -0.92620236,\n                    -0.8353271,\n                    -0.82917625,\n                    -0.62339437,\n                    -0.99650085,\n                    -1.5622008,\n                    -1.2203263,\n                    -1.1878941,\n                    -1.4765059,\n                    -2.4610686,\n                    -1.033241,\n                    -1.1396645,\n                    -1.6011978,\n                    -1.1930758,\n                    -0.9267733,\n                    -0.9729251,\n                    -1.3341185,\n                    -1.0385826,\n                    -0.6454081,\n                    -0.60673964,\n                    -1.0060118,\n                    -0.9427047,\n                    -0.9003368,\n                    -0.90673447,\n                    -1.2340496,\n                    -1.0492711,\n                    -3.363109,\n                    -2.0330606,\n                    -1.0982486,\n                    -2.7784495,\n                    -1.5517746,\n                    -2.2327852,\n                    -1.1796428,\n                    -2.6232302,\n                    -2.162269,\n                    -3.54792,\n                    -1.6273329,\n                    -1.846263,\n                    -1.3899765,\n                    -4.4950037,\n                    -2.3274353,\n                    -2.5995972,\n  
                  -3.1184244,\n                    -1.998079,\n                    -2.848405,\n                    -1.6703528,\n                    -2.4962456,\n                    -1.6481466,\n                    -3.440863,\n                    -2.6396058,\n                    -2.9867055,\n                    -2.670095,\n                    -1.41553,\n                    -1.5924307,\n                    -1.4283478,\n                    -1.1941426,\n                    -2.6631145,\n                    -1.681505,\n                    -2.669729,\n                    -1.8716806,\n                    -1.4485921,\n                    -3.8915946,\n                    -1.7940518,\n                    -1.3624225,\n                    -2.0018167,\n                    -3.7898927,\n                    -1.0225416,\n                    -1.9008695,\n                    -2.9503803,\n                    -1.1070347,\n                    -1.7413036,\n                    -1.7370498,\n                    -1.1614032,\n                    -1.8687525,\n                    -1.8824614,\n                    -1.1254486,\n                    -1.7149575,\n                    -1.7517456,\n                    -1.09711,\n                    -2.1374192,\n                    -1.3370602,\n                    -1.3006828,\n                    -6.7224092,\n                    -1.5339332,\n                    -1.0457886,\n                    -1.276754,\n                    -3.4331303,\n                    -1.6579875,\n                    -2.1001787,\n                    -1.690451,\n                    -1.8255048,\n                    -1.8420224,\n                    -0.90034497,\n                    -1.0836391,\n                    -2.4854763,\n                    -2.548339,\n                    -1.9398812,\n                    -1.4311638,\n                    -6.7015624,\n                    -1.0492077,\n                    -2.898579,\n                    -3.2530568,\n                    -2.267042,\n   
                 -2.1777713,\n                    -2.7854774,\n                    -1.7165158,\n                    -2.1922739,\n                    -1.6294314,\n                    -3.1127849,\n                    -3.2658153,\n                    -3.1945748,\n                    -2.37653,\n                    -1.6131859,\n                    -1.9520547,\n                    -2.718332,\n                    -1.8190503,\n                    -2.4957001,\n                    -3.5247574,\n                    -1.804782,\n                    -1.4233923,\n                    -2.1619806,\n                    -1.8960294,\n                    -1.3675936,\n                    -1.4663973,\n                    -1.6324245,\n                    -1.3808581,\n                    -2.4436696,\n                    -1.645096,\n                    -0.7788095,\n                    -0.6247076,\n                    -1.2705113,\n                    -1.5623835,\n                    -2.6446254,\n                    -1.5811759,\n                    -1.0504855,\n                    -6.6227217,\n                    -5.9884167\n                ],\n                \"pointIndex\": [\n                    7,\n                    504,\n                    226,\n                    39212145,\n                    24172071,\n                    63328689,\n                    64391118,\n                    80415529,\n                    79126737,\n                    81548709,\n                    77773432,\n                    113781713,\n                    45052060,\n                    63749217,\n                    111353902,\n                    111742931,\n                    59065567,\n                    113400660,\n                    106333244,\n                    92186730,\n                    112305367,\n                    5020307,\n                    19122326,\n                    13956955,\n                    37768826,\n                    72182442,\n                    44295021,\n       
             48397330,\n                    19062050,\n                    105440754,\n                    106754985,\n                    62469862,\n                    67227185,\n                    71449425,\n                    108893739,\n                    84983029,\n                    89703916,\n                    95292207,\n                    57488522,\n                    119775888,\n                    110360335,\n                    105944426,\n                    5519420,\n                    23569782,\n                    103807625,\n                    14868430,\n                    94940360,\n                    38835649,\n                    41730968,\n                    43929060,\n                    43492514,\n                    36987043,\n                    7288799,\n                    60860404,\n                    50942648,\n                    38573793,\n                    115194212,\n                    54955231,\n                    9713218,\n                    10423051,\n                    62160467,\n                    64660088,\n                    67093319,\n                    16165124,\n                    72556718,\n                    97362277,\n                    110787379,\n                    82336653,\n                    84240227,\n                    87221624,\n                    10841983,\n                    107072852,\n                    95670596,\n                    27229074,\n                    104610343,\n                    114059877,\n                    95223577,\n                    122325223,\n                    497\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 226,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n      
          \"randomSeed\": 443867406360206419\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.46440938,\n                    -0.55194354,\n                    -0.5503501,\n                    -0.55612004,\n                    -0.57011396,\n                    -0.5687599,\n                    -0.5504094,\n                    -0.6050909,\n                    -0.5642461,\n                    -0.6262973,\n                    -0.57370365,\n                    -0.63136375,\n                    -0.59516186,\n                    -0.55939776,\n                    -0.59046006,\n                    -0.6264104,\n                    -1.2096254,\n                    -0.60545015,\n                    -1.0312783,\n                    -0.66549975,\n                    -0.7972616,\n                    -0.5882695,\n                    -0.77537113,\n                    -0.6631247,\n                    -0.97287655,\n                    -0.6211128,\n                    -0.6168714,\n                    -0.5831981,\n                    -0.5755868,\n                    -0.9301831,\n                    -0.68822616,\n                    -0.9176543,\n                    -0.89416593,\n                    -1.2748886,\n                    -1.299621,\n                    -0.8893791,\n                    -0.6247632,\n                    -1.1938516,\n                    -1.4159937,\n                    -0.79601777,\n                    -0.81408983,\n                    -0.8329688,\n                    -0.8892465,\n                    -0.660977,\n                    -0.9649444,\n                    -0.8232206,\n                    -1.1116574,\n                    -0.7901643,\n                    -1.1673359,\n                    -0.9826653,\n                    -0.9795712,\n                    -0.65056336,\n                    -0.6972535,\n                    -0.8124611,\n                    -0.8433622,\n                    
-0.6048559,\n                    -0.5874916,\n                    -0.7510074,\n                    -0.9282149,\n                    -1.5678449,\n                    -1.0508904,\n                    -1.1219891,\n                    -0.89590734,\n                    -1.1569976,\n                    -1.2163044,\n                    -0.9758314,\n                    -1.1721662,\n                    -1.6305572,\n                    -1.423851,\n                    -2.877465,\n                    -1.3352257,\n                    -0.94389266,\n                    -1.0725486,\n                    -1.0377557,\n                    -0.6702163,\n                    -2.1633449,\n                    -1.6787019,\n                    -1.5197566,\n                    -1.464606,\n                    -0.84717834,\n                    -0.8617492,\n                    -1.1352758,\n                    -0.8588444,\n                    -1.3158804,\n                    -0.93582493,\n                    -1.3900374,\n                    -1.312389,\n                    -0.8095709,\n                    -1.1310865,\n                    -1.4500014,\n                    -1.0954881,\n                    -1.4290438,\n                    -1.9153944,\n                    -1.1448199,\n                    -1.3486769,\n                    -0.848019,\n                    -1.28376,\n                    -1.5707656,\n                    -2.1327546,\n                    -1.2360355,\n                    -1.8001504,\n                    -2.4112065,\n                    -1.1716948,\n                    -0.7562583,\n                    -0.7529761,\n                    -0.97947717,\n                    -1.4603944,\n                    -0.85283864,\n                    -0.9359614,\n                    -0.87537324,\n                    -1.0744178,\n                    -0.9669737,\n                    -0.65526927,\n                    -0.76069856,\n                    -0.6492787,\n                    -1.1887507,\n     
               -1.4968824,\n                    -1.8659223,\n                    -1.6839557,\n                    -2.8952484,\n                    -2.0116107,\n                    -1.9978883,\n                    -3.5571594,\n                    -3.4723294,\n                    -1.6517309,\n                    -1.8540668,\n                    -2.5876698,\n                    -3.1918757,\n                    -1.4802592,\n                    -3.1979494,\n                    -1.2262383,\n                    -1.3654613,\n                    -1.4973582,\n                    -3.4507692,\n                    -3.1694086,\n                    -1.9254063,\n                    -5.152074,\n                    -1.553401,\n                    -1.8380904,\n                    -3.0006897,\n                    -3.0137157,\n                    -2.1162019,\n                    -1.655828,\n                    -1.031361,\n                    -1.5780444,\n                    -1.4910339,\n                    -2.0867536,\n                    -1.985997,\n                    -1.4112707,\n                    -1.3098238,\n                    -0.8342073,\n                    -2.2895734,\n                    -2.759206,\n                    -2.8878415,\n                    -2.9131627,\n                    -2.8809798,\n                    -2.5492322,\n                    -3.8534236,\n                    -1.8139232,\n                    -1.133239,\n                    -3.4292166,\n                    -6.1477556,\n                    -2.3670046,\n                    -3.3983068,\n                    -1.2114067,\n                    -3.470149,\n                    -2.30582,\n                    -5.7869906,\n                    -2.0662296,\n                    -1.5458041,\n                    -2.3490396,\n                    -3.2671423,\n                    -1.6294608,\n                    -2.9943771,\n                    -2.3128247,\n                    -2.9752448,\n                    -3.2546005,\n  
                  -1.285794,\n                    -1.847425,\n                    -2.8550897,\n                    -1.7487998,\n                    -1.1289887,\n                    -1.5940181,\n                    -2.1344016,\n                    -2.2462327,\n                    -2.1513886,\n                    -3.020991,\n                    -3.056209,\n                    -2.1476986,\n                    -1.449248,\n                    -1.5552889,\n                    -1.5682355,\n                    -1.3316531,\n                    -3.897052,\n                    -1.3796781,\n                    -2.231988,\n                    -1.6785682,\n                    -2.6739948,\n                    -3.0805538,\n                    -3.0601883,\n                    -3.3602982,\n                    -3.3161924,\n                    -2.250506,\n                    -2.8370016,\n                    -3.9633605,\n                    -5.9724317,\n                    -1.6216393,\n                    -1.413277,\n                    -0.8392533,\n                    -5.599172,\n                    -2.7745616,\n                    -2.7916412,\n                    -1.7649142,\n                    -2.2872913,\n                    -1.9738077,\n                    -2.6074562,\n                    -1.1445656,\n                    -1.2890749,\n                    -2.647072,\n                    -2.0012999,\n                    -1.7970166,\n                    -2.9343374,\n                    -2.818287,\n                    -3.303542,\n                    -0.97759485,\n                    -2.3368385,\n                    -2.182434,\n                    -3.0812023,\n                    -1.9679145,\n                    -0.9874132,\n                    -0.72272855,\n                    -2.6502638,\n                    -1.4007757,\n                    -1.6466194\n                ],\n                \"pointIndex\": [\n                    0,\n                    504,\n                    234,\n   
                 69537992,\n                    91445418,\n                    19390374,\n                    28720100,\n                    24477262,\n                    57926693,\n                    19248800,\n                    6294096,\n                    107055478,\n                    1769480,\n                    89281685,\n                    49819340,\n                    14918544,\n                    66508569,\n                    19778600,\n                    85497112,\n                    34258159,\n                    116497451,\n                    124776842,\n                    60003625,\n                    7548891,\n                    59501782,\n                    103625696,\n                    125092715,\n                    52468681,\n                    618331,\n                    61076944,\n                    105684622,\n                    91178763,\n                    11364797,\n                    107376926,\n                    31656572,\n                    83561758,\n                    120265579,\n                    97611816,\n                    14491424,\n                    114217289,\n                    123716365,\n                    128784073,\n                    15728235,\n                    32719135,\n                    183378,\n                    42413085,\n                    89850175,\n                    71310800,\n                    18559145,\n                    60809821,\n                    110463371,\n                    106184965,\n                    51872121,\n                    102370607,\n                    9800332,\n                    57747978,\n                    22822474,\n                    103522097,\n                    9064348,\n                    11101780,\n                    99851390,\n                    117196965,\n                    73336535,\n                    75285846,\n                    27693273,\n                    28463006,\n                    80378635,\n            
        80900038,\n                    30152149,\n                    96680559,\n                    90712670,\n                    96075623,\n                    99012189,\n                    102751726,\n                    56593235,\n                    110128911,\n                    112942886,\n                    34690435,\n                    35691352,\n                    35441887,\n                    36498903\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 234,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 2055649794410205326\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.44288203,\n                    -0.4733615,\n                    -0.45990443,\n                    -0.5049441,\n                    -0.48364964,\n                    -0.46929833,\n                    -0.5951013,\n                    -0.51562756,\n                    -0.50766015,\n                    -0.51539856,\n                    -0.5068618,\n                    -0.5359355,\n                    -0.51409596,\n                    -0.6012245,\n                    -0.672219,\n                    -0.5989203,\n                    -0.53809875,\n                    -0.5351609,\n                    -0.53932685,\n                    -0.5565911,\n                    -0.52460414,\n                    -0.774303,\n                    -0.566129,\n                    -0.54035103,\n                    -0.7583755,\n                    -0.5659491,\n                    -0.5352195,\n                    -0.68098015,\n                    -0.6640686,\n                    -0.7816395,\n                    -0.77675265,\n   
                 -0.8260454,\n                    -0.6643803,\n                    -0.8596022,\n                    -0.6064849,\n                    -0.6059143,\n                    -0.68557113,\n                    -0.58883345,\n                    -0.6485081,\n                    -0.6736849,\n                    -0.7512264,\n                    -0.5525414,\n                    -0.7294364,\n                    -0.79328084,\n                    -1.1318378,\n                    -0.6086408,\n                    -0.73085433,\n                    -0.620078,\n                    -0.6123451,\n                    -0.8122214,\n                    -1.075476,\n                    -0.62589544,\n                    -0.6219365,\n                    -0.8617798,\n                    -0.60795546,\n                    -1.1461984,\n                    -0.89067245,\n                    -0.70753044,\n                    -0.97933424,\n                    -1.4291743,\n                    -1.1285313,\n                    -1.6885188,\n                    -1.0900038,\n                    -1.0275611,\n                    -1.0758282,\n                    -1.024274,\n                    -1.3263488,\n                    -2.243545,\n                    -0.8787395,\n                    -0.97567546,\n                    -0.79980785,\n                    -0.6283274,\n                    -0.71997434,\n                    -0.72288865,\n                    -0.7504799,\n                    -0.92405844,\n                    -0.68834955,\n                    -0.8254108,\n                    -1.2518294,\n                    -1.4317335,\n                    -0.81669384,\n                    -1.2866132,\n                    -1.3988098,\n                    -0.9238192,\n                    -0.82263625,\n                    -0.8600533,\n                    -1.1017289,\n                    -0.929503,\n                    -0.8326041,\n                    -1.9470408,\n                    -1.1576751,\n           
         -1.2693574,\n                    -0.828232,\n                    -1.6521869,\n                    -1.1554543,\n                    -0.7606298,\n                    -1.7863603,\n                    -1.0608599,\n                    -0.7879148,\n                    -0.8942874,\n                    -1.3608972,\n                    -1.4805124,\n                    -1.3453927,\n                    -1.2434686,\n                    -0.62647575,\n                    -0.9385256,\n                    -0.76255625,\n                    -1.2914335,\n                    -1.3336471,\n                    -0.7012181,\n                    -1.076786,\n                    -1.3590081,\n                    -1.7965043,\n                    -1.2802029,\n                    -1.2463863,\n                    -0.9444799,\n                    -0.8902152,\n                    -5.1300464,\n                    -1.9487566,\n                    -2.5945091,\n                    -3.9016354,\n                    -1.3708203,\n                    -1.7007787,\n                    -5.8792505,\n                    -2.924918,\n                    -2.0084155,\n                    -1.5516672,\n                    -1.9452276,\n                    -2.7344933,\n                    -2.4761894,\n                    -2.3559952,\n                    -1.6918682,\n                    -4.0521593,\n                    -2.8841708,\n                    -4.250247,\n                    -2.6824002,\n                    -2.351825,\n                    -7.0725884,\n                    -3.8799038,\n                    -1.0702031,\n                    -1.7500367,\n                    -1.7154711,\n                    -2.2885256,\n                    -1.1017034,\n                    -1.0053222,\n                    -3.9765737,\n                    -0.8603018,\n                    -1.6386056,\n                    -1.4199564,\n                    -1.0428835,\n                    -2.1775246,\n                    -1.817723,\n  
                  -1.5003532,\n                    -4.018854,\n                    -4.3493705,\n                    -0.86794126,\n                    -1.9145643,\n                    -1.7242973,\n                    -2.7774487,\n                    -2.593774,\n                    -2.8310463,\n                    -1.1839824,\n                    -0.99695164,\n                    -1.3106467,\n                    -2.2921367,\n                    -2.6983166,\n                    -1.6927524,\n                    -2.2582834,\n                    -1.862477,\n                    -1.604147,\n                    -1.1071069,\n                    -1.5620903,\n                    -1.6573658,\n                    -3.5879345,\n                    -1.8574893,\n                    -5.450627,\n                    -2.953317,\n                    -1.379029,\n                    -3.140232,\n                    -2.237678,\n                    -2.3773768,\n                    -1.4420193,\n                    -1.2076032,\n                    -1.6367044,\n                    -1.5738162,\n                    -2.370365,\n                    -2.294873,\n                    -2.259818,\n                    -3.3089173,\n                    -2.3828115,\n                    -1.4231418,\n                    -2.0798078,\n                    -3.65892,\n                    -2.782997,\n                    -2.4264176,\n                    -3.1848116,\n                    -2.5167139,\n                    -2.035897,\n                    -0.9078463,\n                    -1.9608375,\n                    -1.0440569,\n                    -2.198517,\n                    -3.9061434,\n                    -2.4758701,\n                    -2.5298085,\n                    -2.2807584,\n                    -1.8718891,\n                    -1.7454989,\n                    -1.5738424,\n                    -1.5848371,\n                    -1.9713155,\n                    -1.3583566,\n                    -1.9085646,\n    
                -0.78653663,\n                    -0.91740644,\n                    -1.4229441,\n                    -4.3172007,\n                    -2.259679,\n                    -1.9394193,\n                    -2.3271387,\n                    -2.017966,\n                    -1.5892746,\n                    -1.1223392,\n                    -3.2935107,\n                    -1.9982659,\n                    -2.696077,\n                    -4.7974358,\n                    -4.0544963,\n                    -1.7201512,\n                    -4.663512,\n                    -2.5073416,\n                    -2.0538328,\n                    -1.1117074,\n                    -1.6175225,\n                    -1.0261378\n                ],\n                \"pointIndex\": [\n                    0,\n                    504,\n                    235,\n                    99588220,\n                    76013627,\n                    19457571,\n                    85439945,\n                    34908503,\n                    53846803,\n                    63291537,\n                    28617007,\n                    56293943,\n                    38760239,\n                    45478888,\n                    49224639,\n                    20952121,\n                    10945812,\n                    70524642,\n                    113401068,\n                    89973135,\n                    32545594,\n                    121351398,\n                    30164645,\n                    41959089,\n                    16114615,\n                    34039935,\n                    15397635,\n                    65264377,\n                    56470190,\n                    60045792,\n                    120239339,\n                    65801832,\n                    121529019,\n                    72480908,\n                    107004937,\n                    81560839,\n                    12924994,\n                    92743100,\n                    63497,\n                    
107566904,\n                    116513017,\n                    126651450,\n                    37815476,\n                    100255780,\n                    41648880,\n                    41052862,\n                    44501779,\n                    45860742,\n                    23535023,\n                    22280250,\n                    48943274,\n                    49573865,\n                    51773427,\n                    101774133,\n                    20817324,\n                    57752019,\n                    59792965,\n                    82798362,\n                    10579199,\n                    66130504,\n                    112340598,\n                    69028211,\n                    40608197,\n                    103680196,\n                    73078149,\n                    67610248,\n                    28456353,\n                    77633952,\n                    83048960,\n                    84438350,\n                    89740547,\n                    98945000,\n                    93624260,\n                    102291671,\n                    124136951,\n                    100718338,\n                    50707467,\n                    109734397,\n                    118807444,\n                    126147953,\n                    128014075,\n                    504\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 235,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 5184381769321948633\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5680312,\n                    -0.57754153,\n                    -0.5803335,\n                    -0.58679545,\n      
              -0.5828035,\n                    -0.58446527,\n                    -0.59277546,\n                    -0.6110242,\n                    -0.6347451,\n                    -0.650318,\n                    -0.5963477,\n                    -0.589384,\n                    -0.608581,\n                    -0.6600299,\n                    -0.65432173,\n                    -0.618048,\n                    -0.89475805,\n                    -0.69706285,\n                    -0.65788984,\n                    -0.7013913,\n                    -0.66922814,\n                    -0.82567894,\n                    -0.7393027,\n                    -0.6538014,\n                    -0.62519866,\n                    -0.68216234,\n                    -0.6155415,\n                    -0.68521684,\n                    -0.68252844,\n                    -0.8635453,\n                    -0.6732785,\n                    -0.95870876,\n                    -0.683223,\n                    -1.0384604,\n                    -1.0183295,\n                    -1.1785916,\n                    -0.7090885,\n                    -0.89864945,\n                    -1.1735743,\n                    -0.99098986,\n                    -1.2033477,\n                    -0.8001452,\n                    -1.0698832,\n                    -1.0053589,\n                    -1.1478246,\n                    -0.82658064,\n                    -0.7682891,\n                    -1.2017244,\n                    -0.71174425,\n                    -0.6443983,\n                    -0.67368025,\n                    -0.7728953,\n                    -0.8445695,\n                    -0.74427897,\n                    -0.75514364,\n                    -0.7461769,\n                    -0.74116397,\n                    -0.82120323,\n                    -1.0424049,\n                    -1.4037803,\n                    -1.8726203,\n                    -0.74156517,\n                    -1.0557835,\n                    -0.96516573,\n       
             -1.1816691,\n                    -1.1345637,\n                    -0.8071123,\n                    -1.1318005,\n                    -1.5799372,\n                    -1.0468409,\n                    -1.3268329,\n                    -1.1965047,\n                    -1.5018703,\n                    -1.9639885,\n                    -0.9940779,\n                    -1.2910422,\n                    -1.6323619,\n                    -1.2475739,\n                    -1.5904983,\n                    -2.8499577,\n                    -1.687947,\n                    -1.3974866,\n                    -1.5153632,\n                    -1.2589599,\n                    -1.3481356,\n                    -1.0943612,\n                    -1.3469661,\n                    -1.0530231,\n                    -1.0110728,\n                    -2.2912538,\n                    -2.0522537,\n                    -1.311908,\n                    -1.1040617,\n                    -1.49376,\n                    -0.9379908,\n                    -1.2910535,\n                    -1.2647202,\n                    -1.465746,\n                    -1.0292729,\n                    -0.903422,\n                    -0.9057903,\n                    -1.0987015,\n                    -0.9196338,\n                    -1.5492829,\n                    -1.8007623,\n                    -0.8688556,\n                    -1.5213107,\n                    -1.3652006,\n                    -0.7674869,\n                    -1.199262,\n                    -1.0817316,\n                    -0.7907398,\n                    -0.8617374,\n                    -1.2053959,\n                    -0.7756612,\n                    -0.8560713,\n                    -0.8616877,\n                    -1.3875729,\n                    -1.4480209,\n                    -7.3676667,\n                    -1.7680417,\n                    -1.9234877,\n                    -2.000328,\n                    -1.7633696,\n                    -3.9967668,\n  
                  -1.7751657,\n                    -1.7586172,\n                    -1.360004,\n                    -2.5315728,\n                    -1.2654521,\n                    -2.6796257,\n                    -2.3294647,\n                    -2.5737264,\n                    -3.0398452,\n                    -1.5801363,\n                    -1.390896,\n                    -1.3436232,\n                    -4.408353,\n                    -2.7197933,\n                    -5.5573773,\n                    -1.6548759,\n                    -1.8081745,\n                    -4.581191,\n                    -2.2083158,\n                    -1.2077483,\n                    -1.9272752,\n                    -2.1264136,\n                    -2.210442,\n                    -2.9746046,\n                    -1.8114252,\n                    -1.5349617,\n                    -5.21763,\n                    -3.5900617,\n                    -4.7764673,\n                    -2.1318388,\n                    -1.4026515,\n                    -2.02728,\n                    -4.707749,\n                    -2.9943347,\n                    -4.005021,\n                    -2.996215,\n                    -3.1028323,\n                    -2.4414601,\n                    -2.6545715,\n                    -1.9498566,\n                    -2.0314212,\n                    -2.6523108,\n                    -1.9968216,\n                    -1.4356999,\n                    -2.427331,\n                    -1.6129284,\n                    -1.7536944,\n                    -1.2429045,\n                    -2.0530457,\n                    -6.1134977,\n                    -3.2139742,\n                    -1.5324388,\n                    -1.9248765,\n                    -2.0524924,\n                    -3.6633136,\n                    -2.5591948,\n                    -3.6681902,\n                    -3.4150784,\n                    -4.0795193,\n                    -3.0952582,\n                    -1.2616416,\n  
                  -3.3483067,\n                    -3.7493348,\n                    -1.9752245,\n                    -1.8228238,\n                    -2.7378254,\n                    -2.3111594,\n                    -1.3959323,\n                    -3.6355662,\n                    -1.5012057,\n                    -1.8132061,\n                    -1.6209233,\n                    -1.9733783,\n                    -2.1340463,\n                    -1.2205851,\n                    -0.9665645,\n                    -1.0465889,\n                    -1.7400819,\n                    -1.2885246,\n                    -1.5000305,\n                    -1.0893486,\n                    -1.8212684,\n                    -2.766409,\n                    -2.371216,\n                    -2.1498919,\n                    -2.5711405,\n                    -1.5781529,\n                    -0.9767635,\n                    -2.1542985,\n                    -3.195587,\n                    -1.8495193,\n                    -1.589415,\n                    -1.8028818,\n                    -0.8964406,\n                    -6.5694284,\n                    -2.386041,\n                    -1.7140924,\n                    -2.8135862,\n                    -3.413123,\n                    -1.8383019,\n                    -2.5924628,\n                    -1.0788869,\n                    -3.4721863,\n                    -2.3472803,\n                    -1.513369,\n                    -2.005564,\n                    -0.9147193\n                ],\n                \"pointIndex\": [\n                    3,\n                    504,\n                    232,\n                    38649302,\n                    11990578,\n                    81992637,\n                    93291768,\n                    65762442,\n                    18485813,\n                    61602633,\n                    5179386,\n                    102261781,\n                    36540032,\n                    41990296,\n                    
76448885,\n                    54237680,\n                    4518924,\n                    67995138,\n                    119087849,\n                    95016431,\n                    108065424,\n                    18145236,\n                    6621405,\n                    82602739,\n                    13689195,\n                    34020832,\n                    47915466,\n                    60266361,\n                    95866064,\n                    22470217,\n                    68911772,\n                    23959189,\n                    87592514,\n                    123638180,\n                    84840840,\n                    88626199,\n                    33952373,\n                    109338161,\n                    77079998,\n                    113625126,\n                    122206333,\n                    33585290,\n                    66678691,\n                    40366891,\n                    8381952,\n                    86515696,\n                    7458778,\n                    74425191,\n                    16869744,\n                    109422026,\n                    47664082,\n                    73178736,\n                    19053106,\n                    51977784,\n                    53968219,\n                    12275988,\n                    79571602,\n                    103617253,\n                    9721202,\n                    60610567,\n                    43523358,\n                    62506069,\n                    66909823,\n                    68497670,\n                    79771835,\n                    71962373,\n                    26767973,\n                    85851873,\n                    78799049,\n                    28153276,\n                    48373087,\n                    96705395,\n                    82853479,\n                    103905866,\n                    12809656,\n                    108756209,\n                    112969720,\n                    117158318,\n                    
121449827,\n                    125021075,\n                    501\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 232,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 9169886588161009142\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.53035605,\n                    -0.5324966,\n                    -0.55486125,\n                    -0.5330728,\n                    -0.5471217,\n                    -0.58455336,\n                    -0.5933521,\n                    -0.5599782,\n                    -0.6342549,\n                    -0.6387794,\n                    -0.55667377,\n                    -0.6493626,\n                    -0.74867827,\n                    -0.6670967,\n                    -0.6307774,\n                    -0.6196332,\n                    -0.5918868,\n                    -0.65830445,\n                    -0.6425075,\n                    -0.8268777,\n                    -0.6852247,\n                    -0.56706643,\n                    -0.6376095,\n                    -0.70883864,\n                    -0.9732487,\n                    -0.7535966,\n                    -0.76869935,\n                    -0.7472471,\n                    -0.923651,\n                    -0.7573382,\n                    -0.93107444,\n                    -0.6483894,\n                    -0.70469373,\n                    -0.6645228,\n                    -0.7695643,\n                    -0.80381304,\n                    -0.66245395,\n                    -0.82613385,\n                    -0.695198,\n                    -0.9515421,\n                    -1.010398,\n                    
-0.7056208,\n                    -0.76953393,\n                    -0.9928844,\n                    -0.59132975,\n                    -0.99380237,\n                    -0.8515312,\n                    -0.82218546,\n                    -1.1088846,\n                    -1.0138144,\n                    -1.0554961,\n                    -1.2125427,\n                    -0.861188,\n                    -0.77022,\n                    -0.9146566,\n                    -0.8259621,\n                    -0.76882243,\n                    -1.2034107,\n                    -1.0809807,\n                    -1.5401542,\n                    -0.91514635,\n                    -1.3669915,\n                    -1.4481707,\n                    -1.409995,\n                    -0.85730594,\n                    -1.5939871,\n                    -0.73895526,\n                    -0.66953665,\n                    -0.77782303,\n                    -0.88488233,\n                    -1.0499003,\n                    -1.2570091,\n                    -1.1976534,\n                    -0.92151576,\n                    -1.182133,\n                    -0.833123,\n                    -0.9230005,\n                    -0.8967325,\n                    -1.0337293,\n                    -1.2166122,\n                    -0.99116784,\n                    -1.1490904,\n                    -1.1511246,\n                    -1.397479,\n                    -0.9786536,\n                    -1.402299,\n                    -0.94586,\n                    -1.2771218,\n                    -1.0545266,\n                    -0.59421927,\n                    -1.1618001,\n                    -1.8738767,\n                    -1.1558522,\n                    -1.8159966,\n                    -0.89378375,\n                    -0.8380161,\n                    -1.3560284,\n                    -2.3792744,\n                    -1.461425,\n                    -1.2379498,\n                    -1.2818031,\n                    -1.9932383,\n   
                 -1.3166027,\n                    -1.5687723,\n                    -1.9454088,\n                    -1.445117,\n                    -0.9366784,\n                    -0.92885256,\n                    -1.3723916,\n                    -1.2701558,\n                    -1.0020772,\n                    -1.0913596,\n                    -1.3008364,\n                    -1.4996603,\n                    -0.7830359,\n                    -1.7696719,\n                    -1.9255613,\n                    -3.2565742,\n                    -1.7884704,\n                    -6.904876,\n                    -2.9134037,\n                    -1.9650456,\n                    -2.221382,\n                    -2.0634215,\n                    -1.5779783,\n                    -2.0036862,\n                    -2.2380946,\n                    -2.1677172,\n                    -4.0049834,\n                    -1.6050944,\n                    -3.2902942,\n                    -4.6903667,\n                    -3.661412,\n                    -1.959162,\n                    -2.357832,\n                    -1.53947,\n                    -2.2820132,\n                    -2.1057246,\n                    -2.0005455,\n                    -3.5140362,\n                    -4.0113697,\n                    -2.8073394,\n                    -1.3974141,\n                    -3.510641,\n                    -1.5597332,\n                    -1.6886966,\n                    -3.7408767,\n                    -2.8858912,\n                    -3.572505,\n                    -1.8949933,\n                    -1.8129742,\n                    -4.6692724,\n                    -1.0588484,\n                    -2.3928235,\n                    -2.532507,\n                    -1.4463307,\n                    -3.0277712,\n                    -2.9297788,\n                    -1.9894695,\n                    -1.958042,\n                    -1.8407285,\n                    -1.2177694,\n                    -1.1468236,\n 
                   -1.2246511,\n                    -1.1679093,\n                    -1.5983938,\n                    -2.6404345,\n                    -1.760991,\n                    -2.9862454,\n                    -2.6671734,\n                    -1.3666124,\n                    -2.2798762,\n                    -4.07843,\n                    -1.3272638,\n                    -1.1056348,\n                    -1.3501083,\n                    -2.066305,\n                    -3.0409312,\n                    -1.3775028,\n                    -0.90984005,\n                    -0.72415125,\n                    -1.6538361,\n                    -4.1680713,\n                    -1.953995,\n                    -5.421199,\n                    -1.7286031,\n                    -2.956821,\n                    -1.9774354,\n                    -2.0668619,\n                    -1.656342,\n                    -1.2232676,\n                    -0.9585533,\n                    -1.0880358,\n                    -3.5506268,\n                    -2.2162747,\n                    -2.78535,\n                    -2.4571283,\n                    -2.4578626,\n                    -1.8101616,\n                    -1.5967966,\n                    -1.3759114,\n                    -1.5432861,\n                    -2.9304342,\n                    -2.0907807,\n                    -2.9865646,\n                    -1.413012,\n                    -2.822878,\n                    -2.1518707,\n                    -1.6998991,\n                    -2.7407525,\n                    -2.152089,\n                    -4.4406257,\n                    -2.0288308,\n                    -1.1662818,\n                    -2.3149743,\n                    -1.0508428,\n                    -4.1577044,\n                    -1.7287313,\n                    -5.233291,\n                    -2.2002838,\n                    -2.4515307,\n                    -3.343693,\n                    -1.1925954,\n                    -3.403616,\n  
                  -1.1421292,\n                    -2.327714,\n                    -2.8356974,\n                    -3.2215524,\n                    -1.7986864,\n                    -1.0376507,\n                    -1.9582424,\n                    -3.4708614\n                ],\n                \"pointIndex\": [\n                    1,\n                    504,\n                    232,\n                    95940876,\n                    85658404,\n                    48807258,\n                    5729481,\n                    37837239,\n                    9161211,\n                    22735646,\n                    28106346,\n                    68013783,\n                    38134148,\n                    7907680,\n                    49992278,\n                    54106613,\n                    59102940,\n                    69489239,\n                    77880382,\n                    49446833,\n                    105355591,\n                    118604499,\n                    120370463,\n                    40569547,\n                    16849157,\n                    64352906,\n                    47340019,\n                    82652660,\n                    20898060,\n                    21545586,\n                    22413604,\n                    63804236,\n                    66767259,\n                    116482736,\n                    31396222,\n                    29627447,\n                    101324628,\n                    108731924,\n                    119581943,\n                    33754835,\n                    30549495,\n                    7112415,\n                    44789791,\n                    68914851,\n                    16082884,\n                    41912800,\n                    98721568,\n                    47027405,\n                    41238752,\n                    45725007,\n                    18889598,\n                    114310282,\n                    105856109,\n                    52174821,\n                    
9503028,\n                    2075681,\n                    91137023,\n                    81857977,\n                    58803769,\n                    73782073,\n                    61882364,\n                    114571543,\n                    41576065,\n                    91505261,\n                    72539750,\n                    75591724,\n                    77472155,\n                    12287422,\n                    83482431,\n                    35221081,\n                    2994602,\n                    88829860,\n                    95147664,\n                    35610587,\n                    32033118,\n                    110962144,\n                    115815305,\n                    24617835,\n                    122676595,\n                    127514003,\n                    142\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 232,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 7625825163713561172\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.68963563,\n                    -0.71228534,\n                    -0.69185126,\n                    -0.72458655,\n                    -0.73350304,\n                    -0.70023793,\n                    -0.6973863,\n                    -0.7521709,\n                    -0.74596405,\n                    -0.75555176,\n                    -0.7994502,\n                    -0.70423424,\n                    -0.71557814,\n                    -0.80184543,\n                    -0.7378255,\n                    -0.90932745,\n                    -0.8513464,\n                    -0.945497,\n                    -0.75618273,\n         
           -0.9176568,\n                    -0.91030234,\n                    -0.870406,\n                    -0.8781088,\n                    -0.7445866,\n                    -0.7060583,\n                    -0.8570121,\n                    -0.71676576,\n                    -0.9451282,\n                    -0.98158306,\n                    -1.0464047,\n                    -1.2191277,\n                    -1.2070265,\n                    -1.1122887,\n                    -0.91584504,\n                    -0.9098384,\n                    -1.0282311,\n                    -1.0526453,\n                    -1.0620815,\n                    -0.9422277,\n                    -0.98331505,\n                    -1.0915213,\n                    -0.9253102,\n                    -0.972425,\n                    -1.5254942,\n                    -1.0299138,\n                    -1.0033057,\n                    -0.9877612,\n                    -0.8424406,\n                    -0.7661645,\n                    -0.8719801,\n                    -0.74254644,\n                    -0.87919515,\n                    -1.1128438,\n                    -0.82937247,\n                    -0.8437849,\n                    -1.4884276,\n                    -1.8097106,\n                    -1.6915003,\n                    -1.1752342,\n                    -1.2706611,\n                    -1.5071601,\n                    -1.4045566,\n                    -2.038409,\n                    -2.151111,\n                    -2.0421367,\n                    -1.4022131,\n                    -1.5389507,\n                    -1.3170675,\n                    -1.315749,\n                    -0.93967783,\n                    -1.2088668,\n                    -1.3546461,\n                    -1.1657493,\n                    -1.3647672,\n                    -1.3999888,\n                    -1.6134369,\n                    -1.1775569,\n                    -1.5266851,\n                    -1.5846884,\n                    
-1.1892252,\n                    -2.184959,\n                    -1.4332705,\n                    -1.6650143,\n                    -1.119504,\n                    -1.1874212,\n                    -1.0438701,\n                    -1.1250359,\n                    -1.9323465,\n                    -2.3231916,\n                    -1.4397689,\n                    -1.2220843,\n                    -1.2882428,\n                    -1.0033833,\n                    -1.0603511,\n                    -0.98936176,\n                    -1.4499718,\n                    -0.9140737,\n                    -1.0238861,\n                    -1.1773385,\n                    -0.9237867,\n                    -1.4686022,\n                    -0.97709936,\n                    -0.75256306,\n                    -1.2221575,\n                    -1.3114357,\n                    -1.1347244,\n                    -1.7514151,\n                    -1.123055,\n                    -0.8995096,\n                    -0.86474705,\n                    -0.85333395,\n                    -2.048599,\n                    -1.9980121,\n                    -2.3663135,\n                    -1.9952893,\n                    -2.4167542,\n                    -1.9417686,\n                    -2.6963432,\n                    -1.3648587,\n                    -3.931102,\n                    -2.0329041,\n                    -4.328194,\n                    -2.0767705,\n                    -1.6628162,\n                    -5.6688666,\n                    -3.5560164,\n                    -2.79209,\n                    -2.689386,\n                    -3.5538812,\n                    -3.9373045,\n                    -2.5425057,\n                    -1.4241658,\n                    -2.6849244,\n                    -2.719885,\n                    -1.6209738,\n                    -3.384303,\n                    -2.4125133,\n                    -2.8833256,\n                    -1.3654975,\n                    -1.449889,\n              
      -1.0091305,\n                    -2.6887238,\n                    -1.2313497,\n                    -1.5814414,\n                    -4.6353374,\n                    -2.5800967,\n                    -2.6386955,\n                    -4.811471,\n                    -2.0467079,\n                    -1.5542631,\n                    -4.6060424,\n                    -1.8895761,\n                    -1.7013047,\n                    -1.4387131,\n                    -1.9494822,\n                    -2.21773,\n                    -2.4674315,\n                    -1.9776376,\n                    -2.1961777,\n                    -3.6641738,\n                    -1.4983013,\n                    -3.065605,\n                    -2.5193455,\n                    -2.4397542,\n                    -1.6718928,\n                    -1.8533777,\n                    -3.6097572,\n                    -2.178392,\n                    -1.6004324,\n                    -1.533436,\n                    -1.2607406,\n                    -1.5537279,\n                    -3.823595,\n                    -2.2444324,\n                    -1.2052363,\n                    -2.4344325,\n                    -1.996442,\n                    -2.5809674,\n                    -2.397142,\n                    -1.9402657,\n                    -3.9277935,\n                    -1.531284,\n                    -1.3297614,\n                    -1.4778779,\n                    -1.8241446,\n                    -2.1734576,\n                    -1.2779739,\n                    -1.3039699,\n                    -1.2775682,\n                    -3.7285767,\n                    -1.7983359,\n                    -2.3423522,\n                    -3.0988562,\n                    -5.8211617,\n                    -1.0550694,\n                    -5.147151,\n                    -1.402445,\n                    -1.5902166,\n                    -4.214791,\n                    -1.2044046,\n                    -2.8841598,\n              
      -1.496543,\n                    -1.6186411,\n                    -2.2231202,\n                    -3.8776445,\n                    -1.3348498,\n                    -3.221405,\n                    -1.6098989,\n                    -2.5724285,\n                    -5.3377237,\n                    -1.8548803,\n                    -1.764812,\n                    -1.3415471,\n                    -5.20307,\n                    -2.0851576,\n                    -1.914553,\n                    -2.0306556,\n                    -1.0157343,\n                    -1.0076644,\n                    -1.2212769,\n                    -1.0830055,\n                    -1.6060807,\n                    -5.5612807\n                ],\n                \"pointIndex\": [\n                    2,\n                    503,\n                    223,\n                    39112803,\n                    104990114,\n                    81937957,\n                    100213345,\n                    38376824,\n                    93871402,\n                    106658479,\n                    78636624,\n                    124971297,\n                    15024299,\n                    17742985,\n                    72679231,\n                    55114284,\n                    61605062,\n                    80242238,\n                    79029880,\n                    97582134,\n                    117911316,\n                    39630890,\n                    63755532,\n                    15610385,\n                    27111429,\n                    44667628,\n                    95898681,\n                    55537610,\n                    105882141,\n                    57752310,\n                    107102110,\n                    71106035,\n                    8477543,\n                    75496573,\n                    110934773,\n                    88507968,\n                    98972924,\n                    112105035,\n                    86434716,\n                    120953345,\n        
            21224130,\n                    34552766,\n                    3241555,\n                    15145986,\n                    16709245,\n                    4633963,\n                    3355562,\n                    43882594,\n                    44439062,\n                    46207160,\n                    1501667,\n                    48161858,\n                    32358115,\n                    23484559,\n                    23372477,\n                    56610622,\n                    45983463,\n                    92602974,\n                    60857406,\n                    63897868,\n                    65522801,\n                    68204117,\n                    11338446,\n                    80785638,\n                    110850108,\n                    77107000,\n                    94909029,\n                    86604853,\n                    109040266,\n                    106369672,\n                    97466705,\n                    103041026,\n                    29702452,\n                    114035833,\n                    115899372,\n                    118423772,\n                    123722046,\n                    501\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 223,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -1521360444406908161\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.448413,\n                    -0.4563397,\n                    -0.45389304,\n                    -0.46477798,\n                    -0.47320774,\n                    -0.47462812,\n                    -0.47083667,\n                    -0.61239666,\n                    
-0.5819031,\n                    -0.5328883,\n                    -0.55435115,\n                    -0.5672333,\n                    -0.49760112,\n                    -0.5436042,\n                    -0.59967595,\n                    -0.64196426,\n                    -0.77108985,\n                    -0.5990893,\n                    -0.68164337,\n                    -0.6658914,\n                    -0.6779405,\n                    -0.65547,\n                    -0.57186633,\n                    -0.7292096,\n                    -0.6201713,\n                    -0.5016417,\n                    -0.5391519,\n                    -0.5894515,\n                    -0.67855585,\n                    -0.793728,\n                    -0.95858973,\n                    -0.75544524,\n                    -1.1236985,\n                    -0.7778626,\n                    -0.87012255,\n                    -0.6094903,\n                    -0.8075542,\n                    -0.6905392,\n                    -0.7119214,\n                    -0.7561649,\n                    -0.7637378,\n                    -0.74645245,\n                    -0.7054175,\n                    -0.7976067,\n                    -1.184815,\n                    -0.679329,\n                    -0.6759231,\n                    -0.80786866,\n                    -0.802765,\n                    -0.73436123,\n                    -0.7933314,\n                    -0.7040496,\n                    -0.5593558,\n                    -0.58655614,\n                    -0.60826343,\n                    -0.6955069,\n                    -0.79697454,\n                    -2.0321658,\n                    -2.3671505,\n                    -1.2036948,\n                    -0.91650933,\n                    -1.2221347,\n                    -1.4929575,\n                    -0.82133824,\n                    -0.99631953,\n                    -1.2400419,\n                    -2.2688756,\n                    -0.8122746,\n                    
-1.5502583,\n                    -0.95800006,\n                    -0.9741957,\n                    -0.91015464,\n                    -1.0454623,\n                    -1.6839831,\n                    -0.8809304,\n                    -1.0153738,\n                    -0.81790936,\n                    -1.3347377,\n                    -0.85336673,\n                    -1.419818,\n                    -1.2422366,\n                    -1.3848045,\n                    -1.8259623,\n                    -0.8008377,\n                    -1.4634923,\n                    -1.2378216,\n                    -0.83597064,\n                    -1.2431781,\n                    -1.0476589,\n                    -1.1947871,\n                    -1.502235,\n                    -1.1875122,\n                    -1.0802674,\n                    -0.7131609,\n                    -1.303804,\n                    -0.93148845,\n                    -0.868008,\n                    -0.91230166,\n                    -1.2311869,\n                    -0.7778201,\n                    -0.8125608,\n                    -1.0481919,\n                    -1.0332779,\n                    -2.4180708,\n                    -0.90919775,\n                    -1.0201457,\n                    -0.74089086,\n                    -0.7694123,\n                    -0.6068406,\n                    -1.9599571,\n                    -0.64965326,\n                    -0.80325395,\n                    -0.70115393,\n                    -1.4920911,\n                    -2.105182,\n                    -2.593633,\n                    -4.9075894,\n                    -2.4500175,\n                    -2.6866624,\n                    -2.011585,\n                    -1.2831956,\n                    -1.6990656,\n                    -5.3042397,\n                    -4.074707,\n                    -2.3969848,\n                    -1.8422551,\n                    -1.7336991,\n                    -2.014161,\n                    -1.1614549,\n    
                -2.387507,\n                    -2.2531638,\n                    -1.5358254,\n                    -2.0135787,\n                    -2.4276953,\n                    -2.544838,\n                    -0.8222214,\n                    -1.783477,\n                    -2.3671846,\n                    -2.0942051,\n                    -1.530744,\n                    -3.109618,\n                    -2.4571917,\n                    -4.8672504,\n                    -1.750401,\n                    -1.1609975,\n                    -1.5165246,\n                    -2.134081,\n                    -1.7671065,\n                    -2.9051976,\n                    -2.3550234,\n                    -1.3915247,\n                    -1.6891075,\n                    -4.364541,\n                    -1.8734646,\n                    -1.9972665,\n                    -1.9541153,\n                    -1.9995427,\n                    -1.3496068,\n                    -0.99282694,\n                    -2.723645,\n                    -2.133502,\n                    -1.6864717,\n                    -1.82967,\n                    -1.3975633,\n                    -2.5430946,\n                    -2.8813655,\n                    -2.819163,\n                    -2.3204596,\n                    -1.1076525,\n                    -1.9235603,\n                    -1.9012809,\n                    -1.8713926,\n                    -1.4276859,\n                    -5.034833,\n                    -1.3538642,\n                    -1.7771684,\n                    -2.354052,\n                    -3.5404406,\n                    -1.6530912,\n                    -2.0195994,\n                    -2.6805522,\n                    -3.3481433,\n                    -2.3543854,\n                    -1.8849224,\n                    -1.7921944,\n                    -1.9087023,\n                    -1.5636692,\n                    -4.559099,\n                    -0.9331611,\n                    -2.0087197,\n      
              -3.0887632,\n                    -1.7429163,\n                    -1.8492596,\n                    -1.4279119,\n                    -2.107058,\n                    -1.9292601,\n                    -1.2173969,\n                    -1.7226368,\n                    -1.288711,\n                    -1.5266242,\n                    -1.0676883,\n                    -0.82271296,\n                    -2.72016,\n                    -1.95744,\n                    -1.6681421,\n                    -1.561834,\n                    -1.139806,\n                    -2.7807567,\n                    -2.852466,\n                    -1.5560211,\n                    -2.8218806,\n                    -2.6191962,\n                    -3.2710078,\n                    -1.9588827,\n                    -0.9443455,\n                    -1.0752059,\n                    -2.6169057,\n                    -1.3419697,\n                    -3.6202824,\n                    -2.6320822,\n                    -2.3424454,\n                    -3.1701627,\n                    -1.705714,\n                    -2.471587,\n                    -1.706018,\n                    -3.0725029,\n                    -0.78821653,\n                    -3.9174464,\n                    -1.8377808\n                ],\n                \"pointIndex\": [\n                    1,\n                    502,\n                    229,\n                    124615864,\n                    110763474,\n                    66359894,\n                    79907579,\n                    35495826,\n                    45985371,\n                    23556059,\n                    99951616,\n                    111833388,\n                    35827229,\n                    64730104,\n                    89025,\n                    55394228,\n                    87863787,\n                    73136326,\n                    80548010,\n                    90110354,\n                    31092250,\n                    125987025,\n         
           27087925,\n                    3716780,\n                    20623733,\n                    43597728,\n                    45811303,\n                    48424553,\n                    4840676,\n                    123094759,\n                    4513018,\n                    24238832,\n                    31302989,\n                    58364321,\n                    27878880,\n                    89627407,\n                    54208096,\n                    96030457,\n                    6524879,\n                    114400195,\n                    49457760,\n                    17319422,\n                    15150999,\n                    23999127,\n                    111962595,\n                    38632545,\n                    64027745,\n                    118741542,\n                    17979860,\n                    56032910,\n                    45211702,\n                    46460679,\n                    20337475,\n                    59822455,\n                    95109119,\n                    109122258,\n                    21832209,\n                    68020971,\n                    10293097,\n                    86314633,\n                    90878049,\n                    87951177,\n                    114630977,\n                    75148188,\n                    77180011,\n                    78900649,\n                    5601924,\n                    93405592,\n                    83673586,\n                    10641235,\n                    91171577,\n                    91661234,\n                    6995240,\n                    30253916,\n                    119912134,\n                    109594041,\n                    113369225,\n                    120188293,\n                    33002468,\n                    501\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 229,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                
\"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 4365738981533818552\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5989758,\n                    -0.6030343,\n                    -0.6106647,\n                    -0.617829,\n                    -0.6120543,\n                    -0.6209751,\n                    -0.6540577,\n                    -0.7291006,\n                    -0.6260681,\n                    -0.6240438,\n                    -0.63892895,\n                    -0.6271658,\n                    -0.6558473,\n                    -0.67823005,\n                    -0.6712329,\n                    -0.7736877,\n                    -0.8370473,\n                    -0.8698825,\n                    -0.63718224,\n                    -0.66500527,\n                    -0.76872224,\n                    -0.7976186,\n                    -0.64789164,\n                    -0.75742877,\n                    -0.7079197,\n                    -0.66363484,\n                    -0.7033005,\n                    -0.6999278,\n                    -0.7570301,\n                    -0.8842918,\n                    -0.86259925,\n                    -1.0528697,\n                    -0.9030894,\n                    -1.0192664,\n                    -0.87634534,\n                    -0.97609395,\n                    -0.870785,\n                    -1.2165607,\n                    -0.8950667,\n                    -0.9456082,\n                    -0.81416637,\n                    -0.90334135,\n                    -0.9257617,\n                    -0.95053643,\n                    -0.8269434,\n                    -0.68671584,\n                    -0.9238147,\n                    -0.91530436,\n                    -1.2250437,\n                    -0.8648471,\n         
           -0.9400571,\n                    -0.77502626,\n                    -0.69389594,\n                    -1.1458131,\n                    -0.9694328,\n                    -0.7568269,\n                    -1.0375473,\n                    -0.8990032,\n                    -1.1046243,\n                    -1.6111062,\n                    -1.2231795,\n                    -1.7332932,\n                    -1.2406415,\n                    -1.0933986,\n                    -1.4834079,\n                    -1.3647826,\n                    -1.162224,\n                    -1.0776985,\n                    -1.0275012,\n                    -1.0436147,\n                    -1.2653149,\n                    -1.7329624,\n                    -1.0180838,\n                    -1.0981978,\n                    -0.9358566,\n                    -1.3720297,\n                    -1.4376353,\n                    -1.0709182,\n                    -1.0828278,\n                    -0.9824067,\n                    -1.8678888,\n                    -1.6504165,\n                    -1.0274376,\n                    -1.0305225,\n                    -1.1328106,\n                    -1.4048452,\n                    -1.0147716,\n                    -1.2578955,\n                    -1.1658182,\n                    -1.8569355,\n                    -1.256529,\n                    -0.8613851,\n                    -0.73715734,\n                    -1.231118,\n                    -1.3888869,\n                    -1.298621,\n                    -1.3348144,\n                    -1.2907434,\n                    -1.3547943,\n                    -0.91213906,\n                    -2.81691,\n                    -1.2689966,\n                    -1.3123076,\n                    -0.8410366,\n                    -0.818469,\n                    -0.95411783,\n                    -0.85013014,\n                    -1.292814,\n                    -1.3674784,\n                    -1.4650946,\n                    
-1.6974814,\n                    -0.7731692,\n                    -1.556783,\n                    -1.9376053,\n                    -2.6791103,\n                    -2.8065603,\n                    -3.4199686,\n                    -2.632667,\n                    -3.097106,\n                    -2.398917,\n                    -2.5683875,\n                    -1.318025,\n                    -3.004227,\n                    -2.803569,\n                    -1.7933978,\n                    -2.2934864,\n                    -2.2485514,\n                    -1.731055,\n                    -1.157685,\n                    -1.9098945,\n                    -2.593425,\n                    -1.854552,\n                    -3.8862267,\n                    -1.2046897,\n                    -1.6525576,\n                    -1.6870998,\n                    -1.3142855,\n                    -3.2718213,\n                    -2.055217,\n                    -1.2397898,\n                    -1.3642328,\n                    -3.6667328,\n                    -2.4019687,\n                    -3.7105029,\n                    -2.5529263,\n                    -3.6775377,\n                    -3.4969838,\n                    -2.5784345,\n                    -3.47127,\n                    -2.0122714,\n                    -1.2789313,\n                    -1.7605172,\n                    -2.4059713,\n                    -2.830982,\n                    -1.5596879,\n                    -1.1299579,\n                    -2.445495,\n                    -2.1350765,\n                    -2.6100347,\n                    -1.7369318,\n                    -4.3140836,\n                    -2.028551,\n                    -1.9430747,\n                    -2.796395,\n                    -5.855211,\n                    -5.55672,\n                    -2.097489,\n                    -2.3304756,\n                    -2.0556016,\n                    -2.3982108,\n                    -1.8669431,\n                    
-2.6178248,\n                    -2.3411436,\n                    -1.0704573,\n                    -1.0604006,\n                    -2.2578304,\n                    -2.3912299,\n                    -1.3100142,\n                    -1.2620088,\n                    -1.9373451,\n                    -2.2696261,\n                    -1.309974,\n                    -1.3542323,\n                    -2.646746,\n                    -0.89304143,\n                    -2.8895712,\n                    -1.4734464,\n                    -4.183573,\n                    -1.3531339,\n                    -1.6352302,\n                    -3.697477,\n                    -2.8222086,\n                    -3.847648,\n                    -1.4322337,\n                    -2.2488072,\n                    -1.5600172,\n                    -1.3510327,\n                    -2.5411255,\n                    -1.7665361,\n                    -1.1097009,\n                    -1.1876982,\n                    -6.4440703,\n                    -3.827908,\n                    -1.6914281,\n                    -2.3275604,\n                    -4.6303954,\n                    -2.2944593,\n                    -1.0078675,\n                    -0.9225348,\n                    -1.9457649,\n                    -2.338592,\n                    -2.6975172,\n                    -3.4240358,\n                    -2.4394717,\n                    -0.8670702,\n                    -3.9153588,\n                    -1.4039124,\n                    -1.5496547,\n                    -4.910925,\n                    -1.6267688,\n                    -2.5536485,\n                    -2.1050537,\n                    -2.210298,\n                    -1.024319,\n                    -1.2126342,\n                    -1.8602599\n                ],\n                \"pointIndex\": [\n                    1,\n                    504,\n                    226,\n                    98429960,\n                    112656297,\n                    
97879958,\n                    97194275,\n                    7373483,\n                    50394127,\n                    81304428,\n                    11835750,\n                    121966368,\n                    35984071,\n                    50958101,\n                    84195903,\n                    24589368,\n                    64319713,\n                    100139347,\n                    87559209,\n                    107136641,\n                    117838144,\n                    32701006,\n                    35765779,\n                    25433494,\n                    25147855,\n                    46773108,\n                    18929866,\n                    66570036,\n                    57057252,\n                    60835908,\n                    22833967,\n                    70319399,\n                    38498169,\n                    93120334,\n                    86293692,\n                    21093512,\n                    22041922,\n                    106002956,\n                    64746981,\n                    122673071,\n                    40642181,\n                    37877243,\n                    7182531,\n                    36374056,\n                    82994547,\n                    90761342,\n                    102959872,\n                    45036745,\n                    46067631,\n                    48358483,\n                    18563435,\n                    29229391,\n                    19184263,\n                    113583996,\n                    56503154,\n                    59226275,\n                    123570234,\n                    22119290,\n                    64825232,\n                    127647664,\n                    69875154,\n                    98054970,\n                    58565901,\n                    74327999,\n                    76273637,\n                    104065828,\n                    1185237,\n                    88841019,\n                    92894678,\n                    
16534084,\n                    105157004,\n                    44909320,\n                    122969354,\n                    112214521,\n                    30448094,\n                    33692792,\n                    49009014,\n                    125650218,\n                    79\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 226,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -3752382720585822890\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.35257456,\n                    -0.63316864,\n                    -0.6249559,\n                    -0.644586,\n                    -0.70678765,\n                    -0.64916795,\n                    -0.6261788,\n                    -0.7028251,\n                    -0.68006915,\n                    -0.71244913,\n                    -0.77226007,\n                    -0.6599282,\n                    -0.72423315,\n                    -0.64820457,\n                    -0.86892164,\n                    -0.80033886,\n                    -0.9883338,\n                    -0.7176925,\n                    -0.8587724,\n                    -0.7907359,\n                    -0.7907442,\n                    -0.8138974,\n                    -0.8363504,\n                    -0.8191511,\n                    -0.7210162,\n                    -0.8011215,\n                    -0.7275551,\n                    -0.6740003,\n                    -0.76122135,\n                    -0.9360815,\n                    -1.143798,\n                    -1.069601,\n                    -0.89091927,\n                    -1.4255719,\n                    -1.0564026,\n        
            -0.7504338,\n                    -0.74547905,\n                    -0.8932367,\n                    -0.9149731,\n                    -0.89604896,\n                    -0.89056855,\n                    -1.0763606,\n                    -1.2780377,\n                    -0.814832,\n                    -0.87155193,\n                    -1.1978128,\n                    -0.9658152,\n                    -0.91909224,\n                    -1.1020787,\n                    -0.9806116,\n                    -0.80837256,\n                    -1.121417,\n                    -0.959307,\n                    -0.82256985,\n                    -0.78294426,\n                    -0.8547436,\n                    -0.6911423,\n                    -0.9919269,\n                    -0.9217603,\n                    -1.1567005,\n                    -1.0467317,\n                    -1.1764008,\n                    -2.3090951,\n                    -1.9561859,\n                    -1.4364341,\n                    -0.9376694,\n                    -1.1808302,\n                    -1.6580477,\n                    -1.5421588,\n                    -1.2061565,\n                    -1.3012215,\n                    -1.1677957,\n                    -1.6640215,\n                    -0.9539052,\n                    -0.9304223,\n                    -1.4524605,\n                    -1.1548522,\n                    -3.087163,\n                    -0.9651231,\n                    -1.1932969,\n                    -1.7018605,\n                    -1.1340724,\n                    -1.1854857,\n                    -1.1199478,\n                    -1.6968377,\n                    -1.7598923,\n                    -1.5454631,\n                    -0.83821553,\n                    -1.3094827,\n                    -0.9318102,\n                    -1.0099014,\n                    -1.4364564,\n                    -1.6273265,\n                    -1.3257332,\n                    -1.6213363,\n                    
-1.0319413,\n                    -1.9782693,\n                    -1.5172966,\n                    -1.8712493,\n                    -1.3883024,\n                    -1.1409416,\n                    -1.5943706,\n                    -1.3920141,\n                    -1.2470638,\n                    -1.3488907,\n                    -1.2877753,\n                    -2.3829832,\n                    -1.0140915,\n                    -0.86199796,\n                    -1.969369,\n                    -1.2389947,\n                    -1.1190457,\n                    -1.0083293,\n                    -1.5217599,\n                    -2.4827971,\n                    -3.2721095,\n                    -1.832073,\n                    -1.0300539,\n                    -1.3891287,\n                    -4.1304526,\n                    -2.2308176,\n                    -1.1684897,\n                    -1.3797816,\n                    -1.6611247,\n                    -3.9026396,\n                    -2.3507211,\n                    -2.3537116,\n                    -4.3257017,\n                    -3.9033318,\n                    -3.0079002,\n                    -2.1621022,\n                    -3.0395641,\n                    -1.1423059,\n                    -2.8617375,\n                    -3.5767941,\n                    -2.289676,\n                    -4.3020816,\n                    -2.492416,\n                    -1.7381042,\n                    -1.8634133,\n                    -2.2264454,\n                    -2.779647,\n                    -3.4513872,\n                    -1.451247,\n                    -1.6320184,\n                    -5.162463,\n                    -2.647772,\n                    -1.6107192,\n                    -1.6144214,\n                    -1.3170866,\n                    -2.3347747,\n                    -1.5668651,\n                    -1.5948402,\n                    -2.1280186,\n                    -2.3939443,\n                    -4.045837,\n               
     -3.275819,\n                    -1.6477028,\n                    -1.5976466,\n                    -2.7628117,\n                    -2.4815483,\n                    -2.3328712,\n                    -3.8712041,\n                    -2.2842953,\n                    -1.4850677,\n                    -2.7312644,\n                    -2.5408404,\n                    -2.7129276,\n                    -2.680968,\n                    -1.9831715,\n                    -2.7402458,\n                    -1.7652198,\n                    -2.2835853,\n                    -1.6363691,\n                    -2.0129876,\n                    -2.5755525,\n                    -2.2679524,\n                    -2.9405003,\n                    -1.3456973,\n                    -1.4536569,\n                    -1.5227104,\n                    -6.832543,\n                    -1.6134499,\n                    -2.1005044,\n                    -1.5718675,\n                    -2.4156106,\n                    -1.9933846,\n                    -1.3972045,\n                    -1.3527149,\n                    -1.6452624,\n                    -1.8692507,\n                    -5.578517,\n                    -2.2867987,\n                    -2.0242627,\n                    -2.273815,\n                    -1.6365331,\n                    -1.7262317,\n                    -4.741056,\n                    -5.9371395,\n                    -1.6308647,\n                    -3.7583308,\n                    -3.0203693,\n                    -1.697553,\n                    -3.1482482,\n                    -1.7286578,\n                    -1.5627481,\n                    -2.9239838,\n                    -5.917072,\n                    -1.7130485,\n                    -1.6678938,\n                    -1.4424074,\n                    -1.5029242,\n                    -1.4068965,\n                    -2.7383819,\n                    -3.7690282,\n                    -1.0258008,\n                    -4.613188,\n           
         -1.9295435,\n                    -2.5107205,\n                    -4.164117,\n                    -2.1205091,\n                    -2.5135174,\n                    -1.7840471,\n                    -2.2793093,\n                    -1.6349511,\n                    -1.6482064,\n                    -1.0658326,\n                    -2.3246038,\n                    -2.2873054\n                ],\n                \"pointIndex\": [\n                    0,\n                    503,\n                    229,\n                    47003035,\n                    91960546,\n                    53637672,\n                    28600765,\n                    38427429,\n                    40526672,\n                    26033828,\n                    115488365,\n                    112303677,\n                    49858696,\n                    42759117,\n                    49676271,\n                    21699959,\n                    64819169,\n                    74063572,\n                    88056669,\n                    97891273,\n                    108932556,\n                    12206832,\n                    37949326,\n                    61758732,\n                    43266547,\n                    46161538,\n                    49251921,\n                    20045725,\n                    16366498,\n                    61588018,\n                    31034449,\n                    69264521,\n                    74375881,\n                    51199646,\n                    86387221,\n                    29430627,\n                    95950547,\n                    104463550,\n                    109041831,\n                    115801990,\n                    72897011,\n                    32290592,\n                    16076766,\n                    66841640,\n                    40804401,\n                    41674410,\n                    17596219,\n                    10670356,\n                    18382075,\n                    26510078,\n                    
48722314,\n                    51585602,\n                    53045444,\n                    2025613,\n                    21195801,\n                    56912908,\n                    61279871,\n                    125705476,\n                    23011045,\n                    66571601,\n                    75174384,\n                    71934978,\n                    11471,\n                    62130061,\n                    98582289,\n                    100499662,\n                    86025852,\n                    88381278,\n                    39302271,\n                    57339052,\n                    95444371,\n                    101036885,\n                    103842701,\n                    106390806,\n                    38130557,\n                    112724572,\n                    120471251,\n                    124260657,\n                    35299134,\n                    503\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 229,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                \"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": 3134929515649909125\n            },\n            {\n                \"version\": \"2.0\",\n                \"weight\": [\n                    -0.5317006,\n                    -0.5549443,\n                    -0.55005306,\n                    -0.58047175,\n                    -0.5792419,\n                    -0.5618388,\n                    -0.55156845,\n                    -0.59663844,\n                    -0.6457649,\n                    -0.61152357,\n                    -0.6713033,\n                    -0.85364294,\n                    -0.6575595,\n                    -0.6192772,\n                    -0.7174029,\n                    -0.68995684,\n                  
  -0.6509831,\n                    -0.6804296,\n                    -0.8823458,\n                    -0.6229239,\n                    -0.63748795,\n                    -0.6725953,\n                    -0.77007604,\n                    -0.86289036,\n                    -0.87573314,\n                    -0.9368412,\n                    -0.6730294,\n                    -0.72140056,\n                    -0.70452213,\n                    -0.85281587,\n                    -0.778473,\n                    -0.80166095,\n                    -0.8534091,\n                    -0.97973895,\n                    -0.72902834,\n                    -1.0171428,\n                    -0.7279368,\n                    -0.8837121,\n                    -0.9821047,\n                    -0.81776035,\n                    -0.71419483,\n                    -0.87666404,\n                    -0.6967305,\n                    -0.686794,\n                    -0.83897287,\n                    -1.0782491,\n                    -0.79766566,\n                    -1.0869834,\n                    -0.9266605,\n                    -0.96625125,\n                    -1.3254075,\n                    -0.990544,\n                    -0.95162743,\n                    -0.68006086,\n                    -0.94362134,\n                    -0.91446745,\n                    -0.76033175,\n                    -0.82099503,\n                    -1.98926,\n                    -0.9402286,\n                    -1.1041036,\n                    -1.4840512,\n                    -1.3193669,\n                    -1.1383134,\n                    -1.0716103,\n                    -1.353175,\n                    -1.2287694,\n                    -1.0996752,\n                    -1.3116969,\n                    -0.8533949,\n                    -0.78780127,\n                    -1.2678065,\n                    -1.3051043,\n                    -0.892428,\n                    -1.0663462,\n                    -1.2590696,\n                    
-1.0059689,\n                    -1.5622548,\n                    -1.3429893,\n                    -0.9912661,\n                    -0.91426957,\n                    -0.89937264,\n                    -0.81522775,\n                    -0.8956658,\n                    -0.9297555,\n                    -1.1325147,\n                    -2.0525374,\n                    -1.4441347,\n                    -0.8413736,\n                    -1.7260334,\n                    -0.84645754,\n                    -1.1753869,\n                    -1.0894531,\n                    -1.2714784,\n                    -1.0054822,\n                    -1.9432375,\n                    -1.4703784,\n                    -1.0627242,\n                    -1.0848396,\n                    -1.6313579,\n                    -1.2841729,\n                    -2.3314505,\n                    -1.6599146,\n                    -1.5145736,\n                    -1.1292386,\n                    -1.1420798,\n                    -1.1882722,\n                    -0.7650141,\n                    -0.8330598,\n                    -1.0279738,\n                    -1.5063119,\n                    -1.0402813,\n                    -1.673066,\n                    -0.855341,\n                    -0.8344414,\n                    -0.8772748,\n                    -3.3096359,\n                    -2.1580143,\n                    -3.2406547,\n                    -1.2543756,\n                    -1.3132578,\n                    -1.7253371,\n                    -1.2502601,\n                    -1.4913716,\n                    -1.6956722,\n                    -1.6008805,\n                    -3.4310803,\n                    -4.0316906,\n                    -1.3139403,\n                    -1.2990541,\n                    -2.7985826,\n                    -3.0076113,\n                    -2.204242,\n                    -2.1400445,\n                    -1.8365418,\n                    -1.5423521,\n                    -2.2629874,\n      
              -1.9485801,\n                    -2.8632088,\n                    -1.4487959,\n                    -1.7835891,\n                    -1.3249984,\n                    -0.8726291,\n                    -6.6406846,\n                    -2.2428896,\n                    -3.2130282,\n                    -1.9984459,\n                    -2.1642005,\n                    -2.2802176,\n                    -2.3029919,\n                    -1.7716421,\n                    -1.2956859,\n                    -2.674018,\n                    -1.4789615,\n                    -1.2592919,\n                    -1.651695,\n                    -2.7735283,\n                    -2.754395,\n                    -5.219823,\n                    -1.8212689,\n                    -1.5849502,\n                    -1.5373638,\n                    -2.1230855,\n                    -2.006167,\n                    -1.95454,\n                    -1.5437725,\n                    -3.7523682,\n                    -1.6756934,\n                    -1.1302894,\n                    -2.3797457,\n                    -1.5028995,\n                    -1.2654449,\n                    -3.4029913,\n                    -2.4615457,\n                    -2.1503222,\n                    -2.4433656,\n                    -3.428305,\n                    -2.0373065,\n                    -1.5127231,\n                    -1.9826941,\n                    -3.430179,\n                    -2.1248314,\n                    -1.0785834,\n                    -1.3342285,\n                    -3.162054,\n                    -1.7133318,\n                    -2.4264777,\n                    -2.7424254,\n                    -2.205474,\n                    -2.1188297,\n                    -1.0416778,\n                    -3.8446963,\n                    -3.152632,\n                    -1.5539855,\n                    -2.4188478,\n                    -1.0664158,\n                    -5.486928,\n                    -1.2196403,\n      
              -1.6307973,\n                    -2.1837428,\n                    -4.4910727,\n                    -1.627812,\n                    -1.5182625,\n                    -2.5001745,\n                    -2.4735217,\n                    -4.885121,\n                    -2.1104615,\n                    -1.761913,\n                    -1.5156634,\n                    -3.1595268,\n                    -2.6720266,\n                    -2.3196728,\n                    -3.2704804,\n                    -3.718801,\n                    -1.2293067,\n                    -1.98525,\n                    -1.442857,\n                    -2.399521,\n                    -1.6830201,\n                    -1.3301995,\n                    -1.9729538,\n                    -5.5065384,\n                    -2.9774437,\n                    -3.094193,\n                    -1.7231919,\n                    -2.0231786,\n                    -1.9183023,\n                    -1.1119088,\n                    -1.0021992,\n                    -1.0839311,\n                    -1.6686866,\n                    -2.8646417\n                ],\n                \"pointIndex\": [\n                    1,\n                    502,\n                    232,\n                    30125521,\n                    104207869,\n                    11501672,\n                    117074017,\n                    7291457,\n                    63508587,\n                    23048853,\n                    117747004,\n                    110069385,\n                    49195970,\n                    41410132,\n                    1528660,\n                    90821874,\n                    10375556,\n                    24571484,\n                    119895067,\n                    37777908,\n                    102803209,\n                    122503004,\n                    84265154,\n                    16301591,\n                    74151424,\n                    83856748,\n                    24283422,\n              
      107239937,\n                    52268400,\n                    4644238,\n                    60393661,\n                    23482961,\n                    65935747,\n                    70951942,\n                    73063505,\n                    80420200,\n                    28404641,\n                    118557453,\n                    30463815,\n                    125460264,\n                    121125580,\n                    34340303,\n                    86574979,\n                    37513098,\n                    24156390,\n                    48742358,\n                    77536476,\n                    42673052,\n                    118237420,\n                    113489827,\n                    46483872,\n                    40120324,\n                    21519222,\n                    51163760,\n                    22531470,\n                    53478866,\n                    55841307,\n                    57247299,\n                    60599982,\n                    71139165,\n                    113963557,\n                    64030350,\n                    111083161,\n                    2400827,\n                    10908098,\n                    71356663,\n                    105361320,\n                    90624850,\n                    124964176,\n                    72753707,\n                    92804731,\n                    113839913,\n                    97210899,\n                    66840182,\n                    103525693,\n                    111846083,\n                    108929408,\n                    116239059,\n                    69030844,\n                    125742944,\n                    27\n                ],\n                \"storeSequenceIndicesEnabled\": false,\n                \"size\": 232,\n                \"capacity\": 256,\n                \"initialAcceptFraction\": 0.125,\n                \"timeDecay\": 1.0E-4,\n                \"sequenceIndexOfMostRecentTimeDecayUpdate\": 0,\n                
\"maxSequenceIndex\": 504,\n                \"compressed\": true,\n                \"randomSeed\": -6734892575936596101\n            }\n        ],\n        \"compactRandomCutTreeStates\": [\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        1035009826,\n                        250599463,\n                        603060643,\n                        754644337,\n                        891416226,\n                        782146283,\n                        378713454,\n                        1056620465,\n                        177986421,\n                        928036085,\n                        1071848943,\n                        501577126,\n                        898733163,\n                        254781898,\n                        53861341,\n                        509206381,\n                        333917169,\n                        959170671,\n                        437304017,\n                        659733574,\n                        389488302,\n                        1021681109,\n                        82354001,\n                        762926541,\n                        711943918,\n                        735524973,\n                        1005665057,\n                        320935793,\n                        1029885382,\n                        573999574,\n                        483525999,\n                        72393826,\n                        1050637815,\n                        175233779,\n                        
1013619019,\n                        69921914,\n                        199951919,\n                        23503,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        52,\n                        -120,\n                        38,\n                        66,\n                        -105,\n                        91,\n                        -66,\n                        66,\n                        -119,\n                        -123,\n                        -128,\n                        66,\n                        -112,\n                        -25,\n                        76,\n                        64,\n                        -49,\n                        79,\n                        79,\n                        66,\n                        -93,\n                        20,\n                        -11,\n                        66,\n                        90,\n                        87,\n                        -118,\n                        66,\n                        -83,\n                        119,\n                        41,\n                        66,\n                        -82,\n                        -112,\n                        -109,\n                        66,\n                        68,\n                        -88,\n                        -34,\n                        66,\n                        -61,\n                        -42,\n                        101,\n                        66,\n                        113,\n                        3,\n                        -70,\n                        66,\n                        127,\n                        51,\n                        -88,\n                        66,\n                        -104,\n                        -22,\n                        -59,\n                  
      66,\n                        -82,\n                        -85,\n                        -15,\n                        66,\n                        123,\n                        -105,\n                        34,\n                        66,\n                        115,\n                        52,\n                        98,\n                        66,\n                        -116,\n                        -128,\n                        -82,\n                        66,\n                        -95,\n                        49,\n                        -125,\n                        65,\n                        -73,\n                        19,\n                        -122,\n                        66,\n                        -113,\n                        -121,\n                        34,\n                        66,\n                        -110,\n                        8,\n                        1,\n                        66,\n                        101,\n                        25,\n                        -6,\n                        66,\n                        -88,\n                        7,\n                        -59,\n                        66,\n                        -77,\n                        2,\n                        27,\n                        66,\n                        -109,\n                        -34,\n                        54,\n                        66,\n                        95,\n                        -7,\n                        71,\n                        66,\n                        -119,\n                        30,\n                        41,\n                        66,\n                        -74,\n                        -53,\n                        -123,\n                        66,\n                        -63,\n                        102,\n                        -15,\n                        66,\n                        84,\n                        105,\n                        70,\n         
               65,\n                        4,\n                        -25,\n                        -22,\n                        66,\n                        -65,\n                        -5,\n                        -112,\n                        66,\n                        -116,\n                        -47,\n                        -119,\n                        66,\n                        -108,\n                        9,\n                        -92,\n                        66,\n                        91,\n                        -44,\n                        88,\n                        66,\n                        -65,\n                        -21,\n                        19,\n                        66,\n                        67,\n                        38,\n                        75,\n                        66,\n                        -114,\n                        -113,\n                        20,\n                        65,\n                        -38,\n                        72,\n                        72,\n                        66,\n                        -109,\n                        3,\n                        -88,\n                        66,\n                        86,\n                        -27,\n                        -95,\n                        66,\n                        -111,\n                        -60,\n                        -113,\n                        66,\n                        122,\n                        -1,\n                        -112,\n                        66,\n                        96,\n                        -8,\n                        33,\n                        66,\n                        -109,\n                        -88,\n                        -122,\n                        66,\n                        97,\n                        -14,\n                        89,\n                        66,\n                        115,\n                        44,\n                        
65,\n                        66,\n                        -122,\n                        -114,\n                        -71,\n                        66,\n                        76,\n                        -51,\n                        3,\n                        66,\n                        -78,\n                        -88,\n                        -93,\n                        66,\n                        90,\n                        31,\n                        -105,\n                        66,\n                        -104,\n                        -74,\n                        37,\n                        66,\n                        -115,\n                        -96,\n                        73,\n                        66,\n                        -113,\n                        111,\n                        24,\n                        66,\n                        -126,\n                        -58,\n                        61,\n                        66,\n                        117,\n                        -80,\n                        27,\n                        65,\n                        -38,\n                        125,\n                        52,\n                        66,\n                        -99,\n                        117,\n                        47,\n                        66,\n                        81,\n                        49,\n                        -33,\n                        66,\n                        93,\n                        -100,\n                        75,\n                        66,\n                        110,\n                        62,\n                        -54,\n                        66,\n                        82,\n                        -51,\n                        91,\n                        66,\n                        -71,\n                        -93,\n                        -22,\n                        66,\n                        -64,\n                        51,\n              
          0,\n                        66,\n                        75,\n                        78,\n                        -62,\n                        66,\n                        -73,\n                        64,\n                        84,\n                        66,\n                        -95,\n                        -53,\n                        -83,\n                        66,\n                        -92,\n                        53,\n                        63,\n                        66,\n                        -65,\n                        -15,\n                        103,\n                        66,\n                        -76,\n                        -88,\n                        -113,\n                        66,\n                        -76,\n                        -128,\n                        -28,\n                        66,\n                        114,\n                        113,\n                        117,\n                        66,\n                        74,\n                        68,\n                        88,\n                        66,\n                        -111,\n                        -73,\n                        -124,\n                        66,\n                        72,\n                        -3,\n                        -80,\n                        66,\n                        -98,\n                        -117,\n                        86,\n                        66,\n                        -64,\n                        80,\n                        60,\n                        66,\n                        -95,\n                        50,\n                        121,\n                        66,\n                        -61,\n                        -75,\n                        -94,\n                        66,\n                        -78,\n                        -20,\n                        96,\n                        66,\n                        83,\n                        -82,\n     
                   -84,\n                        66,\n                        -70,\n                        12,\n                        -59,\n                        66,\n                        81,\n                        -3,\n                        56,\n                        66,\n                        -112,\n                        22,\n                        94,\n                        66,\n                        -64,\n                        100,\n                        -27,\n                        66,\n                        -83,\n                        -17,\n                        -68,\n                        66,\n                        116,\n                        1,\n                        44,\n                        65,\n                        -97,\n                        93,\n                        -34,\n                        66,\n                        -113,\n                        -63,\n                        -95,\n                        66,\n                        -116,\n                        71,\n                        -23,\n                        66,\n                        105,\n                        -119,\n                        -108,\n                        66,\n                        78,\n                        -52,\n                        122,\n                        66,\n                        75,\n                        89,\n                        69,\n                        66,\n                        -106,\n                        88,\n                        54,\n                        66,\n                        88,\n                        81,\n                        82,\n                        65,\n                        -15,\n                        9,\n                        -44,\n                        65,\n                        -2,\n                        -118,\n                        -32,\n                        66,\n                        -62,\n                        
71,\n                        -79,\n                        66,\n                        -64,\n                        84,\n                        99,\n                        66,\n                        -62,\n                        -72,\n                        -101,\n                        66,\n                        -115,\n                        118,\n                        69,\n                        66,\n                        78,\n                        93,\n                        -70,\n                        66,\n                        80,\n                        69,\n                        60,\n                        66,\n                        -76,\n                        49,\n                        22,\n                        66,\n                        84,\n                        -25,\n                        48,\n                        66,\n                        -63,\n                        104,\n                        -70,\n                        64,\n                        -15,\n                        116,\n                        -28,\n                        66,\n                        -117,\n                        21,\n                        34,\n                        66,\n                        -60,\n                        -12,\n                        -7,\n                        66,\n                        -59,\n                        -122,\n                        96,\n                        66,\n                        -119,\n                        111,\n                        81,\n                        66,\n                        51,\n                        106,\n                        -48,\n                        66,\n                        -113,\n                        -1,\n                        19,\n                        66,\n                        -74,\n                        -13,\n                        -8,\n                        66,\n                        -63,\n                 
       7,\n                        -32,\n                        66,\n                        104,\n                        85,\n                        83,\n                        66,\n                        -110,\n                        -25,\n                        12,\n                        66,\n                        -126,\n                        4,\n                        25,\n                        66,\n                        86,\n                        121,\n                        109,\n                        66,\n                        -92,\n                        47,\n                        -114,\n                        66,\n                        -112,\n                        -55,\n                        4,\n                        66,\n                        127,\n                        59,\n                        -65,\n                        66,\n                        -62,\n                        18,\n                        80,\n                        66,\n                        76,\n                        -59,\n                        108,\n                        66,\n                        71,\n                        -104,\n                        95,\n                        66,\n                        -95,\n                        -67,\n                        97,\n                        66,\n                        -93,\n                        -126,\n                        -59,\n                        66,\n                        70,\n                        -2,\n                        -29,\n                        66,\n                        105,\n                        -76,\n                        11,\n                        66,\n                        -68,\n                        -1,\n                        108,\n                        66,\n                        -72,\n                        62,\n                        11,\n                        66,\n                        -103,\n            
            -56,\n                        -72,\n                        66,\n                        -80,\n                        67,\n                        -43,\n                        66,\n                        99,\n                        -36,\n                        44,\n                        66,\n                        -118,\n                        120,\n                        -3,\n                        66,\n                        -85,\n                        -97,\n                        39,\n                        66,\n                        -79,\n                        120,\n                        -98,\n                        66,\n                        -99,\n                        8,\n                        16,\n                        66,\n                        -71,\n                        -124,\n                        -94,\n                        66,\n                        -106,\n                        19,\n                        -75,\n                        66,\n                        -71,\n                        24,\n                        -39,\n                        66,\n                        79,\n                        43,\n                        22,\n                        66,\n                        -61,\n                        -84,\n                        109,\n                        66,\n                        -88,\n                        -46,\n                        -86,\n                        66,\n                        72,\n                        31,\n                        61,\n                        66,\n                        -117,\n                        23,\n                        -121,\n                        66,\n                        -78,\n                        -66,\n                        1,\n                        66,\n                        -78,\n                        -65,\n                        16,\n                        66,\n                        -128,\n   
                     -87,\n                        127,\n                        66,\n                        -117,\n                        -128,\n                        6,\n                        66,\n                        71,\n                        36,\n                        67,\n                        66,\n                        -62,\n                        -42,\n                        -51,\n                        66,\n                        -71,\n                        -104,\n                        105,\n                        66,\n                        -112,\n                        106,\n                        -34,\n                        66,\n                        -113,\n                        63,\n                        -111,\n                        66,\n                        -120,\n                        -69,\n                        32,\n                        66,\n                        -124,\n                        -64,\n                        122,\n                        66,\n                        -60,\n                        -43,\n                        -95,\n                        66,\n                        -65,\n                        -117,\n                        -34,\n                        66,\n                        120,\n                        -80,\n                        62,\n                        66,\n                        -101,\n                        37,\n                        105,\n                        66,\n                        -103,\n                        77,\n                        -81,\n                        66,\n                        79,\n                        -72,\n                        -70,\n                        66,\n                        -113,\n                        111,\n                        -111,\n                        66,\n                        -66,\n                        -128,\n                        -1,\n                        66,\n        
                -60,\n                        -67,\n                        59,\n                        66,\n                        -90,\n                        126,\n                        -15,\n                        66,\n                        -62,\n                        -91,\n                        109,\n                        66,\n                        -76,\n                        -125,\n                        80,\n                        66,\n                        73,\n                        -106,\n                        113,\n                        66,\n                        -96,\n                        -79,\n                        106,\n                        66,\n                        -73,\n                        -29,\n                        -110,\n                        66,\n                        -63,\n                        -61,\n                        63,\n                        66,\n                        -61,\n                        7,\n                        -61,\n                        66,\n                        -66,\n                        94,\n                        -105,\n                        66,\n                        -65,\n                        118,\n                        -110,\n                        66,\n                        -112,\n                        43,\n                        -74,\n                        66,\n                        84,\n                        84,\n                        -96,\n                        66,\n                        -101,\n                        -54,\n                        -110,\n                        66,\n                        88,\n                        70,\n                        -55,\n                        66,\n                        71,\n                        -112,\n                        54,\n                        66,\n                        -103,\n                        -71,\n                        -87,\n                  
      64,\n                        -34,\n                        -46,\n                        44,\n                        66,\n                        -114,\n                        -1,\n                        115,\n                        66,\n                        -69,\n                        42,\n                        85,\n                        66,\n                        -77,\n                        -11,\n                        -128,\n                        66,\n                        78,\n                        20,\n                        -12,\n                        66,\n                        -97,\n                        18,\n                        -41,\n                        66,\n                        -98,\n                        24,\n                        63,\n                        66,\n                        -99,\n                        -108,\n                        59,\n                        66,\n                        -69,\n                        -109,\n                        39,\n                        66,\n                        123,\n                        20,\n                        44,\n                        66,\n                        71,\n                        107,\n                        -23,\n                        66,\n                        -102,\n                        -54,\n                        -55,\n                        66,\n                        -59,\n                        -1,\n                        59,\n                        66,\n                        -106,\n                        117,\n                        62,\n                        66,\n                        89,\n                        -75,\n                        55,\n                        66,\n                        118,\n                        -44,\n                        66,\n                        66,\n                        82,\n                        -77,\n                        110,\n          
              66,\n                        -64,\n                        46,\n                        -10,\n                        66,\n                        103,\n                        -84,\n                        -54,\n                        66,\n                        89,\n                        67,\n                        -103,\n                        66,\n                        -92,\n                        -70,\n                        -61,\n                        66,\n                        80,\n                        -87,\n                        -57,\n                        66,\n                        -66,\n                        -49,\n                        -88,\n                        66,\n                        -78,\n                        -121,\n                        -13,\n                        66,\n                        -108,\n                        -9,\n                        88,\n                        66,\n                        -64,\n                        -81,\n                        -11,\n                        66,\n                        -67,\n                        26,\n                        6,\n                        66,\n                        -67,\n                        34,\n                        -62,\n                        66,\n                        77,\n                        -43,\n                        122,\n                        66,\n                        -128,\n                        8,\n                        28,\n                        66,\n                        -113,\n                        1,\n                        70,\n                        66,\n                        -70,\n                        67,\n                        48,\n                        66,\n                        -61,\n                        66,\n                        125,\n                        66,\n                        85,\n                        -105,\n                        -82,\n  
                      66,\n                        -90,\n                        65,\n                        44,\n                        66,\n                        -108,\n                        -28,\n                        87,\n                        66,\n                        -66,\n                        55,\n                        -74,\n                        66,\n                        -58,\n                        39,\n                        107,\n                        66,\n                        -102,\n                        79,\n                        -124,\n                        66,\n                        86,\n                        -119,\n                        -25,\n                        66,\n                        -69,\n                        37,\n                        -54,\n                        66,\n                        -71,\n                        5,\n                        91,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                     
   0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n     
                   0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 225,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1162254824,\n                        712052113,\n                        754938941,\n                        595716196,\n                        726452833,\n                        1028151670,\n                        729943241,\n                        644983369,\n                        983697169,\n                        1017110264,\n                        725394985,\n                        23845990,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        731785498,\n                        1098282959,\n                        1013783822,\n                        1145606648,\n                        582793942,\n                        625851574,\n                        731233652,\n                        644972300,\n                        1098282118,\n                        597311243,\n                        1099823740,\n                        21523369,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 29,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 29,\n                    
\"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -4554398593646845892,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        120031194,\n                        258812407,\n                        464848054,\n                        333552074,\n                        753138641,\n                        900978230,\n                        595176269,\n                        364882913,\n                        899482826,\n                        514410871,\n                        376147622,\n                        200264779,\n                        497241677,\n                        992958069,\n                        536472545,\n                        719968494,\n                        378748489,\n                        448611453,\n                        757247405,\n                        504404769,\n                        249024510,\n                        902268867,\n                        849593554,\n                        304122367,\n                        188320945,\n                        305055057,\n                        309976559,\n                        913659554,\n                        634459733,\n   
                     454921387,\n                        737840465,\n                        708523681,\n                        637470893,\n                        623547595,\n                        458720634,\n                        644593638,\n                        709922266,\n                        1022,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        18,\n                        9,\n                        -30,\n                        68,\n                        -53,\n                        97,\n                        81,\n                        66,\n                        -128,\n                        -75,\n                        -115,\n                        66,\n                        105,\n                        -14,\n                        88,\n                        69,\n                        4,\n                        108,\n                        -55,\n                        66,\n                        9,\n                        -56,\n                        46,\n                        66,\n                        49,\n                        97,\n                        -113,\n                        66,\n                        94,\n                        -27,\n                        115,\n                        66,\n                        -73,\n                        111,\n                        53,\n                        66,\n                        -99,\n                        115,\n                        -29,\n                        66,\n                        -102,\n                        89,\n                        120,\n                        66,\n                        118,\n                        -22,\n                        15,\n                        66,\n                        -60,\n      
                  -63,\n                        -58,\n                        66,\n                        -124,\n                        -30,\n                        42,\n                        66,\n                        126,\n                        -62,\n                        106,\n                        66,\n                        -87,\n                        -3,\n                        -127,\n                        66,\n                        114,\n                        -96,\n                        -70,\n                        66,\n                        -122,\n                        -12,\n                        -71,\n                        66,\n                        -75,\n                        100,\n                        6,\n                        66,\n                        -100,\n                        -88,\n                        -63,\n                        66,\n                        -76,\n                        117,\n                        5,\n                        66,\n                        77,\n                        83,\n                        64,\n                        66,\n                        -68,\n                        -78,\n                        4,\n                        66,\n                        -126,\n                        -56,\n                        79,\n                        66,\n                        -93,\n                        45,\n                        -50,\n                        66,\n                        -71,\n                        75,\n                        29,\n                        66,\n                        -80,\n                        23,\n                        -71,\n                        66,\n                        17,\n                        -39,\n                        -54,\n                        66,\n                        -61,\n                        -19,\n                        -116,\n                        66,\n                        
-67,\n                        39,\n                        -45,\n                        66,\n                        -73,\n                        -51,\n                        36,\n                        66,\n                        -112,\n                        -57,\n                        -127,\n                        66,\n                        -119,\n                        100,\n                        81,\n                        66,\n                        76,\n                        23,\n                        -49,\n                        66,\n                        70,\n                        -100,\n                        27,\n                        66,\n                        -79,\n                        -17,\n                        38,\n                        66,\n                        -115,\n                        -45,\n                        -83,\n                        66,\n                        -87,\n                        72,\n                        45,\n                        66,\n                        -61,\n                        104,\n                        -89,\n                        66,\n                        122,\n                        0,\n                        -93,\n                        66,\n                        74,\n                        36,\n                        -66,\n                        66,\n                        -114,\n                        57,\n                        21,\n                        66,\n                        -96,\n                        112,\n                        62,\n                        66,\n                        95,\n                        120,\n                        -104,\n                        66,\n                        -72,\n                        -53,\n                        -112,\n                        66,\n                        95,\n                        22,\n                        -127,\n                        66,\n            
            96,\n                        102,\n                        -77,\n                        66,\n                        -78,\n                        -76,\n                        80,\n                        66,\n                        -61,\n                        72,\n                        48,\n                        66,\n                        -79,\n                        28,\n                        37,\n                        66,\n                        -102,\n                        -112,\n                        52,\n                        66,\n                        -68,\n                        -10,\n                        -114,\n                        66,\n                        -109,\n                        -12,\n                        70,\n                        66,\n                        -83,\n                        -12,\n                        -13,\n                        66,\n                        70,\n                        -9,\n                        -42,\n                        66,\n                        126,\n                        -66,\n                        52,\n                        66,\n                        -84,\n                        -79,\n                        49,\n                        66,\n                        -76,\n                        20,\n                        60,\n                        66,\n                        -77,\n                        114,\n                        51,\n                        65,\n                        -51,\n                        -43,\n                        -90,\n                        66,\n                        -70,\n                        41,\n                        51,\n                        66,\n                        -93,\n                        67,\n                        -16,\n                        66,\n                        115,\n                        -11,\n                        -17,\n                        66,\n   
                     78,\n                        -46,\n                        -34,\n                        66,\n                        -88,\n                        -69,\n                        -103,\n                        66,\n                        68,\n                        75,\n                        -82,\n                        66,\n                        101,\n                        104,\n                        -73,\n                        66,\n                        -63,\n                        113,\n                        0,\n                        66,\n                        119,\n                        125,\n                        -29,\n                        65,\n                        -43,\n                        -85,\n                        46,\n                        66,\n                        -59,\n                        116,\n                        66,\n                        66,\n                        -99,\n                        41,\n                        -87,\n                        66,\n                        -120,\n                        -36,\n                        -119,\n                        66,\n                        -82,\n                        58,\n                        99,\n                        66,\n                        96,\n                        83,\n                        97,\n                        66,\n                        -81,\n                        -71,\n                        17,\n                        66,\n                        -83,\n                        99,\n                        65,\n                        66,\n                        -64,\n                        -78,\n                        9,\n                        66,\n                        -94,\n                        11,\n                        -80,\n                        66,\n                        73,\n                        44,\n                        -111,\n                        
66,\n                        -109,\n                        58,\n                        52,\n                        66,\n                        -66,\n                        -62,\n                        -119,\n                        66,\n                        -81,\n                        101,\n                        5,\n                        66,\n                        -97,\n                        -12,\n                        -43,\n                        66,\n                        -107,\n                        20,\n                        -13,\n                        66,\n                        82,\n                        -102,\n                        22,\n                        66,\n                        -68,\n                        -53,\n                        102,\n                        66,\n                        94,\n                        39,\n                        28,\n                        66,\n                        107,\n                        -47,\n                        31,\n                        66,\n                        72,\n                        -93,\n                        89,\n                        66,\n                        -72,\n                        91,\n                        -62,\n                        66,\n                        103,\n                        -74,\n                        53,\n                        66,\n                        -75,\n                        86,\n                        -126,\n                        66,\n                        113,\n                        -103,\n                        -50,\n                        66,\n                        -58,\n                        64,\n                        -36,\n                        66,\n                        -114,\n                        -58,\n                        77,\n                        66,\n                        -87,\n                        -15,\n                        -119,\n           
             66,\n                        -60,\n                        -79,\n                        50,\n                        66,\n                        71,\n                        38,\n                        -15,\n                        66,\n                        -69,\n                        -26,\n                        65,\n                        66,\n                        -107,\n                        -54,\n                        69,\n                        66,\n                        81,\n                        66,\n                        23,\n                        66,\n                        -113,\n                        -114,\n                        24,\n                        66,\n                        72,\n                        -5,\n                        3,\n                        66,\n                        -96,\n                        114,\n                        -63,\n                        66,\n                        -60,\n                        -121,\n                        -41,\n                        66,\n                        69,\n                        -109,\n                        -21,\n                        66,\n                        -118,\n                        50,\n                        -94,\n                        66,\n                        -102,\n                        -120,\n                        -56,\n                        66,\n                        -104,\n                        -10,\n                        -95,\n                        66,\n                        -110,\n                        106,\n                        -125,\n                        66,\n                        -100,\n                        68,\n                        44,\n                        66,\n                        -65,\n                        42,\n                        16,\n                        66,\n                        -74,\n                        11,\n                        
-76,\n                        66,\n                        -108,\n                        -11,\n                        -128,\n                        66,\n                        -106,\n                        42,\n                        -48,\n                        66,\n                        -83,\n                        -49,\n                        -4,\n                        66,\n                        -97,\n                        -120,\n                        3,\n                        66,\n                        -93,\n                        -52,\n                        57,\n                        66,\n                        80,\n                        35,\n                        1,\n                        66,\n                        -70,\n                        -94,\n                        -71,\n                        66,\n                        83,\n                        -104,\n                        -20,\n                        66,\n                        88,\n                        -112,\n                        -57,\n                        66,\n                        75,\n                        -113,\n                        56,\n                        66,\n                        -112,\n                        47,\n                        20,\n                        66,\n                        -125,\n                        45,\n                        -116,\n                        66,\n                        77,\n                        37,\n                        -9,\n                        66,\n                        -82,\n                        55,\n                        32,\n                        66,\n                        -125,\n                        94,\n                        86,\n                        66,\n                        80,\n                        -31,\n                        -76,\n                        63,\n                        -89,\n                        29,\n               
         -121,\n                        66,\n                        -66,\n                        -81,\n                        -71,\n                        66,\n                        -68,\n                        36,\n                        30,\n                        66,\n                        -60,\n                        -25,\n                        25,\n                        66,\n                        -102,\n                        -120,\n                        111,\n                        66,\n                        70,\n                        79,\n                        85,\n                        66,\n                        -68,\n                        95,\n                        -68,\n                        66,\n                        -99,\n                        28,\n                        115,\n                        66,\n                        83,\n                        -78,\n                        106,\n                        66,\n                        122,\n                        13,\n                        -37,\n                        66,\n                        -62,\n                        -7,\n                        109,\n                        66,\n                        -89,\n                        -79,\n                        126,\n                        66,\n                        -80,\n                        -109,\n                        -31,\n                        66,\n                        -101,\n                        -90,\n                        13,\n                        66,\n                        -96,\n                        -117,\n                        87,\n                        66,\n                        -112,\n                        125,\n                        100,\n                        66,\n                        83,\n                        127,\n                        100,\n                        66,\n                        71,\n                        94,\n 
                       94,\n                        66,\n                        99,\n                        -55,\n                        43,\n                        66,\n                        -113,\n                        -21,\n                        50,\n                        66,\n                        -128,\n                        125,\n                        67,\n                        66,\n                        -82,\n                        56,\n                        86,\n                        66,\n                        -105,\n                        35,\n                        -38,\n                        66,\n                        -103,\n                        -34,\n                        93,\n                        66,\n                        -70,\n                        30,\n                        76,\n                        66,\n                        -101,\n                        -104,\n                        47,\n                        66,\n                        69,\n                        -122,\n                        -20,\n                        66,\n                        112,\n                        85,\n                        -50,\n                        66,\n                        90,\n                        -15,\n                        -41,\n                        66,\n                        94,\n                        -91,\n                        124,\n                        66,\n                        -128,\n                        14,\n                        -107,\n                        66,\n                        -105,\n                        -24,\n                        -7,\n                        66,\n                        -60,\n                        -115,\n                        -6,\n                        66,\n                        -114,\n                        -14,\n                        -44,\n                        66,\n                        -111,\n              
          92,\n                        -47,\n                        66,\n                        -81,\n                        28,\n                        97,\n                        66,\n                        95,\n                        -71,\n                        55,\n                        66,\n                        76,\n                        88,\n                        100,\n                        66,\n                        -116,\n                        -126,\n                        -79,\n                        66,\n                        -86,\n                        79,\n                        -113,\n                        66,\n                        -101,\n                        -6,\n                        -51,\n                        66,\n                        -69,\n                        72,\n                        28,\n                        66,\n                        -100,\n                        122,\n                        -2,\n                        66,\n                        -78,\n                        33,\n                        -42,\n                        66,\n                        88,\n                        9,\n                        11,\n                        66,\n                        -108,\n                        -51,\n                        48,\n                        66,\n                        107,\n                        121,\n                        -18,\n                        66,\n                        85,\n                        5,\n                        -18,\n                        66,\n                        -103,\n                        -98,\n                        65,\n                        66,\n                        -112,\n                        81,\n                        -63,\n                        66,\n                        -105,\n                        93,\n                        -78,\n                        66,\n                        -63,\n      
                  58,\n                        -70,\n                        66,\n                        87,\n                        -20,\n                        59,\n                        66,\n                        -98,\n                        -110,\n                        48,\n                        66,\n                        75,\n                        53,\n                        20,\n                        66,\n                        -102,\n                        18,\n                        -113,\n                        66,\n                        -108,\n                        92,\n                        91,\n                        66,\n                        -105,\n                        -117,\n                        -11,\n                        66,\n                        -106,\n                        -126,\n                        -105,\n                        66,\n                        -82,\n                        -125,\n                        98,\n                        66,\n                        101,\n                        -103,\n                        -123,\n                        66,\n                        -108,\n                        -95,\n                        121,\n                        66,\n                        -98,\n                        27,\n                        -125,\n                        66,\n                        -105,\n                        -39,\n                        -79,\n                        66,\n                        -59,\n                        -24,\n                        -38,\n                        66,\n                        -70,\n                        -37,\n                        -125,\n                        66,\n                        100,\n                        49,\n                        57,\n                        66,\n                        -71,\n                        -54,\n                        -53,\n                        66,\n           
             -127,\n                        111,\n                        -72,\n                        66,\n                        -60,\n                        -7,\n                        127,\n                        66,\n                        -108,\n                        -89,\n                        80,\n                        66,\n                        -105,\n                        27,\n                        -65,\n                        66,\n                        -62,\n                        -4,\n                        -89,\n                        66,\n                        -70,\n                        -98,\n                        -64,\n                        66,\n                        -73,\n                        -30,\n                        50,\n                        66,\n                        109,\n                        -98,\n                        100,\n                        66,\n                        -113,\n                        17,\n                        68,\n                        66,\n                        73,\n                        -120,\n                        26,\n                        66,\n                        -100,\n                        18,\n                        25,\n                        66,\n                        -97,\n                        5,\n                        -35,\n                        66,\n                        -62,\n                        -4,\n                        -102,\n                        66,\n                        91,\n                        -111,\n                        -117,\n                        66,\n                        -115,\n                        74,\n                        36,\n                        66,\n                        79,\n                        27,\n                        65,\n                        66,\n                        -73,\n                        -125,\n                        -122,\n                        
66,\n                        79,\n                        -61,\n                        99,\n                        66,\n                        -66,\n                        72,\n                        -89,\n                        66,\n                        -60,\n                        -119,\n                        95,\n                        66,\n                        75,\n                        72,\n                        104,\n                        66,\n                        -125,\n                        18,\n                        -8,\n                        66,\n                        -98,\n                        -116,\n                        -82,\n                        66,\n                        -112,\n                        102,\n                        -6,\n                        66,\n                        -66,\n                        -36,\n                        -66,\n                        66,\n                        68,\n                        -6,\n                        6,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n          
              0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                      
  0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 224,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1028080268,\n                        1117081687,\n                        975190958,\n                        716904877,\n                        774831176,\n                        639322549,\n                        638555764,\n                        595666588,\n                        969288592,\n                        1012189408,\n                        989809142,\n                        7201519,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1027607764,\n                        1161729044,\n                        1032527560,\n                        1098284182,\n                        645160292,\n                        1012395092,\n                        716716960,\n                        726772144,\n                        1114345717,\n                        625838153,\n                        588041896,\n                        8827861,\n                        0,\n                        0\n                    ],\n                
    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 30,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 30,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 2247190326101601255,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        668518854,\n                        401940207,\n                        640755434,\n                        937474889,\n                        261731785,\n                        975152181,\n                        514923243,\n                        334452697,\n                        653561195,\n                        708155091,\n                        525945414,\n                        573290346,\n                        782200699,\n                        933399350,\n                        438040617,\n                        1066984925,\n                        1071890361,\n                        1029289941,\n                        173397579,\n                        192211567,\n                        94032977,\n                        313317207,\n                        359728693,\n                    
    866854081,\n                        447974187,\n                        842047015,\n                        191737714,\n                        644083570,\n                        1002012399,\n                        899747637,\n                        730906451,\n                        189980403,\n                        258315510,\n                        738027979,\n                        93416667,\n                        376759465,\n                        1010,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -76,\n                        97,\n                        71,\n                        68,\n                        -52,\n                        -40,\n                        92,\n                        69,\n                        15,\n                        -33,\n                        86,\n                        66,\n                        -100,\n                        -128,\n                        -32,\n                        66,\n                        -95,\n                        46,\n                        -85,\n                        66,\n                        74,\n                        27,\n                        -34,\n                        66,\n                        101,\n                        90,\n                        120,\n                        66,\n                        116,\n                        110,\n                        119,\n                        66,\n                        102,\n                        118,\n                        -25,\n                        66,\n                        -90,\n                        4,\n                        -87,\n                        65,\n                        67,\n                        8,\n                      
  -51,\n                        66,\n                        106,\n                        -59,\n                        -118,\n                        66,\n                        -64,\n                        50,\n                        94,\n                        66,\n                        81,\n                        125,\n                        68,\n                        66,\n                        -104,\n                        64,\n                        114,\n                        66,\n                        -84,\n                        6,\n                        -74,\n                        64,\n                        1,\n                        43,\n                        23,\n                        66,\n                        114,\n                        4,\n                        4,\n                        66,\n                        -103,\n                        47,\n                        84,\n                        66,\n                        -66,\n                        38,\n                        -63,\n                        66,\n                        -68,\n                        -32,\n                        80,\n                        66,\n                        54,\n                        -121,\n                        124,\n                        66,\n                        -66,\n                        109,\n                        82,\n                        66,\n                        78,\n                        -125,\n                        46,\n                        66,\n                        -96,\n                        -74,\n                        62,\n                        66,\n                        -72,\n                        -57,\n                        -45,\n                        66,\n                        -109,\n                        116,\n                        -83,\n                        66,\n                        54,\n                        93,\n                   
     -27,\n                        66,\n                        -108,\n                        -115,\n                        -89,\n                        66,\n                        110,\n                        105,\n                        12,\n                        66,\n                        -104,\n                        124,\n                        102,\n                        66,\n                        -97,\n                        97,\n                        -116,\n                        66,\n                        -108,\n                        104,\n                        -18,\n                        66,\n                        -128,\n                        15,\n                        -103,\n                        66,\n                        -121,\n                        93,\n                        38,\n                        66,\n                        122,\n                        -34,\n                        -61,\n                        66,\n                        77,\n                        77,\n                        107,\n                        66,\n                        87,\n                        -114,\n                        -7,\n                        66,\n                        -96,\n                        24,\n                        28,\n                        66,\n                        -82,\n                        -17,\n                        66,\n                        66,\n                        100,\n                        -79,\n                        -24,\n                        66,\n                        83,\n                        -93,\n                        34,\n                        66,\n                        -113,\n                        27,\n                        -37,\n                        66,\n                        -61,\n                        67,\n                        0,\n                        66,\n                        -103,\n                        62,\n    
                    17,\n                        66,\n                        -61,\n                        -37,\n                        90,\n                        66,\n                        -69,\n                        109,\n                        73,\n                        66,\n                        -103,\n                        -12,\n                        -24,\n                        66,\n                        23,\n                        44,\n                        32,\n                        66,\n                        79,\n                        -79,\n                        26,\n                        66,\n                        97,\n                        114,\n                        7,\n                        66,\n                        -95,\n                        -128,\n                        -39,\n                        66,\n                        95,\n                        -75,\n                        -94,\n                        66,\n                        119,\n                        -16,\n                        -38,\n                        66,\n                        -127,\n                        -13,\n                        18,\n                        66,\n                        -64,\n                        -80,\n                        -2,\n                        66,\n                        -106,\n                        118,\n                        -65,\n                        66,\n                        119,\n                        4,\n                        9,\n                        66,\n                        -123,\n                        -34,\n                        97,\n                        66,\n                        -119,\n                        40,\n                        103,\n                        66,\n                        -74,\n                        74,\n                        -98,\n                        66,\n                        -76,\n                        
127,\n                        86,\n                        66,\n                        -69,\n                        44,\n                        9,\n                        66,\n                        -75,\n                        85,\n                        59,\n                        66,\n                        -98,\n                        -29,\n                        -1,\n                        66,\n                        110,\n                        46,\n                        17,\n                        66,\n                        -64,\n                        11,\n                        27,\n                        66,\n                        106,\n                        59,\n                        -41,\n                        66,\n                        -108,\n                        86,\n                        -11,\n                        66,\n                        80,\n                        80,\n                        17,\n                        66,\n                        -75,\n                        120,\n                        48,\n                        66,\n                        -111,\n                        -72,\n                        52,\n                        66,\n                        79,\n                        12,\n                        -109,\n                        66,\n                        108,\n                        118,\n                        30,\n                        66,\n                        85,\n                        -13,\n                        -12,\n                        66,\n                        -64,\n                        77,\n                        -58,\n                        66,\n                        -118,\n                        -38,\n                        -84,\n                        66,\n                        71,\n                        33,\n                        -27,\n                        66,\n                        -72,\n                      
  -30,\n                        -87,\n                        66,\n                        -89,\n                        79,\n                        -39,\n                        66,\n                        -81,\n                        -69,\n                        -47,\n                        66,\n                        -120,\n                        -127,\n                        -89,\n                        66,\n                        -83,\n                        -100,\n                        -52,\n                        66,\n                        95,\n                        -115,\n                        -36,\n                        66,\n                        -101,\n                        84,\n                        -122,\n                        66,\n                        -108,\n                        67,\n                        100,\n                        66,\n                        -92,\n                        -68,\n                        51,\n                        66,\n                        103,\n                        -3,\n                        110,\n                        66,\n                        -127,\n                        -67,\n                        104,\n                        66,\n                        -123,\n                        -100,\n                        -69,\n                        66,\n                        -123,\n                        123,\n                        -84,\n                        66,\n                        -83,\n                        4,\n                        28,\n                        66,\n                        -107,\n                        44,\n                        -71,\n                        66,\n                        -116,\n                        -60,\n                        5,\n                        66,\n                        -113,\n                        119,\n                        87,\n                        66,\n                        
102,\n                        91,\n                        -9,\n                        66,\n                        -95,\n                        8,\n                        -96,\n                        66,\n                        -114,\n                        46,\n                        2,\n                        66,\n                        113,\n                        56,\n                        -125,\n                        66,\n                        88,\n                        3,\n                        49,\n                        66,\n                        -76,\n                        -35,\n                        16,\n                        66,\n                        91,\n                        -41,\n                        -19,\n                        66,\n                        -118,\n                        17,\n                        -41,\n                        66,\n                        -67,\n                        43,\n                        -68,\n                        66,\n                        -112,\n                        -128,\n                        -13,\n                        66,\n                        75,\n                        -62,\n                        4,\n                        66,\n                        -109,\n                        -3,\n                        -74,\n                        66,\n                        -92,\n                        -8,\n                        -123,\n                        66,\n                        -126,\n                        7,\n                        -79,\n                        66,\n                        -83,\n                        -9,\n                        -85,\n                        66,\n                        -103,\n                        22,\n                        105,\n                        66,\n                        103,\n                        -26,\n                        22,\n                        66,\n                  
      -117,\n                        117,\n                        118,\n                        66,\n                        -97,\n                        -41,\n                        -127,\n                        66,\n                        75,\n                        -70,\n                        -74,\n                        66,\n                        86,\n                        -31,\n                        97,\n                        66,\n                        -67,\n                        -84,\n                        85,\n                        66,\n                        -107,\n                        -105,\n                        94,\n                        66,\n                        92,\n                        -48,\n                        28,\n                        66,\n                        -121,\n                        9,\n                        -78,\n                        66,\n                        -126,\n                        91,\n                        63,\n                        66,\n                        -65,\n                        -122,\n                        -68,\n                        66,\n                        -115,\n                        -95,\n                        -73,\n                        66,\n                        -113,\n                        48,\n                        -93,\n                        66,\n                        -100,\n                        -78,\n                        101,\n                        66,\n                        -68,\n                        116,\n                        -46,\n                        66,\n                        77,\n                        -41,\n                        -58,\n                        66,\n                        -81,\n                        -51,\n                        44,\n                        66,\n                        -125,\n                        -31,\n                        -81,\n                        
66,\n                        -91,\n                        84,\n                        -99,\n                        66,\n                        -77,\n                        -108,\n                        -34,\n                        66,\n                        -110,\n                        19,\n                        81,\n                        66,\n                        -98,\n                        -123,\n                        104,\n                        66,\n                        -111,\n                        119,\n                        44,\n                        66,\n                        -108,\n                        -87,\n                        3,\n                        66,\n                        -66,\n                        124,\n                        -116,\n                        66,\n                        80,\n                        48,\n                        28,\n                        66,\n                        -67,\n                        88,\n                        0,\n                        66,\n                        -105,\n                        100,\n                        46,\n                        66,\n                        -68,\n                        119,\n                        57,\n                        66,\n                        -105,\n                        -113,\n                        -67,\n                        66,\n                        -62,\n                        109,\n                        50,\n                        66,\n                        -85,\n                        61,\n                        87,\n                        66,\n                        -118,\n                        78,\n                        -76,\n                        66,\n                        70,\n                        -110,\n                        23,\n                        66,\n                        -103,\n                        105,\n                        20,\n          
              66,\n                        -62,\n                        -114,\n                        28,\n                        66,\n                        102,\n                        -20,\n                        127,\n                        66,\n                        102,\n                        -120,\n                        -124,\n                        66,\n                        -107,\n                        109,\n                        -120,\n                        66,\n                        80,\n                        4,\n                        39,\n                        66,\n                        -115,\n                        -110,\n                        -59,\n                        66,\n                        100,\n                        5,\n                        -3,\n                        66,\n                        -94,\n                        -123,\n                        -72,\n                        66,\n                        74,\n                        31,\n                        50,\n                        66,\n                        -121,\n                        -35,\n                        117,\n                        66,\n                        -67,\n                        118,\n                        -112,\n                        66,\n                        79,\n                        -119,\n                        -56,\n                        66,\n                        96,\n                        64,\n                        -117,\n                        66,\n                        74,\n                        124,\n                        44,\n                        66,\n                        -64,\n                        -35,\n                        -81,\n                        66,\n                        -100,\n                        -16,\n                        71,\n                        66,\n                        -60,\n                        8,\n                       
 -82,\n                        66,\n                        76,\n                        -33,\n                        50,\n                        66,\n                        119,\n                        -75,\n                        -64,\n                        66,\n                        100,\n                        96,\n                        38,\n                        66,\n                        -84,\n                        46,\n                        32,\n                        66,\n                        81,\n                        -34,\n                        57,\n                        66,\n                        91,\n                        -20,\n                        65,\n                        66,\n                        80,\n                        28,\n                        107,\n                        66,\n                        91,\n                        -107,\n                        -87,\n                        66,\n                        -67,\n                        104,\n                        15,\n                        66,\n                        80,\n                        -28,\n                        70,\n                        66,\n                        -106,\n                        2,\n                        -63,\n                        66,\n                        -97,\n                        45,\n                        108,\n                        66,\n                        -112,\n                        123,\n                        -47,\n                        66,\n                        -127,\n                        122,\n                        -64,\n                        66,\n                        -67,\n                        28,\n                        94,\n                        66,\n                        -68,\n                        109,\n                        127,\n                        66,\n                        -57,\n                        103,\n                 
       -33,\n                        66,\n                        71,\n                        69,\n                        1,\n                        66,\n                        -66,\n                        30,\n                        -77,\n                        66,\n                        73,\n                        122,\n                        92,\n                        66,\n                        -107,\n                        -105,\n                        34,\n                        66,\n                        -99,\n                        -3,\n                        27,\n                        66,\n                        -119,\n                        -83,\n                        34,\n                        66,\n                        94,\n                        63,\n                        -47,\n                        66,\n                        119,\n                        -68,\n                        -2,\n                        66,\n                        78,\n                        0,\n                        7,\n                        66,\n                        -110,\n                        -6,\n                        76,\n                        66,\n                        -108,\n                        -12,\n                        12,\n                        66,\n                        -104,\n                        127,\n                        -16,\n                        66,\n                        -77,\n                        0,\n                        -37,\n                        66,\n                        127,\n                        31,\n                        32,\n                        66,\n                        -110,\n                        56,\n                        16,\n                        66,\n                        94,\n                        -43,\n                        87,\n                        66,\n                        -64,\n                        24,\n                  
      -51,\n                        66,\n                        71,\n                        32,\n                        -44,\n                        66,\n                        91,\n                        118,\n                        0,\n                        66,\n                        -84,\n                        105,\n                        33,\n                        66,\n                        -61,\n                        121,\n                        -74,\n                        66,\n                        -65,\n                        60,\n                        -122,\n                        66,\n                        89,\n                        117,\n                        -79,\n                        66,\n                        -113,\n                        7,\n                        10,\n                        66,\n                        125,\n                        -97,\n                        89,\n                        66,\n                        -69,\n                        -107,\n                        92,\n                        66,\n                        86,\n                        14,\n                        121,\n                        66,\n                        -63,\n                        104,\n                        96,\n                        66,\n                        -117,\n                        -123,\n                        -7,\n                        66,\n                        -75,\n                        -117,\n                        33,\n                        66,\n                        -103,\n                        78,\n                        64,\n                        66,\n                        -93,\n                        -80,\n                        118,\n                        66,\n                        -105,\n                        55,\n                        30,\n                        66,\n                        -98,\n                        48,\n          
              105,\n                        66,\n                        90,\n                        -44,\n                        -13,\n                        66,\n                        82,\n                        -67,\n                        47,\n                        66,\n                        -60,\n                        -9,\n                        -36,\n                        66,\n                        90,\n                        -53,\n                        17,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n        
                0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 218,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        731715524,\n                        990054538,\n                        1026741703,\n                        987921818,\n                        1160427698,\n                        1027744577,\n                        1155172667,\n                        753501596,\n                        754980010,\n                        970705661,\n                        585921265,\n                        12283,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        770055808,\n                        1032510794,\n                        1098485999,\n                        987942229,\n                        772988480,\n                        643495504,\n                        628963700,\n                        710528990,\n                        758159636,\n                        973894064,\n                        982928569,\n        
                10088,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 36,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 36,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 5668109095303649572,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        905644061,\n                        475760115,\n                        343269078,\n                        498908753,\n                        380147165,\n                        664402858,\n                        221587067,\n                        304048502,\n                        922151586,\n                        249281837,\n                        790437806,\n                        317134067,\n                        853531946,\n                        78737139,\n                        116205674,\n                        863303737,\n                        488729431,\n                        997545537,\n                        728103473,\n                        773499333,\n            
            481474850,\n                        213716571,\n                        455575535,\n                        215173479,\n                        1005317039,\n                        904377537,\n                        573929313,\n                        534994493,\n                        1029109043,\n                        321695009,\n                        1021381295,\n                        584251965,\n                        82291706,\n                        182418734,\n                        500729415,\n                        174446697,\n                        635208918,\n                        524079082,\n                        15,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        67,\n                        25,\n                        -29,\n                        -114,\n                        61,\n                        -7,\n                        21,\n                        -38,\n                        67,\n                        -16,\n                        62,\n                        59,\n                        68,\n                        25,\n                        115,\n                        -42,\n                        66,\n                        102,\n                        -95,\n                        45,\n                        69,\n                        80,\n                        95,\n                        44,\n                        66,\n                        -127,\n                        71,\n                        31,\n                        66,\n                        -114,\n                        -40,\n                        61,\n                        66,\n                        -98,\n                        88,\n                        -49,\n                        64,\n                        -68,\n                        -115,\n       
                 10,\n                        66,\n                        -125,\n                        36,\n                        -113,\n                        66,\n                        -63,\n                        -85,\n                        49,\n                        66,\n                        -79,\n                        -87,\n                        46,\n                        66,\n                        -60,\n                        -26,\n                        -8,\n                        66,\n                        75,\n                        -56,\n                        -113,\n                        66,\n                        74,\n                        -7,\n                        -3,\n                        65,\n                        -105,\n                        -86,\n                        43,\n                        66,\n                        -59,\n                        114,\n                        121,\n                        66,\n                        -125,\n                        27,\n                        16,\n                        66,\n                        -94,\n                        40,\n                        -112,\n                        66,\n                        93,\n                        110,\n                        -71,\n                        66,\n                        -118,\n                        -127,\n                        -85,\n                        66,\n                        68,\n                        -37,\n                        108,\n                        66,\n                        -100,\n                        -82,\n                        -94,\n                        66,\n                        -89,\n                        112,\n                        -29,\n                        66,\n                        -69,\n                        51,\n                        -115,\n                        66,\n                        -114,\n                   
     -12,\n                        -1,\n                        66,\n                        -109,\n                        -18,\n                        50,\n                        66,\n                        -70,\n                        -18,\n                        39,\n                        66,\n                        82,\n                        51,\n                        87,\n                        66,\n                        -79,\n                        -31,\n                        46,\n                        66,\n                        77,\n                        -105,\n                        -96,\n                        66,\n                        -71,\n                        -20,\n                        21,\n                        66,\n                        78,\n                        102,\n                        -113,\n                        66,\n                        -110,\n                        -44,\n                        17,\n                        66,\n                        73,\n                        50,\n                        -85,\n                        66,\n                        82,\n                        -104,\n                        0,\n                        66,\n                        106,\n                        -118,\n                        -7,\n                        66,\n                        -114,\n                        -109,\n                        121,\n                        66,\n                        -57,\n                        -9,\n                        57,\n                        65,\n                        -93,\n                        -63,\n                        -66,\n                        66,\n                        -59,\n                        -70,\n                        -89,\n                        66,\n                        -74,\n                        -120,\n                        2,\n                        66,\n                        106,\n        
                7,\n                        -88,\n                        66,\n                        -59,\n                        94,\n                        -105,\n                        66,\n                        -60,\n                        -83,\n                        -49,\n                        66,\n                        -103,\n                        0,\n                        -60,\n                        66,\n                        -113,\n                        126,\n                        120,\n                        66,\n                        -74,\n                        14,\n                        8,\n                        66,\n                        125,\n                        104,\n                        -117,\n                        66,\n                        -62,\n                        120,\n                        80,\n                        66,\n                        -122,\n                        -34,\n                        112,\n                        66,\n                        71,\n                        -3,\n                        117,\n                        66,\n                        83,\n                        104,\n                        57,\n                        66,\n                        -98,\n                        31,\n                        23,\n                        66,\n                        -94,\n                        -91,\n                        104,\n                        66,\n                        84,\n                        -87,\n                        47,\n                        66,\n                        69,\n                        117,\n                        -14,\n                        66,\n                        -94,\n                        -117,\n                        -86,\n                        66,\n                        76,\n                        94,\n                        -43,\n                        66,\n                        
-65,\n                        -13,\n                        -78,\n                        66,\n                        -102,\n                        -53,\n                        43,\n                        66,\n                        75,\n                        92,\n                        -121,\n                        66,\n                        -80,\n                        69,\n                        -70,\n                        66,\n                        -108,\n                        -125,\n                        -44,\n                        66,\n                        91,\n                        -9,\n                        -39,\n                        66,\n                        74,\n                        -4,\n                        46,\n                        66,\n                        93,\n                        121,\n                        55,\n                        66,\n                        -106,\n                        -56,\n                        -94,\n                        66,\n                        -60,\n                        83,\n                        69,\n                        66,\n                        -66,\n                        34,\n                        109,\n                        66,\n                        -110,\n                        -60,\n                        95,\n                        66,\n                        -66,\n                        -80,\n                        49,\n                        66,\n                        -109,\n                        45,\n                        107,\n                        66,\n                        79,\n                        66,\n                        -59,\n                        66,\n                        116,\n                        108,\n                        68,\n                        66,\n                        -99,\n                        100,\n                        -72,\n                        66,\n              
          -98,\n                        -28,\n                        53,\n                        66,\n                        100,\n                        18,\n                        -48,\n                        66,\n                        76,\n                        -26,\n                        55,\n                        66,\n                        83,\n                        40,\n                        -58,\n                        66,\n                        -62,\n                        3,\n                        -62,\n                        66,\n                        76,\n                        -27,\n                        -44,\n                        66,\n                        -59,\n                        113,\n                        -105,\n                        66,\n                        -74,\n                        117,\n                        92,\n                        66,\n                        -124,\n                        -53,\n                        37,\n                        66,\n                        -79,\n                        -98,\n                        -71,\n                        66,\n                        -66,\n                        32,\n                        -113,\n                        66,\n                        -72,\n                        105,\n                        112,\n                        66,\n                        82,\n                        -81,\n                        79,\n                        66,\n                        -121,\n                        101,\n                        -20,\n                        66,\n                        -120,\n                        -69,\n                        108,\n                        66,\n                        -61,\n                        70,\n                        72,\n                        66,\n                        -117,\n                        -40,\n                        -101,\n                        
66,\n                        68,\n                        -41,\n                        63,\n                        66,\n                        -110,\n                        79,\n                        8,\n                        66,\n                        82,\n                        82,\n                        125,\n                        66,\n                        -69,\n                        78,\n                        -71,\n                        66,\n                        -68,\n                        101,\n                        98,\n                        66,\n                        -63,\n                        -77,\n                        -88,\n                        66,\n                        -64,\n                        97,\n                        -108,\n                        66,\n                        -74,\n                        107,\n                        -60,\n                        66,\n                        -111,\n                        -34,\n                        -26,\n                        66,\n                        -66,\n                        -84,\n                        41,\n                        66,\n                        -92,\n                        8,\n                        -25,\n                        66,\n                        -69,\n                        -4,\n                        74,\n                        66,\n                        106,\n                        -41,\n                        -101,\n                        66,\n                        -119,\n                        -126,\n                        1,\n                        66,\n                        -96,\n                        -63,\n                        -74,\n                        66,\n                        -127,\n                        -112,\n                        84,\n                        66,\n                        -75,\n                        -23,\n                        91,\n            
            66,\n                        79,\n                        -23,\n                        28,\n                        66,\n                        -68,\n                        30,\n                        0,\n                        66,\n                        -104,\n                        -123,\n                        -83,\n                        66,\n                        -99,\n                        -119,\n                        -127,\n                        66,\n                        -83,\n                        63,\n                        -67,\n                        66,\n                        -71,\n                        -61,\n                        -76,\n                        66,\n                        -105,\n                        -73,\n                        -124,\n                        66,\n                        -115,\n                        -17,\n                        81,\n                        66,\n                        -119,\n                        -109,\n                        -17,\n                        66,\n                        -69,\n                        3,\n                        -46,\n                        66,\n                        -107,\n                        -9,\n                        126,\n                        66,\n                        -63,\n                        40,\n                        39,\n                        66,\n                        -126,\n                        88,\n                        -121,\n                        66,\n                        68,\n                        -49,\n                        -34,\n                        66,\n                        -59,\n                        -78,\n                        43,\n                        64,\n                        -113,\n                        -25,\n                        -1,\n                        66,\n                        -75,\n                        97,\n                       
 113,\n                        66,\n                        122,\n                        -51,\n                        -96,\n                        66,\n                        -70,\n                        -83,\n                        -120,\n                        66,\n                        72,\n                        -18,\n                        -77,\n                        66,\n                        -70,\n                        -66,\n                        -34,\n                        66,\n                        94,\n                        -72,\n                        94,\n                        66,\n                        78,\n                        29,\n                        -114,\n                        66,\n                        -107,\n                        21,\n                        -29,\n                        66,\n                        77,\n                        -6,\n                        -127,\n                        66,\n                        -64,\n                        -41,\n                        83,\n                        66,\n                        -105,\n                        102,\n                        -45,\n                        66,\n                        99,\n                        33,\n                        -34,\n                        66,\n                        98,\n                        -117,\n                        117,\n                        66,\n                        -62,\n                        27,\n                        20,\n                        66,\n                        -65,\n                        -69,\n                        -73,\n                        66,\n                        -100,\n                        -33,\n                        60,\n                        66,\n                        -68,\n                        64,\n                        -41,\n                        66,\n                        85,\n                        -59,\n          
              84,\n                        66,\n                        -114,\n                        -53,\n                        -76,\n                        66,\n                        -83,\n                        37,\n                        -25,\n                        66,\n                        102,\n                        -59,\n                        46,\n                        66,\n                        -88,\n                        -126,\n                        -38,\n                        66,\n                        123,\n                        -23,\n                        -103,\n                        66,\n                        -121,\n                        41,\n                        -57,\n                        66,\n                        -73,\n                        -15,\n                        -111,\n                        66,\n                        -109,\n                        -93,\n                        56,\n                        66,\n                        71,\n                        -24,\n                        92,\n                        66,\n                        -61,\n                        89,\n                        -51,\n                        66,\n                        -84,\n                        97,\n                        -5,\n                        66,\n                        -99,\n                        85,\n                        86,\n                        66,\n                        74,\n                        -122,\n                        86,\n                        66,\n                        -113,\n                        71,\n                        56,\n                        66,\n                        -65,\n                        94,\n                        25,\n                        66,\n                        105,\n                        -90,\n                        81,\n                        66,\n                        -114,\n                        
34,\n                        -85,\n                        66,\n                        -111,\n                        25,\n                        -36,\n                        66,\n                        -103,\n                        -124,\n                        126,\n                        66,\n                        78,\n                        21,\n                        77,\n                        66,\n                        -64,\n                        -113,\n                        -73,\n                        66,\n                        -83,\n                        -73,\n                        -85,\n                        66,\n                        97,\n                        72,\n                        67,\n                        66,\n                        109,\n                        -69,\n                        42,\n                        66,\n                        -109,\n                        102,\n                        44,\n                        66,\n                        -108,\n                        10,\n                        28,\n                        66,\n                        -112,\n                        -46,\n                        36,\n                        66,\n                        -101,\n                        -71,\n                        8,\n                        66,\n                        -88,\n                        63,\n                        -16,\n                        66,\n                        -95,\n                        -93,\n                        -25,\n                        66,\n                        -104,\n                        110,\n                        -37,\n                        66,\n                        88,\n                        43,\n                        66,\n                        66,\n                        -104,\n                        69,\n                        -122,\n                        66,\n                        -75,\n          
              -62,\n                        28,\n                        66,\n                        -117,\n                        67,\n                        47,\n                        66,\n                        73,\n                        12,\n                        -32,\n                        66,\n                        -99,\n                        104,\n                        6,\n                        66,\n                        -71,\n                        127,\n                        -57,\n                        66,\n                        -68,\n                        13,\n                        96,\n                        66,\n                        -74,\n                        9,\n                        -9,\n                        66,\n                        -61,\n                        17,\n                        101,\n                        66,\n                        -108,\n                        -37,\n                        10,\n                        66,\n                        -119,\n                        109,\n                        -5,\n                        66,\n                        -64,\n                        -62,\n                        12,\n                        66,\n                        -125,\n                        125,\n                        126,\n                        66,\n                        -113,\n                        50,\n                        -18,\n                        66,\n                        -121,\n                        -88,\n                        115,\n                        66,\n                        -71,\n                        60,\n                        64,\n                        66,\n                        71,\n                        103,\n                        54,\n                        66,\n                        -90,\n                        107,\n                        50,\n                        66,\n                        79,\n    
                    87,\n                        28,\n                        66,\n                        -60,\n                        -7,\n                        3,\n                        66,\n                        -83,\n                        115,\n                        7,\n                        66,\n                        -60,\n                        -121,\n                        83,\n                        66,\n                        -114,\n                        -48,\n                        -14,\n                        66,\n                        81,\n                        -124,\n                        -18,\n                        66,\n                        -73,\n                        -109,\n                        -128,\n                        66,\n                        -103,\n                        109,\n                        34,\n                        66,\n                        -103,\n                        -110,\n                        -88,\n                        66,\n                        -128,\n                        58,\n                        -33,\n                        66,\n                        -79,\n                        -56,\n                        -9,\n                        66,\n                        -105,\n                        -66,\n                        19,\n                        66,\n                        -103,\n                        26,\n                        -50,\n                        66,\n                        -101,\n                        -108,\n                        -10,\n                        66,\n                        -64,\n                        -85,\n                        89,\n                        66,\n                        -104,\n                        -3,\n                        -85,\n                        66,\n                        97,\n                        16,\n                        16,\n                        66,\n                
        -61,\n                        -39,\n                        -22,\n                        66,\n                        70,\n                        -53,\n                        17,\n                        66,\n                        -76,\n                        -65,\n                        42,\n                        66,\n                        -108,\n                        -117,\n                        59,\n                        66,\n                        -67,\n                        -11,\n                        -32,\n                        66,\n                        -68,\n                        40,\n                        -117,\n                        66,\n                        -115,\n                        38,\n                        121,\n                        66,\n                        -113,\n                        -114,\n                        83,\n                        66,\n                        -110,\n                        4,\n                        -45,\n                        66,\n                        -69,\n                        -100,\n                        78,\n                        66,\n                        -61,\n                        122,\n                        45,\n                        66,\n                        83,\n                        -110,\n                        32,\n                        66,\n                        98,\n                        -95,\n                        92,\n                        66,\n                        -118,\n                        79,\n                        58,\n                        66,\n                        87,\n                        -22,\n                        110,\n                        66,\n                        74,\n                        -98,\n                        48,\n                        66,\n                        79,\n                        30,\n                        15,\n                        0,\n       
                 0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                   
     0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 229,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        645503984,\n                        754932688,\n                        1112574895,\n                        772528310,\n                        727004726,\n                        1103212466,\n                        1142518334,\n                        968737928,\n                        1114349854,\n                        726301852,\n                        715612291,\n                        600441944,\n                        1,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1031346634,\n                        769874273,\n                        968790667,\n                        643571855,\n                
        1100062132,\n                        730120472,\n                        1028158001,\n                        1017983984,\n                        731079067,\n                        984501022,\n                        581840062,\n                        624776773,\n                        1,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 25,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 25,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -2564745619268294314,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        192256842,\n                        175568857,\n                        367496865,\n                        1055719231,\n                        378742481,\n                        202427477,\n                        773694774,\n                        623950843,\n                        231783221,\n                        632383305,\n                        620189034,\n                        196558758,\n                        349878587,\n          
              1056540149,\n                        70600491,\n                        312944081,\n                        580429739,\n                        907859757,\n                        459868150,\n                        82024246,\n                        760411894,\n                        746158818,\n                        663805373,\n                        73718511,\n                        879564727,\n                        705772005,\n                        1005532225,\n                        89722041,\n                        723745745,\n                        799472890,\n                        1054250306,\n                        841709619,\n                        444237647,\n                        571555557,\n                        1001506038,\n                        108188741,\n                        849344946,\n                        606686422,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -25,\n                        64,\n                        -27,\n                        68,\n                        17,\n                        100,\n                        117,\n                        69,\n                        50,\n                        88,\n                        30,\n                        66,\n                        121,\n                        124,\n                        -116,\n                        66,\n                        8,\n                        25,\n                        55,\n                        66,\n                        -125,\n                        97,\n                        53,\n                        66,\n                        -92,\n                        -48,\n                        0,\n                        66,\n                        -69,\n                        
31,\n                        -50,\n                        66,\n                        -101,\n                        16,\n                        -76,\n                        66,\n                        58,\n                        28,\n                        51,\n                        66,\n                        91,\n                        64,\n                        -38,\n                        66,\n                        -97,\n                        7,\n                        40,\n                        66,\n                        -102,\n                        63,\n                        -116,\n                        66,\n                        -111,\n                        -101,\n                        7,\n                        62,\n                        20,\n                        67,\n                        110,\n                        66,\n                        126,\n                        125,\n                        100,\n                        66,\n                        -79,\n                        50,\n                        -45,\n                        66,\n                        -66,\n                        -62,\n                        82,\n                        66,\n                        109,\n                        50,\n                        43,\n                        66,\n                        -118,\n                        -11,\n                        -50,\n                        65,\n                        -96,\n                        -74,\n                        -16,\n                        66,\n                        -88,\n                        96,\n                        -101,\n                        66,\n                        -74,\n                        36,\n                        -94,\n                        66,\n                        3,\n                        32,\n                        -4,\n                        66,\n                        -106,\n                  
      123,\n                        -106,\n                        66,\n                        -59,\n                        102,\n                        -92,\n                        66,\n                        -110,\n                        -98,\n                        34,\n                        66,\n                        -82,\n                        -55,\n                        -120,\n                        66,\n                        -93,\n                        80,\n                        -37,\n                        65,\n                        -121,\n                        83,\n                        19,\n                        66,\n                        -123,\n                        -108,\n                        59,\n                        66,\n                        -70,\n                        108,\n                        119,\n                        66,\n                        72,\n                        77,\n                        -13,\n                        66,\n                        -86,\n                        -66,\n                        13,\n                        66,\n                        -128,\n                        85,\n                        8,\n                        66,\n                        -68,\n                        -126,\n                        25,\n                        66,\n                        -66,\n                        89,\n                        1,\n                        66,\n                        -119,\n                        11,\n                        -49,\n                        66,\n                        -96,\n                        15,\n                        10,\n                        66,\n                        78,\n                        -68,\n                        -32,\n                        66,\n                        -120,\n                        -60,\n                        53,\n                        66,\n                        73,\n      
                  116,\n                        -104,\n                        66,\n                        116,\n                        52,\n                        45,\n                        66,\n                        109,\n                        111,\n                        1,\n                        66,\n                        -72,\n                        60,\n                        -31,\n                        66,\n                        -111,\n                        -84,\n                        -99,\n                        66,\n                        125,\n                        -72,\n                        -98,\n                        66,\n                        -66,\n                        16,\n                        -105,\n                        66,\n                        -104,\n                        127,\n                        -47,\n                        66,\n                        -95,\n                        -38,\n                        -5,\n                        66,\n                        -84,\n                        88,\n                        41,\n                        66,\n                        -108,\n                        -40,\n                        -66,\n                        66,\n                        -116,\n                        78,\n                        -24,\n                        66,\n                        -92,\n                        97,\n                        11,\n                        66,\n                        -104,\n                        96,\n                        116,\n                        66,\n                        -63,\n                        -121,\n                        -24,\n                        66,\n                        -117,\n                        -32,\n                        -74,\n                        66,\n                        -67,\n                        91,\n                        107,\n                        66,\n                  
      101,\n                        -91,\n                        -59,\n                        66,\n                        -61,\n                        71,\n                        118,\n                        66,\n                        -86,\n                        -89,\n                        11,\n                        66,\n                        92,\n                        -3,\n                        -64,\n                        66,\n                        -114,\n                        102,\n                        74,\n                        66,\n                        -64,\n                        32,\n                        19,\n                        66,\n                        79,\n                        -56,\n                        25,\n                        66,\n                        -59,\n                        -114,\n                        77,\n                        66,\n                        -97,\n                        -122,\n                        71,\n                        66,\n                        -119,\n                        -112,\n                        51,\n                        66,\n                        -128,\n                        37,\n                        17,\n                        66,\n                        -68,\n                        41,\n                        84,\n                        65,\n                        90,\n                        91,\n                        -107,\n                        66,\n                        -95,\n                        -39,\n                        -79,\n                        66,\n                        106,\n                        64,\n                        -61,\n                        66,\n                        -119,\n                        -106,\n                        25,\n                        66,\n                        -64,\n                        -5,\n                        -44,\n                        66,\n       
                 -121,\n                        -116,\n                        -77,\n                        66,\n                        -85,\n                        -117,\n                        -3,\n                        66,\n                        -103,\n                        -56,\n                        4,\n                        66,\n                        -102,\n                        -24,\n                        37,\n                        66,\n                        84,\n                        119,\n                        54,\n                        66,\n                        -113,\n                        104,\n                        -78,\n                        66,\n                        113,\n                        -102,\n                        -116,\n                        66,\n                        68,\n                        4,\n                        104,\n                        66,\n                        -122,\n                        -2,\n                        -49,\n                        66,\n                        98,\n                        -60,\n                        -88,\n                        66,\n                        -122,\n                        14,\n                        19,\n                        66,\n                        -113,\n                        -111,\n                        77,\n                        66,\n                        -73,\n                        -15,\n                        -59,\n                        66,\n                        86,\n                        -74,\n                        18,\n                        66,\n                        -77,\n                        -90,\n                        -41,\n                        66,\n                        -119,\n                        -5,\n                        21,\n                        66,\n                        -59,\n                        50,\n                        50,\n                    
    66,\n                        -103,\n                        60,\n                        21,\n                        66,\n                        -66,\n                        121,\n                        18,\n                        66,\n                        -70,\n                        21,\n                        24,\n                        66,\n                        -128,\n                        -112,\n                        -127,\n                        66,\n                        86,\n                        48,\n                        -89,\n                        66,\n                        -109,\n                        -64,\n                        -96,\n                        66,\n                        -116,\n                        28,\n                        68,\n                        66,\n                        -100,\n                        127,\n                        0,\n                        66,\n                        -112,\n                        -63,\n                        -67,\n                        66,\n                        -110,\n                        -65,\n                        76,\n                        66,\n                        -112,\n                        -72,\n                        43,\n                        66,\n                        9,\n                        65,\n                        65,\n                        66,\n                        -93,\n                        -8,\n                        -34,\n                        66,\n                        -106,\n                        -20,\n                        23,\n                        66,\n                        -100,\n                        31,\n                        -114,\n                        66,\n                        90,\n                        120,\n                        93,\n                        66,\n                        -62,\n                        -18,\n                        50,\n       
                 66,\n                        96,\n                        -5,\n                        -29,\n                        66,\n                        -94,\n                        97,\n                        2,\n                        66,\n                        -74,\n                        -101,\n                        88,\n                        66,\n                        -70,\n                        28,\n                        14,\n                        66,\n                        -106,\n                        14,\n                        33,\n                        66,\n                        -67,\n                        75,\n                        127,\n                        66,\n                        -98,\n                        90,\n                        -38,\n                        66,\n                        -119,\n                        32,\n                        -96,\n                        66,\n                        85,\n                        99,\n                        -60,\n                        66,\n                        -64,\n                        -46,\n                        72,\n                        66,\n                        -64,\n                        -28,\n                        -6,\n                        66,\n                        -81,\n                        54,\n                        94,\n                        66,\n                        86,\n                        126,\n                        97,\n                        66,\n                        -106,\n                        -94,\n                        -6,\n                        66,\n                        -111,\n                        2,\n                        -114,\n                        66,\n                        -95,\n                        9,\n                        -27,\n                        66,\n                        -68,\n                        23,\n                        62,\n      
                  66,\n                        -60,\n                        -56,\n                        -77,\n                        66,\n                        82,\n                        -115,\n                        -79,\n                        66,\n                        -62,\n                        26,\n                        -36,\n                        66,\n                        -59,\n                        101,\n                        70,\n                        65,\n                        95,\n                        54,\n                        62,\n                        66,\n                        -60,\n                        7,\n                        13,\n                        66,\n                        -100,\n                        -94,\n                        22,\n                        66,\n                        -110,\n                        -103,\n                        34,\n                        66,\n                        75,\n                        77,\n                        4,\n                        66,\n                        -99,\n                        -126,\n                        -41,\n                        66,\n                        -110,\n                        -111,\n                        101,\n                        66,\n                        100,\n                        -23,\n                        85,\n                        66,\n                        95,\n                        73,\n                        26,\n                        66,\n                        104,\n                        -86,\n                        92,\n                        66,\n                        -74,\n                        -45,\n                        29,\n                        66,\n                        -125,\n                        -56,\n                        106,\n                        66,\n                        -64,\n                        112,\n                        
-125,\n                        66,\n                        -62,\n                        115,\n                        -101,\n                        66,\n                        89,\n                        86,\n                        40,\n                        66,\n                        -103,\n                        97,\n                        57,\n                        66,\n                        -109,\n                        77,\n                        -23,\n                        66,\n                        -74,\n                        -14,\n                        -104,\n                        66,\n                        -85,\n                        21,\n                        82,\n                        66,\n                        -81,\n                        127,\n                        5,\n                        66,\n                        -121,\n                        60,\n                        26,\n                        66,\n                        103,\n                        -36,\n                        -53,\n                        66,\n                        -61,\n                        -76,\n                        -45,\n                        66,\n                        -67,\n                        112,\n                        -87,\n                        66,\n                        -102,\n                        86,\n                        103,\n                        66,\n                        -110,\n                        16,\n                        69,\n                        66,\n                        -104,\n                        -9,\n                        122,\n                        66,\n                        -68,\n                        115,\n                        63,\n                        66,\n                        -86,\n                        69,\n                        88,\n                        66,\n                        -78,\n                        21,\n            
            100,\n                        66,\n                        -73,\n                        123,\n                        -29,\n                        66,\n                        -101,\n                        60,\n                        -90,\n                        66,\n                        -112,\n                        -47,\n                        -54,\n                        66,\n                        -102,\n                        -124,\n                        2,\n                        66,\n                        92,\n                        -55,\n                        46,\n                        66,\n                        -70,\n                        88,\n                        93,\n                        66,\n                        -115,\n                        -23,\n                        -56,\n                        66,\n                        -63,\n                        -75,\n                        -103,\n                        66,\n                        -100,\n                        -62,\n                        -24,\n                        66,\n                        -62,\n                        86,\n                        -116,\n                        66,\n                        -69,\n                        -30,\n                        -74,\n                        66,\n                        -57,\n                        -123,\n                        21,\n                        66,\n                        -61,\n                        -26,\n                        45,\n                        66,\n                        -108,\n                        -83,\n                        58,\n                        66,\n                        -67,\n                        117,\n                        -66,\n                        66,\n                        90,\n                        -35,\n                        -125,\n                        66,\n                        112,\n                    
    -123,\n                        -45,\n                        66,\n                        -92,\n                        -97,\n                        84,\n                        66,\n                        -80,\n                        24,\n                        116,\n                        66,\n                        74,\n                        116,\n                        75,\n                        66,\n                        -69,\n                        -86,\n                        -105,\n                        66,\n                        -74,\n                        41,\n                        49,\n                        66,\n                        -119,\n                        -39,\n                        -23,\n                        66,\n                        -127,\n                        -39,\n                        106,\n                        66,\n                        -115,\n                        -74,\n                        103,\n                        66,\n                        126,\n                        113,\n                        -89,\n                        66,\n                        85,\n                        -46,\n                        -119,\n                        66,\n                        -101,\n                        5,\n                        -44,\n                        66,\n                        -59,\n                        84,\n                        124,\n                        66,\n                        -77,\n                        116,\n                        -68,\n                        66,\n                        -62,\n                        42,\n                        -69,\n                        66,\n                        -120,\n                        -42,\n                        -64,\n                        66,\n                        -126,\n                        -57,\n                        -15,\n                        66,\n                        
-94,\n                        72,\n                        4,\n                        66,\n                        -123,\n                        -62,\n                        -81,\n                        66,\n                        -100,\n                        76,\n                        93,\n                        66,\n                        87,\n                        46,\n                        -8,\n                        66,\n                        -117,\n                        -97,\n                        -43,\n                        66,\n                        -101,\n                        -44,\n                        21,\n                        66,\n                        117,\n                        -2,\n                        43,\n                        66,\n                        113,\n                        95,\n                        36,\n                        66,\n                        -62,\n                        -69,\n                        10,\n                        66,\n                        -105,\n                        107,\n                        -51,\n                        66,\n                        -105,\n                        -39,\n                        89,\n                        66,\n                        -66,\n                        33,\n                        74,\n                        66,\n                        94,\n                        -93,\n                        -20,\n                        66,\n                        -99,\n                        -44,\n                        -10,\n                        66,\n                        79,\n                        2,\n                        -27,\n                        66,\n                        72,\n                        -21,\n                        1,\n                        66,\n                        -109,\n                        -5,\n                        98,\n                        66,\n                    
    -109,\n                        25,\n                        40,\n                        66,\n                        -72,\n                        117,\n                        -104,\n                        66,\n                        -106,\n                        122,\n                        -18,\n                        66,\n                        -111,\n                        -114,\n                        5,\n                        66,\n                        102,\n                        55,\n                        -127,\n                        66,\n                        81,\n                        22,\n                        -99,\n                        66,\n                        -77,\n                        108,\n                        51,\n                        66,\n                        -103,\n                        32,\n                        -74,\n                        66,\n                        -65,\n                        66,\n                        -116,\n                        66,\n                        87,\n                        29,\n                        -99,\n                        66,\n                        -118,\n                        -73,\n                        -109,\n                        66,\n                        -104,\n                        72,\n                        -27,\n                        66,\n                        -59,\n                        -30,\n                        118,\n                        66,\n                        -66,\n                        105,\n                        95,\n                        66,\n                        85,\n                        -90,\n                        -14,\n                        66,\n                        -83,\n                        10,\n                        -3,\n                        66,\n                        -61,\n                        -28,\n                        68,\n                        66,\n    
                    -65,\n                        109,\n                        6,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n            
            0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 228,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        983695022,\n                        758358652,\n                        1097956376,\n                        644027516,\n                        730002335,\n                        758809528,\n                        581218456,\n                        968814673,\n                        1016468675,\n                        1145541514,\n                        581196598,\n                        726234781,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                  
      -1,\n                        1,\n                        255,\n                        1147319869,\n                        772687915,\n                        1157235496,\n                        1162025270,\n                        731174326,\n                        1160646694,\n                        581219185,\n                        1147144588,\n                        987751169,\n                        754111534,\n                        581316658,\n                        582922615,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 26,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 26,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -6258575445422044724,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        196447062,\n                        858087215,\n                        488471342,\n                        591964861,\n                        866245582,\n                        880327665,\n                        
454684506,\n                        1005898102,\n                        488539182,\n                        880387785,\n                        115723387,\n                        363034074,\n                        337086034,\n                        465001035,\n                        128511614,\n                        204907702,\n                        190798375,\n                        212814947,\n                        447661509,\n                        903664767,\n                        798058290,\n                        1008183138,\n                        783631675,\n                        624912210,\n                        395759066,\n                        70870983,\n                        346394591,\n                        50118327,\n                        43067743,\n                        1005576053,\n                        500361595,\n                        364677745,\n                        386971634,\n                        1038415566,\n                        389105102,\n                        439010385,\n                        267610930,\n                        18056933,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        7,\n                        84,\n                        -42,\n                        68,\n                        115,\n                        -35,\n                        -55,\n                        69,\n                        49,\n                        51,\n                        -54,\n                        66,\n                        52,\n                        88,\n                        -72,\n                        66,\n                        99,\n                        -100,\n                        -76,\n                        66,\n                        -94,\n                     
   -88,\n                        119,\n                        65,\n                        -52,\n                        9,\n                        -59,\n                        66,\n                        -108,\n                        11,\n                        -92,\n                        66,\n                        -113,\n                        28,\n                        -74,\n                        66,\n                        -67,\n                        58,\n                        107,\n                        66,\n                        -71,\n                        52,\n                        74,\n                        66,\n                        -118,\n                        -22,\n                        69,\n                        66,\n                        -71,\n                        95,\n                        7,\n                        66,\n                        19,\n                        30,\n                        -73,\n                        66,\n                        -62,\n                        48,\n                        -76,\n                        66,\n                        -85,\n                        -14,\n                        -76,\n                        65,\n                        -56,\n                        -34,\n                        3,\n                        66,\n                        -85,\n                        47,\n                        -59,\n                        66,\n                        -128,\n                        46,\n                        55,\n                        66,\n                        -110,\n                        109,\n                        -40,\n                        66,\n                        -81,\n                        -43,\n                        52,\n                        66,\n                        -85,\n                        -32,\n                        -123,\n                        64,\n                        -20,\n            
            -16,\n                        76,\n                        66,\n                        -115,\n                        122,\n                        26,\n                        66,\n                        -61,\n                        35,\n                        23,\n                        66,\n                        -67,\n                        -54,\n                        113,\n                        66,\n                        -68,\n                        54,\n                        123,\n                        66,\n                        35,\n                        41,\n                        -68,\n                        66,\n                        -65,\n                        11,\n                        117,\n                        66,\n                        -124,\n                        -79,\n                        -46,\n                        66,\n                        -89,\n                        -14,\n                        65,\n                        66,\n                        115,\n                        -74,\n                        -23,\n                        66,\n                        -71,\n                        83,\n                        26,\n                        66,\n                        122,\n                        -53,\n                        -85,\n                        66,\n                        90,\n                        49,\n                        -60,\n                        66,\n                        -77,\n                        127,\n                        4,\n                        66,\n                        -84,\n                        -123,\n                        122,\n                        66,\n                        -65,\n                        31,\n                        -122,\n                        66,\n                        5,\n                        -52,\n                        -7,\n                        63,\n                        -88,\n    
                    64,\n                        -50,\n                        66,\n                        -85,\n                        -105,\n                        -56,\n                        66,\n                        -126,\n                        -10,\n                        36,\n                        66,\n                        -88,\n                        97,\n                        -14,\n                        66,\n                        105,\n                        20,\n                        -17,\n                        66,\n                        -63,\n                        97,\n                        -9,\n                        66,\n                        -107,\n                        -108,\n                        6,\n                        66,\n                        102,\n                        -15,\n                        51,\n                        66,\n                        -104,\n                        117,\n                        -26,\n                        66,\n                        -68,\n                        -57,\n                        -14,\n                        66,\n                        -92,\n                        6,\n                        73,\n                        66,\n                        -109,\n                        -61,\n                        64,\n                        66,\n                        -95,\n                        -25,\n                        -55,\n                        66,\n                        -101,\n                        82,\n                        122,\n                        66,\n                        -78,\n                        31,\n                        -34,\n                        66,\n                        -117,\n                        -49,\n                        -47,\n                        66,\n                        -74,\n                        -85,\n                        -50,\n                        66,\n                  
      -108,\n                        45,\n                        -117,\n                        66,\n                        75,\n                        47,\n                        87,\n                        66,\n                        102,\n                        80,\n                        -94,\n                        66,\n                        -90,\n                        -111,\n                        1,\n                        66,\n                        -124,\n                        76,\n                        -106,\n                        66,\n                        91,\n                        -120,\n                        28,\n                        66,\n                        79,\n                        39,\n                        -7,\n                        66,\n                        91,\n                        -22,\n                        -7,\n                        66,\n                        -59,\n                        121,\n                        87,\n                        66,\n                        115,\n                        43,\n                        -80,\n                        66,\n                        -69,\n                        -38,\n                        118,\n                        66,\n                        -61,\n                        -128,\n                        -112,\n                        66,\n                        -88,\n                        -18,\n                        17,\n                        66,\n                        -72,\n                        66,\n                        -66,\n                        66,\n                        -70,\n                        -117,\n                        33,\n                        66,\n                        -68,\n                        43,\n                        8,\n                        66,\n                        -85,\n                        -48,\n                        7,\n                        66,\n           
             -76,\n                        43,\n                        37,\n                        66,\n                        -117,\n                        -4,\n                        -119,\n                        66,\n                        86,\n                        84,\n                        125,\n                        66,\n                        -98,\n                        97,\n                        59,\n                        66,\n                        -72,\n                        102,\n                        -36,\n                        66,\n                        103,\n                        99,\n                        -56,\n                        66,\n                        -63,\n                        79,\n                        -98,\n                        66,\n                        -73,\n                        100,\n                        2,\n                        66,\n                        -59,\n                        -16,\n                        -60,\n                        66,\n                        73,\n                        34,\n                        15,\n                        66,\n                        -100,\n                        -94,\n                        -30,\n                        66,\n                        -73,\n                        -29,\n                        75,\n                        66,\n                        73,\n                        -83,\n                        -37,\n                        66,\n                        76,\n                        -34,\n                        32,\n                        66,\n                        -108,\n                        120,\n                        107,\n                        66,\n                        -72,\n                        62,\n                        -105,\n                        66,\n                        -124,\n                        19,\n                        -82,\n                        66,\n  
                      -74,\n                        -4,\n                        2,\n                        66,\n                        -113,\n                        -5,\n                        -103,\n                        66,\n                        -87,\n                        -112,\n                        -74,\n                        66,\n                        127,\n                        -45,\n                        42,\n                        66,\n                        101,\n                        92,\n                        63,\n                        66,\n                        -88,\n                        70,\n                        -128,\n                        66,\n                        77,\n                        91,\n                        -93,\n                        66,\n                        -98,\n                        -110,\n                        -6,\n                        66,\n                        -75,\n                        37,\n                        -69,\n                        66,\n                        -73,\n                        100,\n                        98,\n                        66,\n                        -96,\n                        -15,\n                        -90,\n                        66,\n                        -103,\n                        -61,\n                        -37,\n                        66,\n                        95,\n                        34,\n                        54,\n                        66,\n                        96,\n                        -79,\n                        -21,\n                        66,\n                        98,\n                        4,\n                        99,\n                        66,\n                        -64,\n                        -24,\n                        -21,\n                        66,\n                        -76,\n                        74,\n                        -60,\n                        
66,\n                        -76,\n                        -112,\n                        40,\n                        66,\n                        -96,\n                        -48,\n                        99,\n                        66,\n                        -66,\n                        -93,\n                        -30,\n                        66,\n                        -103,\n                        -29,\n                        -94,\n                        66,\n                        -104,\n                        67,\n                        -36,\n                        66,\n                        -66,\n                        118,\n                        117,\n                        66,\n                        -84,\n                        124,\n                        95,\n                        66,\n                        79,\n                        -51,\n                        102,\n                        66,\n                        122,\n                        -84,\n                        -56,\n                        66,\n                        -92,\n                        -67,\n                        63,\n                        66,\n                        -107,\n                        -25,\n                        -77,\n                        66,\n                        -127,\n                        90,\n                        -90,\n                        66,\n                        -73,\n                        43,\n                        73,\n                        66,\n                        -78,\n                        -47,\n                        -81,\n                        66,\n                        -109,\n                        -75,\n                        13,\n                        66,\n                        -94,\n                        0,\n                        -76,\n                        66,\n                        -66,\n                        -2,\n                        -106,\n       
                 66,\n                        -110,\n                        -71,\n                        -69,\n                        66,\n                        70,\n                        60,\n                        -99,\n                        66,\n                        -64,\n                        67,\n                        -28,\n                        66,\n                        92,\n                        -17,\n                        -111,\n                        66,\n                        -106,\n                        31,\n                        -96,\n                        66,\n                        103,\n                        47,\n                        66,\n                        66,\n                        -111,\n                        20,\n                        77,\n                        66,\n                        -67,\n                        77,\n                        16,\n                        66,\n                        84,\n                        -41,\n                        86,\n                        66,\n                        -101,\n                        32,\n                        54,\n                        66,\n                        -111,\n                        103,\n                        -8,\n                        66,\n                        -62,\n                        23,\n                        -103,\n                        66,\n                        110,\n                        -108,\n                        126,\n                        66,\n                        -128,\n                        77,\n                        125,\n                        66,\n                        -82,\n                        -76,\n                        117,\n                        66,\n                        -63,\n                        38,\n                        -94,\n                        66,\n                        -109,\n                        81,\n                        
-33,\n                        66,\n                        -68,\n                        -37,\n                        29,\n                        66,\n                        118,\n                        -45,\n                        102,\n                        66,\n                        -63,\n                        -114,\n                        -83,\n                        66,\n                        -67,\n                        92,\n                        48,\n                        66,\n                        -74,\n                        -86,\n                        -106,\n                        66,\n                        -125,\n                        8,\n                        -49,\n                        66,\n                        -119,\n                        57,\n                        34,\n                        66,\n                        -99,\n                        -88,\n                        -56,\n                        66,\n                        103,\n                        -71,\n                        -124,\n                        66,\n                        70,\n                        -15,\n                        29,\n                        66,\n                        -64,\n                        -106,\n                        -25,\n                        66,\n                        -100,\n                        -6,\n                        -79,\n                        66,\n                        -68,\n                        80,\n                        17,\n                        66,\n                        109,\n                        -85,\n                        -3,\n                        66,\n                        -65,\n                        30,\n                        14,\n                        66,\n                        75,\n                        42,\n                        25,\n                        66,\n                        -89,\n                        -109,\n           
             91,\n                        66,\n                        70,\n                        -77,\n                        99,\n                        66,\n                        79,\n                        51,\n                        -14,\n                        66,\n                        -66,\n                        70,\n                        -30,\n                        66,\n                        -69,\n                        74,\n                        48,\n                        66,\n                        76,\n                        -50,\n                        53,\n                        66,\n                        -102,\n                        121,\n                        -20,\n                        66,\n                        99,\n                        103,\n                        -12,\n                        66,\n                        -90,\n                        66,\n                        -1,\n                        66,\n                        -123,\n                        13,\n                        119,\n                        66,\n                        -94,\n                        110,\n                        102,\n                        66,\n                        -123,\n                        -125,\n                        125,\n                        66,\n                        -84,\n                        18,\n                        -108,\n                        66,\n                        -96,\n                        116,\n                        -15,\n                        66,\n                        -88,\n                        -5,\n                        85,\n                        66,\n                        -107,\n                        18,\n                        37,\n                        66,\n                        -124,\n                        -72,\n                        14,\n                        66,\n                        -117,\n                        
-36,\n                        -64,\n                        66,\n                        123,\n                        -68,\n                        65,\n                        66,\n                        114,\n                        96,\n                        -108,\n                        66,\n                        112,\n                        35,\n                        -112,\n                        66,\n                        -63,\n                        -98,\n                        -30,\n                        66,\n                        -112,\n                        -103,\n                        -34,\n                        66,\n                        79,\n                        -28,\n                        96,\n                        66,\n                        73,\n                        -76,\n                        49,\n                        66,\n                        -62,\n                        -54,\n                        52,\n                        66,\n                        -106,\n                        109,\n                        -29,\n                        66,\n                        -101,\n                        -61,\n                        -47,\n                        66,\n                        -78,\n                        97,\n                        -128,\n                        66,\n                        -119,\n                        -84,\n                        -110,\n                        66,\n                        82,\n                        -102,\n                        -25,\n                        66,\n                        -82,\n                        -1,\n                        72,\n                        66,\n                        -109,\n                        -80,\n                        106,\n                        66,\n                        77,\n                        14,\n                        -41,\n                        66,\n                        -64,\n    
                    -75,\n                        -108,\n                        66,\n                        -62,\n                        63,\n                        -45,\n                        66,\n                        97,\n                        -109,\n                        102,\n                        66,\n                        123,\n                        -95,\n                        -37,\n                        66,\n                        -120,\n                        -114,\n                        -105,\n                        66,\n                        -114,\n                        -110,\n                        102,\n                        66,\n                        85,\n                        -23,\n                        23,\n                        66,\n                        -62,\n                        34,\n                        7,\n                        66,\n                        -69,\n                        -122,\n                        -82,\n                        66,\n                        -127,\n                        21,\n                        -1,\n                        66,\n                        -116,\n                        -80,\n                        -109,\n                        66,\n                        -60,\n                        32,\n                        118,\n                        66,\n                        -67,\n                        70,\n                        44,\n                        66,\n                        -66,\n                        -115,\n                        4,\n                        66,\n                        -59,\n                        41,\n                        80,\n                        66,\n                        -115,\n                        -116,\n                        -108,\n                        66,\n                        -65,\n                        -10,\n                        -49,\n                        66,\n            
            111,\n                        64,\n                        99,\n                        66,\n                        118,\n                        121,\n                        -62,\n                        66,\n                        -120,\n                        76,\n                        -126,\n                        66,\n                        -65,\n                        32,\n                        -30,\n                        66,\n                        -106,\n                        -8,\n                        26,\n                        66,\n                        -111,\n                        29,\n                        43,\n                        66,\n                        -74,\n                        -12,\n                        10,\n                        66,\n                        -113,\n                        11,\n                        -118,\n                        66,\n                        -61,\n                        40,\n                        28,\n                        66,\n                        -102,\n                        -106,\n                        83,\n                        66,\n                        -62,\n                        109,\n                        38,\n                        66,\n                        -80,\n                        -36,\n                        81,\n                        66,\n                        90,\n                        -66,\n                        -50,\n                        66,\n                        86,\n                        101,\n                        44,\n                        66,\n                        -111,\n                        125,\n                        105,\n                        66,\n                        84,\n                        108,\n                        -86,\n                        66,\n                        -126,\n                        74,\n                        -105,\n                        
66,\n                        81,\n                        -109,\n                        -9,\n                        66,\n                        -105,\n                        65,\n                        -47,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                       
 0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 227,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        600468470,\n                        1033035683,\n                        753521989,\n                        974069864,\n                        985034669,\n                        1162077727,\n                        983616262,\n                        1013448983,\n                        753378865,\n   
                     1013785996,\n                        726931733,\n                        198512897,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        774289759,\n                        774584818,\n                        624434183,\n                        715231364,\n                        1117383866,\n                        712636127,\n                        1104797758,\n                        1103183918,\n                        1098232306,\n                        1146114728,\n                        769447103,\n                        209712766,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 27,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 27,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -7118534165006407185,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n      
                  907630914,\n                        43452073,\n                        52886071,\n                        483960655,\n                        463058385,\n                        133350823,\n                        888458967,\n                        853219053,\n                        1000381230,\n                        643782055,\n                        368746353,\n                        752585267,\n                        576031587,\n                        756983211,\n                        1028849529,\n                        179697501,\n                        1031214894,\n                        919921769,\n                        783399599,\n                        863552871,\n                        711703893,\n                        267228721,\n                        52370723,\n                        610856437,\n                        606020794,\n                        786210742,\n                        357788135,\n                        996140795,\n                        354089839,\n                        57068493,\n                        739572467,\n                        388188502,\n                        261918058,\n                        219784354,\n                        72454101,\n                        190183473,\n                        857675045,\n                        829,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        32,\n                        -53,\n                        67,\n                        69,\n                        65,\n                        -110,\n                        -22,\n                        69,\n                        37,\n                        -95,\n                        45,\n                        69,\n                        35,\n                        6,\n   
                     -57,\n                        65,\n                        -25,\n                        65,\n                        -90,\n                        65,\n                        -84,\n                        101,\n                        7,\n                        66,\n                        -125,\n                        -125,\n                        29,\n                        66,\n                        -100,\n                        -38,\n                        104,\n                        66,\n                        -93,\n                        10,\n                        -37,\n                        66,\n                        -79,\n                        73,\n                        29,\n                        66,\n                        84,\n                        44,\n                        26,\n                        66,\n                        -122,\n                        -68,\n                        18,\n                        66,\n                        98,\n                        126,\n                        23,\n                        66,\n                        -95,\n                        -116,\n                        -88,\n                        66,\n                        -82,\n                        -9,\n                        45,\n                        66,\n                        59,\n                        12,\n                        125,\n                        66,\n                        -66,\n                        -9,\n                        119,\n                        66,\n                        -89,\n                        -36,\n                        112,\n                        66,\n                        77,\n                        42,\n                        -75,\n                        66,\n                        -61,\n                        123,\n                        -109,\n                        66,\n                        -115,\n                       
 -78,\n                        -16,\n                        66,\n                        -116,\n                        51,\n                        43,\n                        66,\n                        126,\n                        -50,\n                        -78,\n                        66,\n                        -67,\n                        -2,\n                        82,\n                        66,\n                        -116,\n                        21,\n                        -60,\n                        66,\n                        -64,\n                        12,\n                        118,\n                        66,\n                        -107,\n                        -57,\n                        29,\n                        66,\n                        106,\n                        -13,\n                        -127,\n                        66,\n                        -114,\n                        -67,\n                        90,\n                        66,\n                        -107,\n                        81,\n                        38,\n                        66,\n                        68,\n                        115,\n                        -6,\n                        66,\n                        126,\n                        -108,\n                        -114,\n                        66,\n                        -88,\n                        -51,\n                        -117,\n                        66,\n                        -113,\n                        -110,\n                        -57,\n                        66,\n                        89,\n                        105,\n                        4,\n                        66,\n                        80,\n                        125,\n                        -126,\n                        66,\n                        85,\n                        89,\n                        103,\n                        66,\n                        -82,\n      
                  13,\n                        -28,\n                        66,\n                        75,\n                        -9,\n                        89,\n                        66,\n                        -109,\n                        33,\n                        -43,\n                        66,\n                        75,\n                        -51,\n                        87,\n                        66,\n                        -71,\n                        -98,\n                        88,\n                        66,\n                        -114,\n                        93,\n                        -11,\n                        66,\n                        70,\n                        115,\n                        121,\n                        66,\n                        -107,\n                        83,\n                        16,\n                        66,\n                        -94,\n                        -44,\n                        -40,\n                        66,\n                        -99,\n                        -28,\n                        92,\n                        66,\n                        -110,\n                        -60,\n                        109,\n                        66,\n                        -78,\n                        55,\n                        -71,\n                        66,\n                        -110,\n                        -47,\n                        -52,\n                        66,\n                        -63,\n                        22,\n                        48,\n                        66,\n                        -101,\n                        -4,\n                        90,\n                        66,\n                        -59,\n                        -128,\n                        -91,\n                        66,\n                        -86,\n                        66,\n                        65,\n                        66,\n                        
6,\n                        121,\n                        -109,\n                        66,\n                        -116,\n                        -83,\n                        32,\n                        66,\n                        -119,\n                        25,\n                        -98,\n                        66,\n                        -76,\n                        -24,\n                        71,\n                        66,\n                        -110,\n                        -8,\n                        30,\n                        66,\n                        108,\n                        106,\n                        -23,\n                        66,\n                        -102,\n                        51,\n                        -75,\n                        66,\n                        125,\n                        -19,\n                        13,\n                        66,\n                        119,\n                        21,\n                        -67,\n                        66,\n                        -105,\n                        -30,\n                        1,\n                        66,\n                        -128,\n                        9,\n                        -101,\n                        66,\n                        -65,\n                        122,\n                        -37,\n                        66,\n                        115,\n                        -124,\n                        69,\n                        66,\n                        -110,\n                        121,\n                        -6,\n                        66,\n                        -75,\n                        61,\n                        111,\n                        66,\n                        69,\n                        -30,\n                        35,\n                        66,\n                        -112,\n                        108,\n                        -103,\n                        66,\n        
                -68,\n                        7,\n                        -85,\n                        66,\n                        79,\n                        -14,\n                        -38,\n                        66,\n                        -128,\n                        93,\n                        -48,\n                        66,\n                        -62,\n                        -109,\n                        70,\n                        66,\n                        81,\n                        -28,\n                        64,\n                        66,\n                        -86,\n                        96,\n                        -102,\n                        66,\n                        -98,\n                        -66,\n                        -65,\n                        66,\n                        101,\n                        55,\n                        -120,\n                        66,\n                        -125,\n                        78,\n                        -125,\n                        66,\n                        -114,\n                        -31,\n                        90,\n                        66,\n                        -121,\n                        -37,\n                        -59,\n                        66,\n                        -117,\n                        -100,\n                        58,\n                        66,\n                        -62,\n                        5,\n                        13,\n                        66,\n                        -128,\n                        23,\n                        114,\n                        66,\n                        71,\n                        -108,\n                        -98,\n                        66,\n                        76,\n                        -106,\n                        2,\n                        66,\n                        -110,\n                        -70,\n                        105,\n                    
    66,\n                        -105,\n                        -36,\n                        125,\n                        66,\n                        -71,\n                        -122,\n                        5,\n                        66,\n                        -111,\n                        110,\n                        91,\n                        66,\n                        -68,\n                        -84,\n                        26,\n                        66,\n                        -120,\n                        -47,\n                        20,\n                        66,\n                        70,\n                        -20,\n                        101,\n                        66,\n                        124,\n                        -123,\n                        88,\n                        66,\n                        119,\n                        -53,\n                        -126,\n                        66,\n                        -59,\n                        -24,\n                        122,\n                        66,\n                        -92,\n                        18,\n                        -11,\n                        66,\n                        -118,\n                        74,\n                        -11,\n                        66,\n                        -89,\n                        50,\n                        103,\n                        66,\n                        70,\n                        -94,\n                        -104,\n                        66,\n                        -67,\n                        -67,\n                        -120,\n                        66,\n                        -99,\n                        -58,\n                        39,\n                        66,\n                        101,\n                        86,\n                        -33,\n                        66,\n                        -111,\n                        20,\n                        
-109,\n                        66,\n                        -119,\n                        38,\n                        16,\n                        66,\n                        -120,\n                        -111,\n                        -85,\n                        66,\n                        71,\n                        -11,\n                        3,\n                        66,\n                        80,\n                        -123,\n                        124,\n                        66,\n                        -98,\n                        -123,\n                        -27,\n                        66,\n                        -76,\n                        68,\n                        -76,\n                        66,\n                        72,\n                        93,\n                        118,\n                        66,\n                        97,\n                        -102,\n                        -18,\n                        66,\n                        105,\n                        24,\n                        -34,\n                        66,\n                        107,\n                        -27,\n                        -55,\n                        66,\n                        122,\n                        -89,\n                        -50,\n                        66,\n                        89,\n                        29,\n                        -45,\n                        66,\n                        -104,\n                        -107,\n                        -70,\n                        66,\n                        123,\n                        123,\n                        103,\n                        66,\n                        -115,\n                        -29,\n                        -127,\n                        66,\n                        -110,\n                        40,\n                        -46,\n                        66,\n                        -71,\n                        -52,\n   
                     -100,\n                        66,\n                        77,\n                        -104,\n                        -4,\n                        66,\n                        68,\n                        90,\n                        -68,\n                        66,\n                        -61,\n                        61,\n                        -73,\n                        66,\n                        -118,\n                        30,\n                        27,\n                        66,\n                        -104,\n                        -26,\n                        -96,\n                        66,\n                        -122,\n                        -89,\n                        -84,\n                        66,\n                        -104,\n                        72,\n                        104,\n                        66,\n                        88,\n                        97,\n                        -12,\n                        66,\n                        -92,\n                        47,\n                        39,\n                        66,\n                        95,\n                        114,\n                        47,\n                        66,\n                        91,\n                        -22,\n                        -23,\n                        66,\n                        -113,\n                        -29,\n                        28,\n                        66,\n                        103,\n                        -44,\n                        106,\n                        66,\n                        -65,\n                        52,\n                        -31,\n                        66,\n                        -114,\n                        117,\n                        -54,\n                        66,\n                        -105,\n                        -84,\n                        57,\n                        66,\n                        -93,\n                   
     -38,\n                        -50,\n                        66,\n                        85,\n                        -100,\n                        122,\n                        66,\n                        107,\n                        62,\n                        116,\n                        66,\n                        -109,\n                        29,\n                        -89,\n                        66,\n                        -59,\n                        69,\n                        -39,\n                        66,\n                        -64,\n                        29,\n                        -10,\n                        66,\n                        -70,\n                        -75,\n                        22,\n                        66,\n                        -123,\n                        58,\n                        2,\n                        66,\n                        -98,\n                        7,\n                        55,\n                        66,\n                        -71,\n                        70,\n                        -108,\n                        66,\n                        -105,\n                        88,\n                        92,\n                        66,\n                        -88,\n                        113,\n                        -126,\n                        66,\n                        -64,\n                        -108,\n                        -108,\n                        66,\n                        -101,\n                        -119,\n                        -62,\n                        66,\n                        91,\n                        84,\n                        0,\n                        66,\n                        -110,\n                        67,\n                        40,\n                        66,\n                        -107,\n                        105,\n                        -102,\n                        66,\n                        83,\n     
                   16,\n                        -96,\n                        66,\n                        125,\n                        52,\n                        55,\n                        66,\n                        109,\n                        -96,\n                        -83,\n                        66,\n                        -66,\n                        -82,\n                        56,\n                        66,\n                        -67,\n                        -48,\n                        112,\n                        66,\n                        -118,\n                        101,\n                        -1,\n                        66,\n                        -59,\n                        -14,\n                        -56,\n                        66,\n                        84,\n                        -85,\n                        73,\n                        66,\n                        95,\n                        33,\n                        -27,\n                        66,\n                        -94,\n                        65,\n                        -12,\n                        66,\n                        77,\n                        116,\n                        -45,\n                        66,\n                        -114,\n                        62,\n                        105,\n                        66,\n                        -106,\n                        65,\n                        -60,\n                        66,\n                        101,\n                        27,\n                        36,\n                        66,\n                        83,\n                        -94,\n                        -15,\n                        66,\n                        -65,\n                        -28,\n                        -71,\n                        66,\n                        -115,\n                        70,\n                        108,\n                        66,\n                        
122,\n                        102,\n                        74,\n                        66,\n                        -74,\n                        12,\n                        11,\n                        66,\n                        -101,\n                        -125,\n                        67,\n                        66,\n                        -81,\n                        -76,\n                        -49,\n                        66,\n                        -62,\n                        -26,\n                        -32,\n                        66,\n                        -99,\n                        69,\n                        -63,\n                        66,\n                        -75,\n                        73,\n                        77,\n                        66,\n                        -113,\n                        30,\n                        -100,\n                        66,\n                        71,\n                        89,\n                        -31,\n                        66,\n                        97,\n                        52,\n                        53,\n                        66,\n                        -68,\n                        -18,\n                        -114,\n                        66,\n                        -106,\n                        44,\n                        -107,\n                        66,\n                        -112,\n                        -71,\n                        93,\n                        66,\n                        -59,\n                        -17,\n                        -75,\n                        66,\n                        -62,\n                        -46,\n                        101,\n                        66,\n                        -69,\n                        14,\n                        96,\n                        66,\n                        -77,\n                        -127,\n                        82,\n                        66,\n          
              -77,\n                        -117,\n                        18,\n                        66,\n                        -68,\n                        -59,\n                        53,\n                        66,\n                        69,\n                        121,\n                        -12,\n                        66,\n                        -60,\n                        -2,\n                        -71,\n                        66,\n                        104,\n                        -102,\n                        -85,\n                        66,\n                        70,\n                        -38,\n                        37,\n                        66,\n                        -92,\n                        -55,\n                        69,\n                        66,\n                        -112,\n                        -38,\n                        2,\n                        66,\n                        83,\n                        59,\n                        -122,\n                        66,\n                        -66,\n                        -23,\n                        121,\n                        66,\n                        -113,\n                        -27,\n                        -35,\n                        66,\n                        -108,\n                        -33,\n                        -119,\n                        66,\n                        86,\n                        -49,\n                        22,\n                        66,\n                        -113,\n                        -47,\n                        112,\n                        66,\n                        -65,\n                        -11,\n                        74,\n                        66,\n                        -119,\n                        -51,\n                        -82,\n                        66,\n                        -61,\n                        -79,\n                        -50,\n                     
   66,\n                        84,\n                        -98,\n                        -103,\n                        66,\n                        79,\n                        27,\n                        -33,\n                        66,\n                        -109,\n                        -15,\n                        126,\n                        66,\n                        -68,\n                        -72,\n                        -5,\n                        66,\n                        -104,\n                        95,\n                        100,\n                        66,\n                        -110,\n                        -74,\n                        -75,\n                        66,\n                        -62,\n                        54,\n                        74,\n                        66,\n                        83,\n                        -80,\n                        45,\n                        66,\n                        -103,\n                        -31,\n                        89,\n                        66,\n                        -108,\n                        -60,\n                        27,\n                        66,\n                        -111,\n                        -82,\n                        -128,\n                        66,\n                        -115,\n                        101,\n                        24,\n                        66,\n                        -104,\n                        56,\n                        -108,\n                        66,\n                        -71,\n                        -126,\n                        105,\n                        66,\n                        -106,\n                        35,\n                        -68,\n                        66,\n                        -108,\n                        14,\n                        38,\n                        66,\n                        -107,\n                        -111,\n                        
68,\n                        66,\n                        -98,\n                        17,\n                        -116,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 224,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1033042247,\n                        774818287,\n      
                  643847449,\n                        1156860994,\n                        645640709,\n                        759900490,\n                        1118662559,\n                        581140570,\n                        639257174,\n                        602040974,\n                        583492814,\n                        7371529,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1160608054,\n                        968787773,\n                        754052569,\n                        1018686262,\n                        629206123,\n                        774660554,\n                        970351891,\n                        710457638,\n                        600439768,\n                        726417854,\n                        583436672,\n                        8775581,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 30,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 30,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -383864129972597491,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                  
  \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        657791446,\n                        899217370,\n                        582649659,\n                        1021403074,\n                        578610617,\n                        492013155,\n                        363629919,\n                        636173671,\n                        517688865,\n                        913290846,\n                        451351891,\n                        509017666,\n                        65459558,\n                        213757125,\n                        463693029,\n                        1071109586,\n                        123065565,\n                        208727925,\n                        788362713,\n                        752153770,\n                        309124941,\n                        1070811597,\n                        619885657,\n                        854383865,\n                        513408289,\n                        496286787,\n                        60538338,\n                        572868917,\n                        489369269,\n                        43364083,\n                        884256585,\n                        510639963,\n                        783796679,\n                        761377953,\n                        641164927,\n                        607741022,\n                        1005700346,\n                        589597670,\n                        18217,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -119,\n                        111,\n                        -123,\n                        69,\n                        30,\n        
                5,\n                        62,\n                        68,\n                        1,\n                        -122,\n                        -50,\n                        66,\n                        -64,\n                        92,\n                        1,\n                        66,\n                        -119,\n                        -56,\n                        24,\n                        66,\n                        -118,\n                        -44,\n                        32,\n                        68,\n                        -77,\n                        -109,\n                        119,\n                        66,\n                        -96,\n                        -43,\n                        -92,\n                        66,\n                        -111,\n                        -106,\n                        -11,\n                        66,\n                        -113,\n                        -110,\n                        -89,\n                        66,\n                        -83,\n                        101,\n                        -43,\n                        66,\n                        -88,\n                        -41,\n                        115,\n                        66,\n                        80,\n                        -94,\n                        -96,\n                        66,\n                        124,\n                        109,\n                        -52,\n                        66,\n                        -106,\n                        -20,\n                        -50,\n                        66,\n                        31,\n                        78,\n                        120,\n                        66,\n                        14,\n                        -24,\n                        -46,\n                        66,\n                        -96,\n                        59,\n                        67,\n                        66,\n                      
  -77,\n                        -126,\n                        87,\n                        66,\n                        -70,\n                        -105,\n                        126,\n                        66,\n                        -114,\n                        -62,\n                        10,\n                        66,\n                        -69,\n                        98,\n                        -68,\n                        66,\n                        -86,\n                        -33,\n                        -119,\n                        66,\n                        -64,\n                        -110,\n                        -4,\n                        66,\n                        -116,\n                        -71,\n                        114,\n                        66,\n                        -118,\n                        1,\n                        30,\n                        66,\n                        -117,\n                        -68,\n                        -67,\n                        66,\n                        -100,\n                        87,\n                        -71,\n                        66,\n                        7,\n                        -10,\n                        -122,\n                        66,\n                        -110,\n                        -82,\n                        -26,\n                        66,\n                        81,\n                        104,\n                        36,\n                        65,\n                        28,\n                        -36,\n                        46,\n                        66,\n                        -109,\n                        78,\n                        33,\n                        66,\n                        88,\n                        -30,\n                        6,\n                        66,\n                        -110,\n                        85,\n                        -126,\n                        66,\n     
                   -72,\n                        -85,\n                        -13,\n                        66,\n                        87,\n                        104,\n                        -21,\n                        66,\n                        -76,\n                        13,\n                        -119,\n                        66,\n                        83,\n                        7,\n                        47,\n                        66,\n                        -119,\n                        112,\n                        -32,\n                        66,\n                        -71,\n                        110,\n                        122,\n                        66,\n                        -71,\n                        38,\n                        -35,\n                        66,\n                        103,\n                        104,\n                        17,\n                        65,\n                        -30,\n                        52,\n                        20,\n                        66,\n                        71,\n                        -45,\n                        78,\n                        66,\n                        -71,\n                        -4,\n                        36,\n                        66,\n                        -85,\n                        -76,\n                        -109,\n                        66,\n                        -77,\n                        2,\n                        25,\n                        66,\n                        -110,\n                        126,\n                        4,\n                        66,\n                        -123,\n                        -127,\n                        -3,\n                        66,\n                        69,\n                        78,\n                        115,\n                        66,\n                        -67,\n                        47,\n                        110,\n                        
66,\n                        -122,\n                        -87,\n                        60,\n                        66,\n                        76,\n                        -96,\n                        4,\n                        66,\n                        -74,\n                        -66,\n                        -72,\n                        66,\n                        -77,\n                        66,\n                        45,\n                        66,\n                        -101,\n                        32,\n                        60,\n                        66,\n                        91,\n                        126,\n                        -82,\n                        66,\n                        -62,\n                        -70,\n                        27,\n                        66,\n                        73,\n                        11,\n                        36,\n                        66,\n                        86,\n                        -103,\n                        -123,\n                        66,\n                        -67,\n                        -121,\n                        -57,\n                        66,\n                        -101,\n                        24,\n                        102,\n                        66,\n                        -91,\n                        -19,\n                        -123,\n                        66,\n                        -61,\n                        -4,\n                        116,\n                        66,\n                        -100,\n                        1,\n                        -41,\n                        66,\n                        -69,\n                        93,\n                        -44,\n                        66,\n                        -72,\n                        -27,\n                        82,\n                        66,\n                        -125,\n                        -115,\n                        -36,\n           
             66,\n                        -98,\n                        112,\n                        118,\n                        66,\n                        -125,\n                        39,\n                        52,\n                        66,\n                        4,\n                        25,\n                        108,\n                        66,\n                        -92,\n                        -72,\n                        -6,\n                        66,\n                        109,\n                        -43,\n                        33,\n                        66,\n                        -118,\n                        16,\n                        -68,\n                        66,\n                        -103,\n                        -95,\n                        -77,\n                        66,\n                        -69,\n                        -112,\n                        88,\n                        66,\n                        -110,\n                        55,\n                        16,\n                        66,\n                        -86,\n                        -52,\n                        108,\n                        66,\n                        -68,\n                        -110,\n                        -95,\n                        66,\n                        94,\n                        126,\n                        -49,\n                        66,\n                        73,\n                        122,\n                        -83,\n                        66,\n                        106,\n                        7,\n                        -5,\n                        66,\n                        -65,\n                        115,\n                        -12,\n                        66,\n                        -102,\n                        -87,\n                        124,\n                        66,\n                        89,\n                        110,\n                        
-49,\n                        66,\n                        -103,\n                        -15,\n                        -107,\n                        66,\n                        -63,\n                        54,\n                        42,\n                        66,\n                        -64,\n                        0,\n                        5,\n                        66,\n                        -106,\n                        -11,\n                        7,\n                        66,\n                        -69,\n                        -54,\n                        -34,\n                        66,\n                        -65,\n                        -88,\n                        36,\n                        66,\n                        -102,\n                        -51,\n                        -32,\n                        66,\n                        -122,\n                        -89,\n                        5,\n                        66,\n                        26,\n                        109,\n                        -118,\n                        66,\n                        79,\n                        -38,\n                        -83,\n                        66,\n                        -116,\n                        82,\n                        -62,\n                        66,\n                        -65,\n                        5,\n                        26,\n                        66,\n                        -98,\n                        113,\n                        -60,\n                        66,\n                        109,\n                        -37,\n                        -32,\n                        66,\n                        -109,\n                        -46,\n                        50,\n                        66,\n                        81,\n                        119,\n                        -24,\n                        66,\n                        -121,\n                        26,\n            
            123,\n                        66,\n                        82,\n                        -111,\n                        -48,\n                        66,\n                        80,\n                        59,\n                        118,\n                        66,\n                        -122,\n                        2,\n                        12,\n                        66,\n                        73,\n                        112,\n                        67,\n                        66,\n                        -65,\n                        -8,\n                        127,\n                        66,\n                        -109,\n                        111,\n                        -65,\n                        66,\n                        -84,\n                        8,\n                        68,\n                        66,\n                        -104,\n                        120,\n                        -71,\n                        66,\n                        -66,\n                        -25,\n                        -61,\n                        66,\n                        87,\n                        -64,\n                        99,\n                        66,\n                        78,\n                        39,\n                        -53,\n                        66,\n                        -71,\n                        50,\n                        -13,\n                        66,\n                        -122,\n                        20,\n                        97,\n                        66,\n                        -109,\n                        123,\n                        59,\n                        66,\n                        -110,\n                        -66,\n                        -49,\n                        66,\n                        -107,\n                        116,\n                        -36,\n                        66,\n                        -70,\n                        -99,\n 
                       116,\n                        66,\n                        -111,\n                        -57,\n                        -71,\n                        66,\n                        -79,\n                        -21,\n                        8,\n                        66,\n                        79,\n                        -44,\n                        44,\n                        66,\n                        -127,\n                        -39,\n                        -80,\n                        66,\n                        -75,\n                        66,\n                        61,\n                        66,\n                        -95,\n                        70,\n                        5,\n                        66,\n                        -114,\n                        -109,\n                        70,\n                        66,\n                        -67,\n                        -42,\n                        104,\n                        66,\n                        -71,\n                        -103,\n                        -121,\n                        66,\n                        -68,\n                        -97,\n                        -71,\n                        66,\n                        -127,\n                        -85,\n                        -122,\n                        66,\n                        79,\n                        5,\n                        -123,\n                        66,\n                        -123,\n                        108,\n                        110,\n                        66,\n                        -61,\n                        75,\n                        79,\n                        66,\n                        -109,\n                        112,\n                        -23,\n                        66,\n                        -120,\n                        -98,\n                        59,\n                        66,\n                        68,\n             
           14,\n                        26,\n                        66,\n                        -69,\n                        -26,\n                        -16,\n                        66,\n                        -111,\n                        14,\n                        -38,\n                        66,\n                        107,\n                        16,\n                        -31,\n                        66,\n                        76,\n                        -105,\n                        -110,\n                        66,\n                        -113,\n                        -8,\n                        -50,\n                        66,\n                        -68,\n                        127,\n                        74,\n                        66,\n                        -101,\n                        79,\n                        -2,\n                        66,\n                        -113,\n                        7,\n                        106,\n                        66,\n                        -103,\n                        117,\n                        88,\n                        66,\n                        76,\n                        -26,\n                        108,\n                        66,\n                        82,\n                        121,\n                        -79,\n                        66,\n                        -101,\n                        72,\n                        -117,\n                        66,\n                        102,\n                        -122,\n                        -119,\n                        66,\n                        101,\n                        116,\n                        26,\n                        66,\n                        -65,\n                        -65,\n                        60,\n                        66,\n                        81,\n                        3,\n                        -121,\n                        66,\n                        
-107,\n                        -28,\n                        -111,\n                        66,\n                        -111,\n                        121,\n                        1,\n                        66,\n                        -63,\n                        -96,\n                        -110,\n                        66,\n                        -82,\n                        -67,\n                        25,\n                        66,\n                        74,\n                        -47,\n                        117,\n                        66,\n                        91,\n                        105,\n                        37,\n                        66,\n                        93,\n                        -104,\n                        -85,\n                        66,\n                        -116,\n                        -120,\n                        -83,\n                        66,\n                        -85,\n                        22,\n                        56,\n                        66,\n                        -97,\n                        -15,\n                        74,\n                        66,\n                        -116,\n                        85,\n                        71,\n                        66,\n                        -66,\n                        -62,\n                        -85,\n                        66,\n                        -76,\n                        84,\n                        -58,\n                        66,\n                        -72,\n                        -82,\n                        -47,\n                        66,\n                        -114,\n                        125,\n                        -80,\n                        66,\n                        -106,\n                        -124,\n                        114,\n                        66,\n                        -102,\n                        59,\n                        105,\n                        66,\n   
                     93,\n                        -74,\n                        53,\n                        66,\n                        -70,\n                        -24,\n                        -115,\n                        66,\n                        -61,\n                        -121,\n                        73,\n                        66,\n                        -90,\n                        112,\n                        -4,\n                        66,\n                        90,\n                        -73,\n                        -68,\n                        66,\n                        85,\n                        54,\n                        45,\n                        66,\n                        90,\n                        100,\n                        70,\n                        66,\n                        110,\n                        -88,\n                        -47,\n                        66,\n                        -111,\n                        59,\n                        100,\n                        66,\n                        -110,\n                        -128,\n                        -2,\n                        66,\n                        -115,\n                        67,\n                        -1,\n                        66,\n                        -63,\n                        -13,\n                        -84,\n                        66,\n                        77,\n                        81,\n                        -121,\n                        66,\n                        -111,\n                        123,\n                        98,\n                        66,\n                        94,\n                        -47,\n                        -31,\n                        66,\n                        -70,\n                        101,\n                        14,\n                        66,\n                        69,\n                        -104,\n                        -11,\n                    
    66,\n                        -72,\n                        -17,\n                        -58,\n                        66,\n                        81,\n                        -54,\n                        -83,\n                        66,\n                        107,\n                        -65,\n                        -103,\n                        66,\n                        -67,\n                        111,\n                        31,\n                        66,\n                        97,\n                        -77,\n                        125,\n                        66,\n                        93,\n                        -79,\n                        25,\n                        66,\n                        -65,\n                        -56,\n                        91,\n                        66,\n                        -72,\n                        125,\n                        -122,\n                        66,\n                        69,\n                        -25,\n                        122,\n                        66,\n                        78,\n                        59,\n                        55,\n                        66,\n                        117,\n                        82,\n                        79,\n                        66,\n                        -105,\n                        12,\n                        -81,\n                        66,\n                        -102,\n                        30,\n                        -94,\n                        66,\n                        -113,\n                        111,\n                        -65,\n                        66,\n                        84,\n                        -29,\n                        111,\n                        66,\n                        -61,\n                        115,\n                        108,\n                        66,\n                        -80,\n                        -100,\n                        28,\n       
                 66,\n                        73,\n                        -123,\n                        85,\n                        66,\n                        77,\n                        -21,\n                        46,\n                        66,\n                        -119,\n                        55,\n                        -22,\n                        66,\n                        -90,\n                        127,\n                        -97,\n                        66,\n                        95,\n                        72,\n                        -31,\n                        66,\n                        77,\n                        -38,\n                        65,\n                        66,\n                        -67,\n                        104,\n                        -117,\n                        66,\n                        -59,\n                        116,\n                        46,\n                        66,\n                        -107,\n                        5,\n                        98,\n                        66,\n                        -85,\n                        49,\n                        -24,\n                        66,\n                        85,\n                        72,\n                        39,\n                        66,\n                        -66,\n                        -5,\n                        107,\n                        66,\n                        -81,\n                        -50,\n                        53,\n                        66,\n                        89,\n                        41,\n                        -31,\n                        66,\n                        -104,\n                        -40,\n                        -83,\n                        66,\n                        91,\n                        119,\n                        5,\n                        66,\n                        81,\n                        87,\n                        -10,\n     
                   66,\n                        -105,\n                        57,\n                        74,\n                        66,\n                        -65,\n                        -24,\n                        -58,\n                        66,\n                        71,\n                        -2,\n                        43,\n                        66,\n                        -63,\n                        119,\n                        -1,\n                        66,\n                        -100,\n                        -14,\n                        112,\n                        66,\n                        -62,\n                        124,\n                        -124,\n                        66,\n                        -100,\n                        58,\n                        -5,\n                        66,\n                        -106,\n                        -97,\n                        -35,\n                        66,\n                        -89,\n                        26,\n                        87,\n                        66,\n                        -101,\n                        -2,\n                        -32,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n        
                0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    
\"canonicalAndNotALeaf\": true,\n                    \"size\": 231,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1098311399,\n                        1161709991,\n                        1162061360,\n                        1141278415,\n                        631172210,\n                        625792513,\n                        774832136,\n                        1030808822,\n                        596092733,\n                        595486228,\n                        595657519,\n                        710979521,\n                        13,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1155824059,\n                        1162261382,\n                        1162261358,\n                        1119034435,\n                        1018684795,\n                        975716890,\n                        1162023080,\n                        969281689,\n                        731535917,\n                        639084055,\n                        581150528,\n                        586624567,\n                        13,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 23,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 23,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -3659552152257293168,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                
\"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        756914650,\n                        655449419,\n                        241077338,\n                        773286393,\n                        317564493,\n                        922970742,\n                        310240305,\n                        661821857,\n                        1047602866,\n                        976735609,\n                        729737213,\n                        635283406,\n                        717067626,\n                        102160233,\n                        59971026,\n                        664525046,\n                        991674161,\n                        854027370,\n                        499060198,\n                        375451518,\n                        871718259,\n                        1046831037,\n                        209254327,\n                        585823959,\n                        1052567975,\n                        638646123,\n                        874210494,\n                        790981751,\n                        909854517,\n                        56866030,\n                        636464493,\n                        996341883,\n                        922606834,\n                        123508551,\n                        91168441,\n                        240574077,\n                        1029876730,\n                        458612681,\n                        209,\n                        0,\n                        0,\n                    
    0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -91,\n                        73,\n                        -35,\n                        68,\n                        121,\n                        36,\n                        102,\n                        69,\n                        84,\n                        -10,\n                        -91,\n                        65,\n                        -21,\n                        -26,\n                        -18,\n                        66,\n                        -122,\n                        -37,\n                        -37,\n                        67,\n                        117,\n                        -38,\n                        69,\n                        66,\n                        83,\n                        -20,\n                        55,\n                        69,\n                        6,\n                        -119,\n                        -126,\n                        65,\n                        93,\n                        -84,\n                        13,\n                        66,\n                        -64,\n                        13,\n                        -124,\n                        66,\n                        -116,\n                        111,\n                        29,\n                        66,\n                        106,\n                        -79,\n                        4,\n                        66,\n                        -78,\n                        27,\n                        -61,\n                        66,\n                        -73,\n                        62,\n                        66,\n                        66,\n                        124,\n                        -106,\n                        74,\n                        66,\n                        -120,\n                        121,\n                        -28,\n       
                 66,\n                        -120,\n                        19,\n                        -35,\n                        66,\n                        93,\n                        -71,\n                        -102,\n                        66,\n                        127,\n                        122,\n                        -106,\n                        66,\n                        95,\n                        76,\n                        -1,\n                        66,\n                        -64,\n                        -38,\n                        52,\n                        66,\n                        -81,\n                        -115,\n                        -11,\n                        66,\n                        -111,\n                        -13,\n                        73,\n                        66,\n                        127,\n                        -108,\n                        114,\n                        66,\n                        -113,\n                        122,\n                        81,\n                        66,\n                        -84,\n                        -109,\n                        105,\n                        66,\n                        -117,\n                        -71,\n                        77,\n                        66,\n                        69,\n                        -106,\n                        -87,\n                        66,\n                        -69,\n                        -29,\n                        -22,\n                        66,\n                        -116,\n                        28,\n                        -119,\n                        66,\n                        -62,\n                        57,\n                        -34,\n                        66,\n                        127,\n                        -21,\n                        -41,\n                        66,\n                        -87,\n                        44,\n               
         56,\n                        66,\n                        -79,\n                        -104,\n                        -44,\n                        64,\n                        2,\n                        60,\n                        80,\n                        66,\n                        71,\n                        -103,\n                        39,\n                        66,\n                        -100,\n                        22,\n                        31,\n                        66,\n                        124,\n                        126,\n                        -115,\n                        66,\n                        -119,\n                        39,\n                        -78,\n                        66,\n                        -118,\n                        -71,\n                        -128,\n                        66,\n                        103,\n                        -48,\n                        42,\n                        66,\n                        -103,\n                        46,\n                        4,\n                        66,\n                        -103,\n                        116,\n                        -29,\n                        65,\n                        -1,\n                        50,\n                        -106,\n                        66,\n                        -81,\n                        27,\n                        -126,\n                        66,\n                        -118,\n                        -16,\n                        -126,\n                        66,\n                        73,\n                        -28,\n                        -126,\n                        66,\n                        -111,\n                        -48,\n                        -107,\n                        66,\n                        -96,\n                        -64,\n                        124,\n                        66,\n                        -121,\n                       
 -15,\n                        7,\n                        66,\n                        -80,\n                        41,\n                        18,\n                        66,\n                        -76,\n                        91,\n                        -20,\n                        66,\n                        78,\n                        51,\n                        76,\n                        66,\n                        75,\n                        112,\n                        8,\n                        66,\n                        -111,\n                        -116,\n                        -111,\n                        66,\n                        100,\n                        -100,\n                        70,\n                        66,\n                        104,\n                        108,\n                        47,\n                        66,\n                        9,\n                        102,\n                        -76,\n                        66,\n                        73,\n                        97,\n                        91,\n                        66,\n                        -109,\n                        41,\n                        21,\n                        66,\n                        -104,\n                        42,\n                        108,\n                        66,\n                        76,\n                        -32,\n                        54,\n                        66,\n                        123,\n                        -123,\n                        2,\n                        66,\n                        -112,\n                        115,\n                        22,\n                        66,\n                        120,\n                        -86,\n                        -81,\n                        66,\n                        -109,\n                        -58,\n                        47,\n                        66,\n                        -68,\n                  
      -58,\n                        81,\n                        66,\n                        -77,\n                        -125,\n                        32,\n                        66,\n                        -114,\n                        70,\n                        12,\n                        66,\n                        114,\n                        17,\n                        -72,\n                        66,\n                        -110,\n                        17,\n                        71,\n                        66,\n                        -89,\n                        -32,\n                        -107,\n                        66,\n                        -59,\n                        -10,\n                        -23,\n                        66,\n                        9,\n                        -120,\n                        -89,\n                        66,\n                        -120,\n                        -81,\n                        -57,\n                        66,\n                        68,\n                        -49,\n                        -115,\n                        66,\n                        97,\n                        -97,\n                        -55,\n                        66,\n                        -118,\n                        12,\n                        -125,\n                        66,\n                        -109,\n                        49,\n                        -97,\n                        66,\n                        75,\n                        -84,\n                        -41,\n                        66,\n                        -109,\n                        -21,\n                        -97,\n                        66,\n                        -97,\n                        -82,\n                        63,\n                        66,\n                        -104,\n                        0,\n                        -107,\n                        66,\n                        
92,\n                        24,\n                        -53,\n                        66,\n                        -60,\n                        -19,\n                        104,\n                        66,\n                        -57,\n                        -51,\n                        -28,\n                        66,\n                        123,\n                        91,\n                        -13,\n                        66,\n                        -61,\n                        111,\n                        64,\n                        66,\n                        -99,\n                        37,\n                        -21,\n                        66,\n                        -99,\n                        79,\n                        -43,\n                        66,\n                        -70,\n                        -96,\n                        -90,\n                        66,\n                        69,\n                        50,\n                        71,\n                        66,\n                        -67,\n                        84,\n                        -112,\n                        66,\n                        87,\n                        -1,\n                        -1,\n                        66,\n                        -106,\n                        123,\n                        117,\n                        66,\n                        78,\n                        -98,\n                        -8,\n                        66,\n                        -123,\n                        122,\n                        -35,\n                        66,\n                        -114,\n                        -27,\n                        -127,\n                        66,\n                        68,\n                        50,\n                        -80,\n                        66,\n                        102,\n                        -61,\n                        -115,\n                        66,\n            
            -97,\n                        8,\n                        109,\n                        66,\n                        -112,\n                        44,\n                        -10,\n                        66,\n                        -65,\n                        56,\n                        76,\n                        66,\n                        111,\n                        25,\n                        -34,\n                        66,\n                        124,\n                        76,\n                        -46,\n                        66,\n                        -77,\n                        -73,\n                        42,\n                        66,\n                        -70,\n                        38,\n                        39,\n                        66,\n                        -99,\n                        69,\n                        9,\n                        66,\n                        -64,\n                        -92,\n                        -96,\n                        66,\n                        96,\n                        59,\n                        -25,\n                        66,\n                        99,\n                        -102,\n                        33,\n                        66,\n                        -83,\n                        35,\n                        -34,\n                        66,\n                        121,\n                        39,\n                        -76,\n                        66,\n                        -64,\n                        -70,\n                        -41,\n                        66,\n                        -73,\n                        -85,\n                        -26,\n                        66,\n                        80,\n                        17,\n                        -6,\n                        66,\n                        78,\n                        57,\n                        -49,\n                        66,\n           
             -102,\n                        112,\n                        119,\n                        66,\n                        -70,\n                        102,\n                        93,\n                        66,\n                        110,\n                        -67,\n                        43,\n                        66,\n                        89,\n                        89,\n                        50,\n                        66,\n                        79,\n                        -83,\n                        91,\n                        66,\n                        78,\n                        8,\n                        47,\n                        66,\n                        -63,\n                        38,\n                        -49,\n                        66,\n                        94,\n                        2,\n                        -105,\n                        66,\n                        -117,\n                        -122,\n                        -111,\n                        66,\n                        -107,\n                        -17,\n                        -29,\n                        66,\n                        -99,\n                        70,\n                        19,\n                        66,\n                        74,\n                        -45,\n                        -26,\n                        66,\n                        -59,\n                        16,\n                        -81,\n                        66,\n                        -60,\n                        -17,\n                        -96,\n                        66,\n                        -109,\n                        107,\n                        92,\n                        66,\n                        98,\n                        78,\n                        62,\n                        66,\n                        -101,\n                        98,\n                        -57,\n                        66,\n    
                    -115,\n                        39,\n                        -53,\n                        66,\n                        -114,\n                        -82,\n                        91,\n                        66,\n                        102,\n                        57,\n                        -65,\n                        66,\n                        -93,\n                        -111,\n                        30,\n                        66,\n                        76,\n                        -45,\n                        -50,\n                        66,\n                        -74,\n                        -8,\n                        3,\n                        66,\n                        -99,\n                        13,\n                        -86,\n                        66,\n                        -108,\n                        112,\n                        8,\n                        66,\n                        -64,\n                        -11,\n                        -109,\n                        66,\n                        -100,\n                        15,\n                        -23,\n                        66,\n                        101,\n                        108,\n                        114,\n                        66,\n                        -119,\n                        13,\n                        -90,\n                        66,\n                        -62,\n                        16,\n                        -42,\n                        66,\n                        -115,\n                        -24,\n                        7,\n                        66,\n                        81,\n                        111,\n                        67,\n                        66,\n                        68,\n                        19,\n                        12,\n                        66,\n                        107,\n                        37,\n                        -39,\n                        
66,\n                        91,\n                        88,\n                        -80,\n                        66,\n                        -108,\n                        64,\n                        -89,\n                        66,\n                        -111,\n                        114,\n                        40,\n                        66,\n                        -115,\n                        -28,\n                        -26,\n                        66,\n                        70,\n                        -79,\n                        60,\n                        66,\n                        -67,\n                        -53,\n                        -78,\n                        66,\n                        -120,\n                        -30,\n                        -44,\n                        66,\n                        -104,\n                        45,\n                        -89,\n                        66,\n                        -65,\n                        -27,\n                        11,\n                        66,\n                        -108,\n                        -97,\n                        -88,\n                        66,\n                        -81,\n                        -74,\n                        -36,\n                        66,\n                        -116,\n                        2,\n                        -104,\n                        66,\n                        70,\n                        -28,\n                        120,\n                        66,\n                        80,\n                        -24,\n                        40,\n                        66,\n                        -69,\n                        116,\n                        11,\n                        66,\n                        -69,\n                        53,\n                        -57,\n                        66,\n                        74,\n                        -66,\n                        -114,\n        
                66,\n                        -100,\n                        117,\n                        -121,\n                        66,\n                        -104,\n                        34,\n                        107,\n                        66,\n                        -122,\n                        88,\n                        2,\n                        66,\n                        -87,\n                        -125,\n                        -38,\n                        66,\n                        73,\n                        43,\n                        -5,\n                        66,\n                        90,\n                        91,\n                        -26,\n                        66,\n                        -75,\n                        74,\n                        -82,\n                        66,\n                        75,\n                        -105,\n                        110,\n                        66,\n                        -106,\n                        53,\n                        64,\n                        66,\n                        119,\n                        -103,\n                        -16,\n                        66,\n                        -61,\n                        -20,\n                        -41,\n                        66,\n                        -116,\n                        -35,\n                        7,\n                        66,\n                        -111,\n                        -89,\n                        -52,\n                        66,\n                        97,\n                        103,\n                        94,\n                        66,\n                        121,\n                        -99,\n                        -53,\n                        66,\n                        21,\n                        -89,\n                        -128,\n                        66,\n                        -60,\n                        -14,\n                      
  -33,\n                        66,\n                        -59,\n                        124,\n                        86,\n                        66,\n                        86,\n                        -16,\n                        53,\n                        66,\n                        69,\n                        74,\n                        -59,\n                        66,\n                        -83,\n                        82,\n                        8,\n                        66,\n                        -101,\n                        -100,\n                        79,\n                        66,\n                        -78,\n                        -91,\n                        -120,\n                        66,\n                        -95,\n                        -94,\n                        -104,\n                        66,\n                        -57,\n                        51,\n                        -50,\n                        66,\n                        91,\n                        -13,\n                        -90,\n                        66,\n                        78,\n                        29,\n                        71,\n                        66,\n                        81,\n                        23,\n                        -72,\n                        66,\n                        54,\n                        -109,\n                        -94,\n                        66,\n                        83,\n                        127,\n                        18,\n                        66,\n                        94,\n                        -40,\n                        1,\n                        66,\n                        -60,\n                        85,\n                        55,\n                        66,\n                        -109,\n                        72,\n                        -76,\n                        66,\n                        -96,\n                        32,\n                   
     67,\n                        66,\n                        -105,\n                        22,\n                        -9,\n                        66,\n                        93,\n                        27,\n                        48,\n                        66,\n                        -119,\n                        -116,\n                        -53,\n                        66,\n                        -108,\n                        45,\n                        112,\n                        66,\n                        107,\n                        -58,\n                        124,\n                        66,\n                        -70,\n                        -70,\n                        46,\n                        66,\n                        -65,\n                        -69,\n                        -74,\n                        66,\n                        -67,\n                        45,\n                        100,\n                        66,\n                        126,\n                        64,\n                        1,\n                        66,\n                        70,\n                        46,\n                        -98,\n                        66,\n                        102,\n                        98,\n                        -81,\n                        66,\n                        -102,\n                        -40,\n                        -128,\n                        66,\n                        -118,\n                        6,\n                        -100,\n                        66,\n                        72,\n                        22,\n                        -127,\n                        66,\n                        -75,\n                        54,\n                        25,\n                        66,\n                        87,\n                        -14,\n                        89,\n                        66,\n                        -70,\n                        94,\n           
             -54,\n                        66,\n                        -108,\n                        -53,\n                        -67,\n                        66,\n                        -62,\n                        -13,\n                        -107,\n                        66,\n                        -64,\n                        9,\n                        -46,\n                        66,\n                        -109,\n                        78,\n                        -53,\n                        66,\n                        -77,\n                        56,\n                        -5,\n                        66,\n                        76,\n                        -42,\n                        -85,\n                        66,\n                        91,\n                        62,\n                        1,\n                        66,\n                        -112,\n                        10,\n                        14,\n                        66,\n                        -106,\n                        34,\n                        26,\n                        66,\n                        -106,\n                        -102,\n                        127,\n                        66,\n                        -61,\n                        52,\n                        102,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n        
                0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                    
    0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 230,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        717437978,\n                        1119213980,\n                        1118444668,\n                        989286208,\n                        970882574,\n                        988223299,\n                        726931786,\n                        1117026662,\n                        710338696,\n                        602037004,\n                        712655080,\n                        969089990,\n                        4,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1162258294,\n                        975725585,\n                        1104078506,\n                        1104332129,\n                        597802270,\n                        988276139,\n                        602574268,\n                        1117551892,\n                        975479669,\n                        626329463,\n                        1011657829,\n                        968552234,\n                        4,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 24,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 24,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 
-1557102390726603767,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        334547014,\n                        454423597,\n                        590996778,\n                        207284134,\n                        1064297783,\n                        340994793,\n                        911955070,\n                        337419323,\n                        183057998,\n                        757176701,\n                        531099199,\n                        1004924066,\n                        459475923,\n                        849672573,\n                        1023248702,\n                        634775925,\n                        924285275,\n                        497880063,\n                        41503966,\n                        317367421,\n                        886170957,\n                        202459491,\n                        933685293,\n                        720539343,\n                        187430363,\n                        179875177,\n                        324372037,\n                        237219754,\n                        91203445,\n                        190356075,\n                        349218209,\n                        196730430,\n                        247066034,\n                        
259757171,\n                        615832889,\n                        1020566075,\n                        868218209,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        106,\n                        119,\n                        16,\n                        67,\n                        -41,\n                        2,\n                        -41,\n                        69,\n                        30,\n                        -127,\n                        72,\n                        66,\n                        -86,\n                        -67,\n                        -82,\n                        66,\n                        96,\n                        80,\n                        3,\n                        66,\n                        -112,\n                        -16,\n                        -50,\n                        66,\n                        -116,\n                        -81,\n                        95,\n                        66,\n                        73,\n                        -35,\n                        108,\n                        66,\n                        -120,\n                        -70,\n                        31,\n                        66,\n                        70,\n                        83,\n                        102,\n                        66,\n                        125,\n                        3,\n                        124,\n                        65,\n                        -98,\n                        71,\n                        -4,\n                        66,\n                        -72,\n                        111,\n                        75,\n                        66,\n                        -99,\n                        -123,\n                        
-22,\n                        65,\n                        -51,\n                        113,\n                        -47,\n                        66,\n                        87,\n                        8,\n                        -67,\n                        66,\n                        77,\n                        -107,\n                        -119,\n                        66,\n                        -122,\n                        -66,\n                        -30,\n                        66,\n                        -64,\n                        -80,\n                        -100,\n                        66,\n                        -122,\n                        126,\n                        77,\n                        66,\n                        75,\n                        86,\n                        8,\n                        66,\n                        -93,\n                        118,\n                        -125,\n                        66,\n                        -96,\n                        -95,\n                        85,\n                        66,\n                        -77,\n                        -115,\n                        -64,\n                        65,\n                        -101,\n                        -72,\n                        91,\n                        66,\n                        -126,\n                        50,\n                        83,\n                        66,\n                        -110,\n                        -106,\n                        -110,\n                        66,\n                        32,\n                        -121,\n                        -85,\n                        66,\n                        -71,\n                        51,\n                        -51,\n                        66,\n                        124,\n                        -46,\n                        65,\n                        66,\n                        -113,\n                        -5,\n    
                    16,\n                        66,\n                        116,\n                        0,\n                        120,\n                        66,\n                        -68,\n                        -100,\n                        -114,\n                        66,\n                        -72,\n                        16,\n                        83,\n                        66,\n                        -108,\n                        3,\n                        120,\n                        66,\n                        -77,\n                        -58,\n                        124,\n                        66,\n                        -77,\n                        -56,\n                        23,\n                        66,\n                        87,\n                        -17,\n                        -19,\n                        66,\n                        -124,\n                        110,\n                        -78,\n                        66,\n                        -61,\n                        -53,\n                        104,\n                        66,\n                        -115,\n                        82,\n                        120,\n                        66,\n                        96,\n                        84,\n                        -118,\n                        66,\n                        97,\n                        -19,\n                        57,\n                        66,\n                        -126,\n                        -100,\n                        73,\n                        66,\n                        71,\n                        17,\n                        -50,\n                        66,\n                        -111,\n                        117,\n                        -25,\n                        66,\n                        -105,\n                        -116,\n                        -25,\n                        66,\n                        -78,\n                 
       55,\n                        13,\n                        66,\n                        -71,\n                        -24,\n                        21,\n                        66,\n                        -76,\n                        111,\n                        -91,\n                        66,\n                        87,\n                        -68,\n                        124,\n                        66,\n                        -60,\n                        -114,\n                        -55,\n                        66,\n                        -69,\n                        -90,\n                        104,\n                        66,\n                        -88,\n                        54,\n                        109,\n                        66,\n                        -117,\n                        -81,\n                        -5,\n                        66,\n                        78,\n                        -104,\n                        13,\n                        66,\n                        -78,\n                        -37,\n                        -81,\n                        66,\n                        109,\n                        59,\n                        -105,\n                        66,\n                        -75,\n                        77,\n                        61,\n                        66,\n                        -73,\n                        -16,\n                        -2,\n                        66,\n                        119,\n                        31,\n                        -51,\n                        66,\n                        -115,\n                        97,\n                        106,\n                        66,\n                        122,\n                        83,\n                        -28,\n                        66,\n                        84,\n                        126,\n                        43,\n                        66,\n                        -93,\n      
                  -9,\n                        28,\n                        66,\n                        59,\n                        -82,\n                        -85,\n                        66,\n                        -83,\n                        92,\n                        74,\n                        66,\n                        -89,\n                        -54,\n                        126,\n                        66,\n                        79,\n                        -118,\n                        47,\n                        66,\n                        102,\n                        -114,\n                        87,\n                        66,\n                        -78,\n                        58,\n                        79,\n                        66,\n                        -111,\n                        -102,\n                        113,\n                        66,\n                        111,\n                        -24,\n                        100,\n                        66,\n                        -75,\n                        -123,\n                        -113,\n                        66,\n                        -74,\n                        -105,\n                        -86,\n                        66,\n                        -70,\n                        -7,\n                        -20,\n                        66,\n                        -77,\n                        63,\n                        19,\n                        66,\n                        -127,\n                        123,\n                        11,\n                        66,\n                        -103,\n                        -81,\n                        1,\n                        66,\n                        117,\n                        -37,\n                        -107,\n                        66,\n                        -61,\n                        110,\n                        -109,\n                        66,\n                  
      -112,\n                        -20,\n                        -123,\n                        66,\n                        -92,\n                        117,\n                        45,\n                        66,\n                        -117,\n                        -18,\n                        87,\n                        66,\n                        -69,\n                        -98,\n                        73,\n                        66,\n                        -117,\n                        -78,\n                        16,\n                        66,\n                        -59,\n                        -123,\n                        -84,\n                        66,\n                        75,\n                        114,\n                        35,\n                        66,\n                        -125,\n                        -73,\n                        -75,\n                        66,\n                        -80,\n                        -28,\n                        111,\n                        66,\n                        -100,\n                        -97,\n                        -22,\n                        66,\n                        -110,\n                        115,\n                        103,\n                        66,\n                        -59,\n                        91,\n                        27,\n                        66,\n                        81,\n                        73,\n                        -100,\n                        66,\n                        -120,\n                        6,\n                        -53,\n                        66,\n                        -69,\n                        -114,\n                        103,\n                        66,\n                        98,\n                        -100,\n                        15,\n                        66,\n                        -68,\n                        75,\n                        80,\n                        
66,\n                        -61,\n                        97,\n                        8,\n                        66,\n                        -75,\n                        -20,\n                        54,\n                        66,\n                        -120,\n                        38,\n                        -95,\n                        66,\n                        77,\n                        -115,\n                        20,\n                        66,\n                        72,\n                        121,\n                        -115,\n                        66,\n                        78,\n                        -18,\n                        -7,\n                        66,\n                        -66,\n                        -24,\n                        -34,\n                        66,\n                        -79,\n                        85,\n                        -91,\n                        66,\n                        -67,\n                        34,\n                        -47,\n                        66,\n                        -71,\n                        -97,\n                        7,\n                        66,\n                        -86,\n                        15,\n                        -126,\n                        66,\n                        -67,\n                        -111,\n                        43,\n                        66,\n                        -124,\n                        82,\n                        -6,\n                        66,\n                        -80,\n                        72,\n                        120,\n                        66,\n                        78,\n                        61,\n                        -11,\n                        66,\n                        -113,\n                        9,\n                        73,\n                        66,\n                        -107,\n                        -105,\n                        0,\n                  
      66,\n                        74,\n                        -107,\n                        65,\n                        66,\n                        -100,\n                        -5,\n                        -63,\n                        66,\n                        -111,\n                        1,\n                        -65,\n                        66,\n                        -102,\n                        71,\n                        -19,\n                        66,\n                        -127,\n                        109,\n                        80,\n                        66,\n                        -103,\n                        55,\n                        110,\n                        66,\n                        -90,\n                        -48,\n                        35,\n                        66,\n                        -104,\n                        -36,\n                        46,\n                        66,\n                        93,\n                        -28,\n                        53,\n                        66,\n                        -103,\n                        54,\n                        -61,\n                        66,\n                        -62,\n                        71,\n                        31,\n                        66,\n                        102,\n                        111,\n                        46,\n                        66,\n                        70,\n                        -87,\n                        -36,\n                        66,\n                        -60,\n                        89,\n                        -90,\n                        66,\n                        -57,\n                        -19,\n                        -12,\n                        66,\n                        -99,\n                        25,\n                        -52,\n                        66,\n                        -73,\n                        -61,\n                        -71,\n      
                  66,\n                        -127,\n                        -128,\n                        -114,\n                        66,\n                        -110,\n                        105,\n                        84,\n                        66,\n                        -65,\n                        29,\n                        -31,\n                        66,\n                        -100,\n                        55,\n                        14,\n                        66,\n                        -86,\n                        -38,\n                        -7,\n                        66,\n                        81,\n                        71,\n                        -122,\n                        66,\n                        73,\n                        92,\n                        -56,\n                        66,\n                        -89,\n                        -5,\n                        -38,\n                        65,\n                        -96,\n                        -96,\n                        -50,\n                        66,\n                        -90,\n                        29,\n                        -59,\n                        66,\n                        74,\n                        -85,\n                        -47,\n                        66,\n                        -109,\n                        3,\n                        -13,\n                        66,\n                        76,\n                        99,\n                        47,\n                        66,\n                        -61,\n                        17,\n                        111,\n                        66,\n                        -96,\n                        -65,\n                        -69,\n                        66,\n                        83,\n                        118,\n                        0,\n                        66,\n                        -66,\n                        83,\n                        
30,\n                        66,\n                        -106,\n                        -48,\n                        -54,\n                        66,\n                        -116,\n                        83,\n                        43,\n                        66,\n                        103,\n                        -5,\n                        33,\n                        65,\n                        -11,\n                        -15,\n                        -113,\n                        66,\n                        -121,\n                        117,\n                        -19,\n                        66,\n                        82,\n                        -62,\n                        -13,\n                        66,\n                        -117,\n                        -124,\n                        60,\n                        66,\n                        -106,\n                        -99,\n                        -47,\n                        66,\n                        -59,\n                        -62,\n                        32,\n                        66,\n                        -113,\n                        102,\n                        89,\n                        66,\n                        114,\n                        53,\n                        -126,\n                        66,\n                        -97,\n                        -64,\n                        -22,\n                        66,\n                        85,\n                        84,\n                        50,\n                        66,\n                        -61,\n                        70,\n                        -6,\n                        66,\n                        -113,\n                        6,\n                        -53,\n                        66,\n                        96,\n                        -101,\n                        -23,\n                        66,\n                        85,\n                        103,\n         
               19,\n                        66,\n                        -68,\n                        -25,\n                        67,\n                        66,\n                        85,\n                        -47,\n                        74,\n                        66,\n                        -114,\n                        124,\n                        95,\n                        66,\n                        89,\n                        -23,\n                        -27,\n                        66,\n                        -111,\n                        -90,\n                        20,\n                        66,\n                        85,\n                        -104,\n                        2,\n                        66,\n                        -77,\n                        90,\n                        99,\n                        66,\n                        -79,\n                        -107,\n                        -7,\n                        66,\n                        98,\n                        124,\n                        111,\n                        66,\n                        86,\n                        56,\n                        -10,\n                        66,\n                        -72,\n                        -104,\n                        57,\n                        66,\n                        -111,\n                        -65,\n                        -43,\n                        66,\n                        -113,\n                        35,\n                        -104,\n                        66,\n                        -99,\n                        102,\n                        -102,\n                        66,\n                        -118,\n                        71,\n                        -40,\n                        66,\n                        -109,\n                        -45,\n                        18,\n                        66,\n                        -103,\n                        
-112,\n                        71,\n                        66,\n                        -119,\n                        58,\n                        60,\n                        66,\n                        -100,\n                        55,\n                        -33,\n                        66,\n                        -64,\n                        -52,\n                        -19,\n                        66,\n                        -67,\n                        -48,\n                        121,\n                        66,\n                        -114,\n                        -113,\n                        112,\n                        66,\n                        83,\n                        64,\n                        -8,\n                        66,\n                        119,\n                        12,\n                        64,\n                        66,\n                        122,\n                        25,\n                        32,\n                        66,\n                        -91,\n                        33,\n                        -72,\n                        66,\n                        -68,\n                        -24,\n                        37,\n                        66,\n                        -95,\n                        -76,\n                        122,\n                        66,\n                        80,\n                        110,\n                        60,\n                        66,\n                        73,\n                        92,\n                        -48,\n                        66,\n                        77,\n                        -11,\n                        106,\n                        66,\n                        86,\n                        93,\n                        -57,\n                        66,\n                        112,\n                        69,\n                        16,\n                        66,\n                        81,\n                   
     122,\n                        28,\n                        66,\n                        -117,\n                        -57,\n                        -67,\n                        66,\n                        69,\n                        -124,\n                        -37,\n                        66,\n                        82,\n                        28,\n                        -55,\n                        66,\n                        91,\n                        -74,\n                        99,\n                        66,\n                        -111,\n                        15,\n                        -113,\n                        66,\n                        -104,\n                        43,\n                        115,\n                        66,\n                        82,\n                        2,\n                        74,\n                        66,\n                        -117,\n                        -66,\n                        -97,\n                        66,\n                        110,\n                        1,\n                        39,\n                        66,\n                        -78,\n                        -119,\n                        -32,\n                        66,\n                        88,\n                        -13,\n                        34,\n                        66,\n                        -106,\n                        12,\n                        105,\n                        66,\n                        -60,\n                        99,\n                        -51,\n                        66,\n                        -112,\n                        -78,\n                        -118,\n                        66,\n                        -104,\n                        107,\n                        124,\n                        66,\n                        -74,\n                        59,\n                        45,\n                        66,\n                        -98,\n      
                  28,\n                        12,\n                        66,\n                        92,\n                        99,\n                        -38,\n                        66,\n                        -122,\n                        -31,\n                        8,\n                        66,\n                        68,\n                        63,\n                        53,\n                        66,\n                        103,\n                        119,\n                        94,\n                        66,\n                        -108,\n                        54,\n                        31,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                 
       0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n 
                       0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 222,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        985046669,\n                        1142949442,\n                        1119015724,\n                        1157213840,\n                        773164723,\n                        987705700,\n                        597867872,\n                        581689228,\n                        1157399005,\n                        1119154712,\n                        973334957,\n                        803749,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1033114486,\n                        1142420885,\n                        1102739963,\n                        1142361671,\n                        758300587,\n                        975665623,\n                        712458250,\n                        581862005,\n                        1117610944,\n                        1104097918,\n                        585916618,\n                        798136,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 32,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 
32,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -6409199319416524683,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        1021646933,\n                        647268721,\n                        716742249,\n                        116602150,\n                        460414571,\n                        41242661,\n                        103863414,\n                        800447167,\n                        883388197,\n                        932936781,\n                        451910127,\n                        1061072235,\n                        169843189,\n                        116108163,\n                        987602797,\n                        191433166,\n                        880127831,\n                        194078438,\n                        1001698749,\n                        258674287,\n                        317697218,\n                        576575781,\n                        228028153,\n                        437316001,\n                        480413485,\n                        249747785,\n                        64201949,\n                        523103735,\n             
           633141329,\n                        204699517,\n                        733193805,\n                        208571447,\n                        499115686,\n                        974299113,\n                        489373154,\n                        1033721647,\n                        257926714,\n                        27354329,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        67,\n                        -82,\n                        -91,\n                        100,\n                        68,\n                        90,\n                        28,\n                        55,\n                        67,\n                        120,\n                        94,\n                        -14,\n                        67,\n                        -43,\n                        -13,\n                        -27,\n                        67,\n                        -116,\n                        -125,\n                        -100,\n                        66,\n                        -83,\n                        -122,\n                        -36,\n                        66,\n                        -122,\n                        -92,\n                        41,\n                        65,\n                        -124,\n                        -104,\n                        -92,\n                        66,\n                        -69,\n                        22,\n                        -26,\n                        66,\n                        -114,\n                        10,\n                        -72,\n                        65,\n                        -21,\n                        -30,\n                        -124,\n                        66,\n                        -115,\n                        37,\n                        3,\n                     
   66,\n                        -99,\n                        -87,\n                        114,\n                        66,\n                        69,\n                        -63,\n                        -99,\n                        66,\n                        99,\n                        119,\n                        -122,\n                        66,\n                        -95,\n                        10,\n                        -75,\n                        66,\n                        99,\n                        36,\n                        11,\n                        66,\n                        126,\n                        -8,\n                        25,\n                        66,\n                        -69,\n                        -58,\n                        91,\n                        66,\n                        -110,\n                        -31,\n                        41,\n                        66,\n                        -106,\n                        -126,\n                        51,\n                        66,\n                        -68,\n                        59,\n                        119,\n                        66,\n                        -128,\n                        26,\n                        -61,\n                        66,\n                        96,\n                        53,\n                        -93,\n                        66,\n                        97,\n                        -86,\n                        88,\n                        66,\n                        93,\n                        69,\n                        -124,\n                        66,\n                        120,\n                        -105,\n                        34,\n                        66,\n                        -70,\n                        111,\n                        -87,\n                        66,\n                        111,\n                        100,\n                        -122,\n          
              66,\n                        -99,\n                        -4,\n                        47,\n                        66,\n                        -123,\n                        122,\n                        -38,\n                        66,\n                        -100,\n                        -57,\n                        28,\n                        65,\n                        84,\n                        -116,\n                        -116,\n                        66,\n                        -95,\n                        -28,\n                        -96,\n                        65,\n                        10,\n                        -28,\n                        -35,\n                        66,\n                        -107,\n                        -30,\n                        13,\n                        66,\n                        -73,\n                        -69,\n                        117,\n                        66,\n                        87,\n                        -87,\n                        -60,\n                        66,\n                        -117,\n                        -77,\n                        109,\n                        66,\n                        -108,\n                        81,\n                        -43,\n                        65,\n                        32,\n                        72,\n                        -117,\n                        66,\n                        76,\n                        -57,\n                        -107,\n                        66,\n                        116,\n                        6,\n                        -7,\n                        66,\n                        -105,\n                        15,\n                        -4,\n                        66,\n                        -62,\n                        -99,\n                        124,\n                        66,\n                        71,\n                        -43,\n                        
-43,\n                        66,\n                        79,\n                        -81,\n                        -22,\n                        65,\n                        -84,\n                        -40,\n                        38,\n                        66,\n                        -110,\n                        -74,\n                        -88,\n                        66,\n                        -93,\n                        -49,\n                        -26,\n                        66,\n                        115,\n                        -72,\n                        11,\n                        66,\n                        -95,\n                        -5,\n                        -128,\n                        66,\n                        -63,\n                        79,\n                        -41,\n                        66,\n                        -84,\n                        -58,\n                        96,\n                        66,\n                        -121,\n                        33,\n                        -78,\n                        66,\n                        -81,\n                        125,\n                        18,\n                        66,\n                        79,\n                        68,\n                        57,\n                        66,\n                        -69,\n                        24,\n                        -91,\n                        66,\n                        -115,\n                        61,\n                        111,\n                        66,\n                        -126,\n                        77,\n                        17,\n                        66,\n                        81,\n                        124,\n                        76,\n                        66,\n                        104,\n                        -99,\n                        87,\n                        66,\n                        -68,\n                        -56,\n              
          89,\n                        66,\n                        118,\n                        -74,\n                        52,\n                        66,\n                        -72,\n                        -32,\n                        4,\n                        66,\n                        -98,\n                        115,\n                        -77,\n                        66,\n                        98,\n                        -40,\n                        127,\n                        66,\n                        -113,\n                        -3,\n                        -97,\n                        66,\n                        -128,\n                        -66,\n                        -89,\n                        66,\n                        -107,\n                        31,\n                        -113,\n                        66,\n                        123,\n                        56,\n                        -43,\n                        66,\n                        104,\n                        35,\n                        38,\n                        66,\n                        -109,\n                        92,\n                        -126,\n                        66,\n                        88,\n                        -76,\n                        69,\n                        66,\n                        -80,\n                        16,\n                        -93,\n                        66,\n                        83,\n                        40,\n                        123,\n                        66,\n                        -108,\n                        -104,\n                        30,\n                        66,\n                        119,\n                        -14,\n                        25,\n                        66,\n                        90,\n                        112,\n                        11,\n                        64,\n                        -109,\n                        94,\n   
                     88,\n                        66,\n                        -63,\n                        -62,\n                        29,\n                        66,\n                        84,\n                        9,\n                        -87,\n                        66,\n                        -89,\n                        99,\n                        120,\n                        66,\n                        117,\n                        100,\n                        21,\n                        66,\n                        -111,\n                        -81,\n                        -67,\n                        66,\n                        88,\n                        12,\n                        1,\n                        66,\n                        110,\n                        2,\n                        88,\n                        66,\n                        115,\n                        -103,\n                        13,\n                        66,\n                        123,\n                        50,\n                        -6,\n                        66,\n                        -118,\n                        68,\n                        21,\n                        66,\n                        -67,\n                        31,\n                        6,\n                        66,\n                        -62,\n                        34,\n                        123,\n                        66,\n                        -66,\n                        -85,\n                        10,\n                        66,\n                        -78,\n                        -60,\n                        55,\n                        66,\n                        -70,\n                        70,\n                        90,\n                        66,\n                        -113,\n                        -32,\n                        111,\n                        66,\n                        92,\n                        67,\n     
                   -113,\n                        66,\n                        -73,\n                        -7,\n                        -107,\n                        66,\n                        115,\n                        -76,\n                        113,\n                        66,\n                        73,\n                        109,\n                        37,\n                        66,\n                        95,\n                        37,\n                        78,\n                        66,\n                        -74,\n                        -71,\n                        90,\n                        66,\n                        -61,\n                        -68,\n                        100,\n                        66,\n                        68,\n                        94,\n                        -88,\n                        66,\n                        -102,\n                        -110,\n                        -88,\n                        66,\n                        -74,\n                        49,\n                        -110,\n                        66,\n                        -121,\n                        125,\n                        93,\n                        66,\n                        -109,\n                        -2,\n                        60,\n                        66,\n                        -96,\n                        -21,\n                        -107,\n                        66,\n                        -101,\n                        -28,\n                        -39,\n                        66,\n                        -105,\n                        -82,\n                        3,\n                        66,\n                        -120,\n                        127,\n                        118,\n                        66,\n                        74,\n                        89,\n                        28,\n                        66,\n                        -106,\n                  
      -80,\n                        -96,\n                        66,\n                        97,\n                        -101,\n                        -23,\n                        66,\n                        78,\n                        115,\n                        -108,\n                        66,\n                        80,\n                        -20,\n                        -102,\n                        66,\n                        -83,\n                        -122,\n                        120,\n                        66,\n                        -87,\n                        -110,\n                        97,\n                        66,\n                        101,\n                        -121,\n                        -54,\n                        66,\n                        -78,\n                        -108,\n                        -95,\n                        66,\n                        -94,\n                        -98,\n                        11,\n                        66,\n                        69,\n                        -88,\n                        -107,\n                        63,\n                        -69,\n                        -59,\n                        4,\n                        66,\n                        -83,\n                        -16,\n                        72,\n                        66,\n                        -115,\n                        -52,\n                        -11,\n                        66,\n                        -89,\n                        105,\n                        -13,\n                        66,\n                        -95,\n                        116,\n                        71,\n                        66,\n                        -67,\n                        -91,\n                        -65,\n                        66,\n                        84,\n                        54,\n                        -77,\n                        66,\n                        
-103,\n                        -106,\n                        -90,\n                        66,\n                        -84,\n                        101,\n                        6,\n                        66,\n                        -102,\n                        42,\n                        4,\n                        66,\n                        101,\n                        -56,\n                        71,\n                        66,\n                        78,\n                        41,\n                        38,\n                        66,\n                        -81,\n                        -88,\n                        36,\n                        66,\n                        -110,\n                        -51,\n                        112,\n                        66,\n                        -72,\n                        -87,\n                        -86,\n                        66,\n                        -112,\n                        -86,\n                        -107,\n                        66,\n                        -89,\n                        93,\n                        -14,\n                        66,\n                        -71,\n                        -47,\n                        -27,\n                        66,\n                        -99,\n                        98,\n                        -38,\n                        66,\n                        -103,\n                        -100,\n                        -70,\n                        66,\n                        -102,\n                        -84,\n                        -11,\n                        66,\n                        -88,\n                        -6,\n                        -12,\n                        66,\n                        -88,\n                        83,\n                        80,\n                        66,\n                        -103,\n                        -90,\n                        -27,\n                        66,\n      
                  -101,\n                        -121,\n                        -49,\n                        66,\n                        -65,\n                        117,\n                        -74,\n                        66,\n                        -75,\n                        107,\n                        11,\n                        66,\n                        -108,\n                        124,\n                        -16,\n                        66,\n                        -72,\n                        -43,\n                        88,\n                        66,\n                        -65,\n                        119,\n                        -34,\n                        66,\n                        -112,\n                        8,\n                        55,\n                        66,\n                        -73,\n                        59,\n                        100,\n                        66,\n                        74,\n                        53,\n                        69,\n                        66,\n                        -103,\n                        31,\n                        112,\n                        66,\n                        -64,\n                        12,\n                        38,\n                        66,\n                        -121,\n                        -112,\n                        -33,\n                        66,\n                        83,\n                        -99,\n                        -80,\n                        66,\n                        -99,\n                        -110,\n                        63,\n                        66,\n                        -99,\n                        -77,\n                        92,\n                        66,\n                        81,\n                        54,\n                        -46,\n                        66,\n                        87,\n                        -7,\n                        -84,\n                       
 66,\n                        77,\n                        -88,\n                        62,\n                        66,\n                        103,\n                        -127,\n                        -3,\n                        66,\n                        -64,\n                        -106,\n                        16,\n                        66,\n                        69,\n                        42,\n                        68,\n                        66,\n                        -98,\n                        105,\n                        -98,\n                        66,\n                        -60,\n                        83,\n                        -96,\n                        66,\n                        -59,\n                        -125,\n                        106,\n                        66,\n                        -100,\n                        -18,\n                        -48,\n                        66,\n                        79,\n                        117,\n                        -19,\n                        66,\n                        -64,\n                        36,\n                        -5,\n                        66,\n                        -111,\n                        -28,\n                        87,\n                        66,\n                        86,\n                        126,\n                        6,\n                        66,\n                        -92,\n                        70,\n                        119,\n                        66,\n                        -70,\n                        61,\n                        -73,\n                        66,\n                        77,\n                        42,\n                        48,\n                        66,\n                        -64,\n                        -67,\n                        126,\n                        66,\n                        -105,\n                        -46,\n                        -41,\n              
          66,\n                        -71,\n                        2,\n                        44,\n                        66,\n                        -96,\n                        -38,\n                        10,\n                        66,\n                        92,\n                        13,\n                        -5,\n                        66,\n                        79,\n                        -62,\n                        -109,\n                        66,\n                        -114,\n                        113,\n                        -104,\n                        66,\n                        78,\n                        101,\n                        37,\n                        66,\n                        -128,\n                        -32,\n                        97,\n                        66,\n                        93,\n                        -47,\n                        -108,\n                        66,\n                        -98,\n                        -102,\n                        12,\n                        66,\n                        -73,\n                        24,\n                        -42,\n                        66,\n                        -79,\n                        -36,\n                        124,\n                        66,\n                        -67,\n                        -88,\n                        80,\n                        66,\n                        -114,\n                        -104,\n                        -38,\n                        66,\n                        -101,\n                        19,\n                        46,\n                        66,\n                        125,\n                        4,\n                        -81,\n                        66,\n                        75,\n                        46,\n                        90,\n                        66,\n                        -68,\n                        -41,\n                        -94,\n    
                    66,\n                        -106,\n                        -109,\n                        -115,\n                        66,\n                        79,\n                        -87,\n                        -21,\n                        66,\n                        84,\n                        -111,\n                        -73,\n                        66,\n                        -97,\n                        106,\n                        120,\n                        66,\n                        -108,\n                        27,\n                        -80,\n                        66,\n                        -123,\n                        -62,\n                        67,\n                        66,\n                        -61,\n                        69,\n                        -42,\n                        66,\n                        109,\n                        -32,\n                        40,\n                        66,\n                        93,\n                        -39,\n                        -122,\n                        66,\n                        -61,\n                        -122,\n                        -65,\n                        66,\n                        -62,\n                        -32,\n                        -47,\n                        66,\n                        -64,\n                        -17,\n                        72,\n                        66,\n                        92,\n                        -124,\n                        26,\n                        66,\n                        -119,\n                        -118,\n                        116,\n                        66,\n                        -111,\n                        -8,\n                        28,\n                        66,\n                        -59,\n                        68,\n                        115,\n                        66,\n                        -114,\n                        72,\n            
            -69,\n                        66,\n                        -69,\n                        55,\n                        127,\n                        66,\n                        -73,\n                        27,\n                        -40,\n                        66,\n                        -115,\n                        -59,\n                        -3,\n                        66,\n                        -98,\n                        96,\n                        52,\n                        66,\n                        70,\n                        -107,\n                        1,\n                        66,\n                        -114,\n                        -73,\n                        -104,\n                        66,\n                        71,\n                        -21,\n                        54,\n                        66,\n                        -110,\n                        46,\n                        -119,\n                        66,\n                        -60,\n                        -125,\n                        13,\n                        66,\n                        -118,\n                        108,\n                        -42,\n                        66,\n                        -67,\n                        59,\n                        -69,\n                        66,\n                        -63,\n                        -110,\n                        -63,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n        
                0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 227,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1162176173,\n                        1099306471,\n                        974102908,\n                        1161730016,\n                        581376037,\n                        602456443,\n                        731705777,\n                        730120136,\n                        588297802,\n                        1112579032,\n                        629513977,\n                        237465553,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1162018588,\n                        1117027489,\n                        989520461,\n                        1027747025,\n                        588108236,\n                        1112815571,\n                        581396173,\n                        772451791,\n                        988473614,\n                        1025956354,\n                        588049028,\n                        194419561,\n                     
   0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 27,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 27,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -6381589943519458827,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        352281810,\n                        446095089,\n                        1012246111,\n                        849907043,\n                        782611623,\n                        763426931,\n                        992660639,\n                        215657078,\n                        217409703,\n                        623679705,\n                        241085029,\n                        364113786,\n                        363982511,\n                        92009673,\n                        56743473,\n                        790423497,\n                        653489239,\n                        863761909,\n                        36568358,\n                        209016702,\n                        385558117,\n                     
   329419439,\n                        107947858,\n                        472772570,\n                        656668410,\n                        358792059,\n                        1014472255,\n                        321861093,\n                        379960866,\n                        305114406,\n                        706424673,\n                        190769787,\n                        441896547,\n                        120269177,\n                        334486061,\n                        855607679,\n                        259879998,\n                        639696419,\n                        14,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -27,\n                        -117,\n                        -90,\n                        69,\n                        97,\n                        -12,\n                        50,\n                        67,\n                        -57,\n                        -21,\n                        87,\n                        68,\n                        -102,\n                        77,\n                        4,\n                        65,\n                        25,\n                        -115,\n                        120,\n                        67,\n                        8,\n                        11,\n                        -116,\n                        66,\n                        -116,\n                        94,\n                        116,\n                        66,\n                        121,\n                        76,\n                        -37,\n                        66,\n                        -117,\n                        19,\n                        78,\n                        66,\n                        -126,\n                        98,\n                        -52,\n                     
   66,\n                        -117,\n                        -81,\n                        -40,\n                        66,\n                        -111,\n                        -85,\n                        88,\n                        66,\n                        -126,\n                        -29,\n                        -69,\n                        66,\n                        -88,\n                        -7,\n                        -57,\n                        66,\n                        -98,\n                        11,\n                        74,\n                        65,\n                        -37,\n                        -93,\n                        -4,\n                        66,\n                        96,\n                        42,\n                        -108,\n                        66,\n                        -81,\n                        -79,\n                        -108,\n                        65,\n                        -18,\n                        31,\n                        48,\n                        66,\n                        -126,\n                        101,\n                        -124,\n                        64,\n                        -26,\n                        20,\n                        106,\n                        66,\n                        -120,\n                        -85,\n                        42,\n                        66,\n                        -66,\n                        -20,\n                        -61,\n                        66,\n                        -89,\n                        97,\n                        -40,\n                        66,\n                        76,\n                        40,\n                        52,\n                        66,\n                        -104,\n                        105,\n                        71,\n                        66,\n                        -103,\n                        -12,\n                        -36,\n    
                    66,\n                        -126,\n                        -59,\n                        39,\n                        66,\n                        -60,\n                        -91,\n                        106,\n                        66,\n                        121,\n                        -62,\n                        119,\n                        66,\n                        82,\n                        22,\n                        9,\n                        66,\n                        80,\n                        9,\n                        127,\n                        66,\n                        -67,\n                        -96,\n                        28,\n                        66,\n                        -95,\n                        -80,\n                        -43,\n                        64,\n                        -17,\n                        81,\n                        57,\n                        66,\n                        -62,\n                        -59,\n                        -11,\n                        66,\n                        82,\n                        -47,\n                        -96,\n                        65,\n                        17,\n                        1,\n                        108,\n                        66,\n                        101,\n                        -97,\n                        -41,\n                        66,\n                        -98,\n                        120,\n                        -45,\n                        66,\n                        -74,\n                        -71,\n                        36,\n                        66,\n                        -117,\n                        100,\n                        -84,\n                        66,\n                        -71,\n                        -104,\n                        44,\n                        66,\n                        3,\n                        5,\n                        
89,\n                        66,\n                        -74,\n                        108,\n                        -89,\n                        66,\n                        -103,\n                        117,\n                        20,\n                        66,\n                        -108,\n                        70,\n                        -21,\n                        66,\n                        -66,\n                        80,\n                        -44,\n                        66,\n                        68,\n                        25,\n                        -51,\n                        66,\n                        -110,\n                        -115,\n                        -23,\n                        66,\n                        -64,\n                        68,\n                        94,\n                        66,\n                        -66,\n                        -120,\n                        -121,\n                        66,\n                        109,\n                        -85,\n                        125,\n                        66,\n                        -68,\n                        -28,\n                        75,\n                        66,\n                        -88,\n                        -9,\n                        -106,\n                        66,\n                        -60,\n                        -127,\n                        58,\n                        66,\n                        -62,\n                        -62,\n                        -80,\n                        66,\n                        -97,\n                        -106,\n                        4,\n                        66,\n                        -62,\n                        98,\n                        -73,\n                        66,\n                        -79,\n                        115,\n                        -97,\n                        66,\n                        -101,\n                        122,\n     
                   -20,\n                        66,\n                        112,\n                        -1,\n                        -33,\n                        66,\n                        -73,\n                        24,\n                        -74,\n                        66,\n                        -96,\n                        125,\n                        81,\n                        66,\n                        -109,\n                        -28,\n                        -41,\n                        66,\n                        86,\n                        -55,\n                        -66,\n                        66,\n                        -76,\n                        40,\n                        -39,\n                        66,\n                        111,\n                        41,\n                        50,\n                        66,\n                        104,\n                        14,\n                        91,\n                        66,\n                        98,\n                        62,\n                        49,\n                        66,\n                        124,\n                        45,\n                        -39,\n                        66,\n                        -70,\n                        15,\n                        4,\n                        66,\n                        77,\n                        41,\n                        -110,\n                        66,\n                        -93,\n                        -6,\n                        20,\n                        66,\n                        99,\n                        -34,\n                        1,\n                        66,\n                        100,\n                        3,\n                        -51,\n                        66,\n                        77,\n                        -116,\n                        56,\n                        66,\n                        -75,\n                        30,\n       
                 45,\n                        66,\n                        -126,\n                        -62,\n                        35,\n                        66,\n                        -80,\n                        -60,\n                        8,\n                        66,\n                        -124,\n                        -109,\n                        -7,\n                        66,\n                        86,\n                        112,\n                        -118,\n                        66,\n                        78,\n                        33,\n                        -33,\n                        66,\n                        -68,\n                        -124,\n                        -95,\n                        66,\n                        -88,\n                        26,\n                        26,\n                        66,\n                        -104,\n                        77,\n                        38,\n                        66,\n                        -116,\n                        23,\n                        46,\n                        66,\n                        78,\n                        26,\n                        36,\n                        66,\n                        -73,\n                        -34,\n                        -20,\n                        66,\n                        -126,\n                        44,\n                        -102,\n                        66,\n                        -86,\n                        93,\n                        -39,\n                        66,\n                        -60,\n                        93,\n                        -47,\n                        66,\n                        -116,\n                        -41,\n                        -55,\n                        66,\n                        -102,\n                        -118,\n                        108,\n                        66,\n                        -97,\n                      
  -105,\n                        117,\n                        66,\n                        74,\n                        -79,\n                        -27,\n                        63,\n                        -63,\n                        -29,\n                        -25,\n                        66,\n                        -81,\n                        -80,\n                        -2,\n                        66,\n                        -111,\n                        -21,\n                        86,\n                        66,\n                        -68,\n                        115,\n                        -45,\n                        66,\n                        73,\n                        63,\n                        7,\n                        66,\n                        115,\n                        84,\n                        -107,\n                        66,\n                        -127,\n                        105,\n                        -86,\n                        66,\n                        92,\n                        -90,\n                        -57,\n                        66,\n                        -114,\n                        -34,\n                        -122,\n                        66,\n                        88,\n                        -110,\n                        1,\n                        64,\n                        -56,\n                        29,\n                        42,\n                        66,\n                        -104,\n                        1,\n                        -41,\n                        66,\n                        -97,\n                        -78,\n                        -4,\n                        66,\n                        -119,\n                        -30,\n                        -98,\n                        66,\n                        78,\n                        105,\n                        69,\n                        66,\n                        110,\n         
               -20,\n                        -121,\n                        66,\n                        -60,\n                        -35,\n                        -72,\n                        66,\n                        -112,\n                        -1,\n                        15,\n                        66,\n                        -57,\n                        70,\n                        -128,\n                        66,\n                        114,\n                        63,\n                        97,\n                        66,\n                        -111,\n                        -117,\n                        99,\n                        66,\n                        -83,\n                        -24,\n                        -41,\n                        66,\n                        92,\n                        109,\n                        19,\n                        66,\n                        -66,\n                        30,\n                        -66,\n                        66,\n                        -107,\n                        3,\n                        0,\n                        66,\n                        77,\n                        -54,\n                        -44,\n                        66,\n                        -116,\n                        100,\n                        -15,\n                        66,\n                        -71,\n                        -105,\n                        -61,\n                        66,\n                        87,\n                        115,\n                        98,\n                        66,\n                        74,\n                        24,\n                        27,\n                        66,\n                        73,\n                        111,\n                        -59,\n                        66,\n                        -115,\n                        -90,\n                        -58,\n                        66,\n                        
-87,\n                        -36,\n                        51,\n                        66,\n                        -117,\n                        -92,\n                        61,\n                        66,\n                        -94,\n                        -94,\n                        -110,\n                        66,\n                        -104,\n                        114,\n                        66,\n                        66,\n                        -72,\n                        -55,\n                        53,\n                        66,\n                        -64,\n                        -123,\n                        -45,\n                        66,\n                        -107,\n                        -119,\n                        -63,\n                        66,\n                        -79,\n                        -51,\n                        85,\n                        66,\n                        -91,\n                        -4,\n                        22,\n                        66,\n                        116,\n                        -98,\n                        105,\n                        66,\n                        -74,\n                        -25,\n                        -95,\n                        66,\n                        -78,\n                        -99,\n                        119,\n                        66,\n                        69,\n                        100,\n                        -23,\n                        66,\n                        123,\n                        55,\n                        -96,\n                        66,\n                        -76,\n                        -64,\n                        59,\n                        66,\n                        -62,\n                        -82,\n                        -67,\n                        66,\n                        -70,\n                        -12,\n                        18,\n                        66,\n      
                  97,\n                        -99,\n                        -87,\n                        66,\n                        -101,\n                        101,\n                        -49,\n                        66,\n                        103,\n                        0,\n                        18,\n                        66,\n                        -65,\n                        -41,\n                        -114,\n                        66,\n                        76,\n                        -41,\n                        20,\n                        66,\n                        96,\n                        54,\n                        67,\n                        66,\n                        87,\n                        125,\n                        118,\n                        66,\n                        -69,\n                        34,\n                        90,\n                        66,\n                        -120,\n                        0,\n                        4,\n                        66,\n                        -75,\n                        -32,\n                        -116,\n                        66,\n                        -66,\n                        5,\n                        -37,\n                        66,\n                        100,\n                        52,\n                        -67,\n                        66,\n                        -96,\n                        32,\n                        -104,\n                        66,\n                        97,\n                        81,\n                        -87,\n                        66,\n                        96,\n                        -79,\n                        93,\n                        66,\n                        127,\n                        91,\n                        80,\n                        66,\n                        -81,\n                        39,\n                        -43,\n                        66,\n     
                   -99,\n                        27,\n                        -50,\n                        66,\n                        95,\n                        -44,\n                        55,\n                        66,\n                        -111,\n                        -70,\n                        -28,\n                        66,\n                        -76,\n                        41,\n                        -38,\n                        66,\n                        -70,\n                        33,\n                        58,\n                        66,\n                        -108,\n                        -107,\n                        -34,\n                        66,\n                        -61,\n                        75,\n                        -39,\n                        66,\n                        -101,\n                        116,\n                        -67,\n                        66,\n                        102,\n                        -77,\n                        50,\n                        66,\n                        82,\n                        -24,\n                        -56,\n                        66,\n                        -65,\n                        -45,\n                        117,\n                        66,\n                        112,\n                        118,\n                        -25,\n                        66,\n                        -74,\n                        -41,\n                        -73,\n                        66,\n                        -96,\n                        9,\n                        127,\n                        66,\n                        96,\n                        -84,\n                        115,\n                        66,\n                        89,\n                        -7,\n                        -127,\n                        66,\n                        -70,\n                        75,\n                        6,\n                       
 66,\n                        -107,\n                        -42,\n                        -108,\n                        66,\n                        -97,\n                        -29,\n                        -59,\n                        66,\n                        87,\n                        -123,\n                        51,\n                        66,\n                        73,\n                        31,\n                        71,\n                        66,\n                        -64,\n                        -65,\n                        -34,\n                        66,\n                        -113,\n                        -104,\n                        10,\n                        66,\n                        -126,\n                        99,\n                        -85,\n                        66,\n                        74,\n                        -79,\n                        114,\n                        66,\n                        90,\n                        -55,\n                        20,\n                        66,\n                        -70,\n                        3,\n                        17,\n                        66,\n                        -92,\n                        13,\n                        -101,\n                        66,\n                        -101,\n                        -90,\n                        116,\n                        66,\n                        -100,\n                        -117,\n                        -94,\n                        66,\n                        104,\n                        -104,\n                        58,\n                        66,\n                        84,\n                        -62,\n                        79,\n                        66,\n                        99,\n                        -55,\n                        -37,\n                        66,\n                        -97,\n                        88,\n                        -66,\n        
                66,\n                        -125,\n                        59,\n                        26,\n                        66,\n                        -96,\n                        118,\n                        28,\n                        66,\n                        -122,\n                        84,\n                        -93,\n                        66,\n                        78,\n                        -47,\n                        113,\n                        66,\n                        -65,\n                        92,\n                        64,\n                        66,\n                        -74,\n                        40,\n                        102,\n                        66,\n                        -66,\n                        -65,\n                        -22,\n                        66,\n                        87,\n                        115,\n                        -52,\n                        66,\n                        -120,\n                        -5,\n                        -102,\n                        66,\n                        -107,\n                        20,\n                        -90,\n                        66,\n                        -86,\n                        113,\n                        -26,\n                        66,\n                        121,\n                        -99,\n                        124,\n                        66,\n                        -64,\n                        23,\n                        14,\n                        66,\n                        -108,\n                        30,\n                        -44,\n                        66,\n                        113,\n                        31,\n                        11,\n                        66,\n                        79,\n                        -112,\n                        -29,\n                        66,\n                        -80,\n                        -91,\n                        
22,\n                        66,\n                        109,\n                        -92,\n                        -51,\n                        66,\n                        93,\n                        0,\n                        -90,\n                        66,\n                        -121,\n                        -111,\n                        -25,\n                        66,\n                        -62,\n                        118,\n                        21,\n                        66,\n                        -113,\n                        34,\n                        116,\n                        66,\n                        -107,\n                        -99,\n                        45,\n                        66,\n                        -83,\n                        79,\n                        101,\n                        66,\n                        101,\n                        -114,\n                        36,\n                        66,\n                        86,\n                        89,\n                        -54,\n                        66,\n                        76,\n                        -61,\n                        83,\n                        66,\n                        -93,\n                        57,\n                        -6,\n                        66,\n                        83,\n                        47,\n                        -113,\n                        66,\n                        -109,\n                        -89,\n                        -8,\n                        66,\n                        -79,\n                        -45,\n                        -118,\n                        66,\n                        86,\n                        62,\n                        120,\n                        66,\n                        -70,\n                        -108,\n                        -128,\n                        0,\n                        0,\n                        0,\n                
        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 229,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        586632050,\n                        1161551879,\n                        774283210,\n                        1013980649,\n                        758107400,\n                        1140803963,\n                        724700843,\n                        1147909361,\n                        753917911,\n                        1156707913,\n                        1116904261,\n                        982900129,\n                        1,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1160660299,\n                        759776164,\n                        643546618,\n                        581690938,\n                        631086548,\n                        1104609203,\n  
                      772475854,\n                        1018765715,\n                        624737357,\n                        717188386,\n                        643920232,\n                        726273913,\n                        1,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 25,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 25,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 4315119003239682117,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        919976138,\n                        598153421,\n                        911850099,\n                        1058089295,\n                        472597809,\n                        48325977,\n                        184355545,\n                        1025637586,\n                        1067899175,\n                        804300141,\n                        588347067,\n                        780007033,\n                        226026913,\n                        867506907,\n                        
535546303,\n                        486001593,\n                        367601249,\n                        200992302,\n                        995732201,\n                        731609342,\n                        903854405,\n                        460159671,\n                        611882022,\n                        876015154,\n                        720693681,\n                        220133731,\n                        603434450,\n                        207341437,\n                        989431535,\n                        907646381,\n                        1052191299,\n                        381257029,\n                        346068294,\n                        134122171,\n                        759129197,\n                        321561653,\n                        917574851,\n                        784835,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        83,\n                        89,\n                        37,\n                        69,\n                        82,\n                        -50,\n                        5,\n                        66,\n                        -57,\n                        -42,\n                        -67,\n                        66,\n                        75,\n                        -14,\n                        -61,\n                        66,\n                        -104,\n                        -98,\n                        -94,\n                        66,\n                        64,\n                        58,\n                        67,\n                        66,\n                        -100,\n                        -40,\n                        -101,\n                        66,\n                        -61,\n                        -57,\n                        -43,\n             
           66,\n                        -69,\n                        -108,\n                        33,\n                        66,\n                        -85,\n                        -128,\n                        -59,\n                        66,\n                        -76,\n                        -57,\n                        123,\n                        66,\n                        -122,\n                        122,\n                        -6,\n                        66,\n                        93,\n                        -81,\n                        -41,\n                        66,\n                        105,\n                        23,\n                        65,\n                        66,\n                        -91,\n                        -54,\n                        123,\n                        64,\n                        -112,\n                        -84,\n                        114,\n                        66,\n                        -109,\n                        -75,\n                        -26,\n                        66,\n                        113,\n                        24,\n                        113,\n                        66,\n                        85,\n                        -108,\n                        77,\n                        66,\n                        -75,\n                        27,\n                        -12,\n                        66,\n                        -82,\n                        45,\n                        -1,\n                        66,\n                        -67,\n                        56,\n                        -46,\n                        66,\n                        -120,\n                        72,\n                        -118,\n                        66,\n                        106,\n                        92,\n                        -8,\n                        65,\n                        -65,\n                        -14,\n                        
123,\n                        66,\n                        -104,\n                        -75,\n                        120,\n                        66,\n                        -105,\n                        -101,\n                        27,\n                        66,\n                        -69,\n                        69,\n                        -21,\n                        66,\n                        -64,\n                        -32,\n                        -73,\n                        66,\n                        -78,\n                        -77,\n                        35,\n                        66,\n                        -109,\n                        -80,\n                        75,\n                        66,\n                        -58,\n                        -26,\n                        -96,\n                        66,\n                        -113,\n                        -67,\n                        69,\n                        66,\n                        -80,\n                        86,\n                        -5,\n                        66,\n                        -65,\n                        -66,\n                        -49,\n                        66,\n                        -119,\n                        21,\n                        23,\n                        66,\n                        -121,\n                        58,\n                        32,\n                        66,\n                        -83,\n                        -60,\n                        5,\n                        66,\n                        -75,\n                        117,\n                        -42,\n                        66,\n                        -88,\n                        33,\n                        94,\n                        66,\n                        122,\n                        25,\n                        28,\n                        66,\n                        -109,\n                        7,\n            
            -86,\n                        66,\n                        -77,\n                        10,\n                        -91,\n                        66,\n                        -74,\n                        -22,\n                        -67,\n                        66,\n                        90,\n                        -72,\n                        -37,\n                        66,\n                        95,\n                        -105,\n                        57,\n                        66,\n                        -62,\n                        -121,\n                        26,\n                        66,\n                        -67,\n                        104,\n                        -82,\n                        66,\n                        109,\n                        -112,\n                        -127,\n                        66,\n                        -110,\n                        -77,\n                        -43,\n                        66,\n                        -66,\n                        -33,\n                        -102,\n                        66,\n                        -122,\n                        59,\n                        22,\n                        66,\n                        -65,\n                        -78,\n                        52,\n                        66,\n                        72,\n                        -91,\n                        -104,\n                        66,\n                        49,\n                        -65,\n                        49,\n                        66,\n                        87,\n                        -127,\n                        -112,\n                        66,\n                        -110,\n                        -101,\n                        0,\n                        66,\n                        -117,\n                        -29,\n                        -39,\n                        66,\n                        71,\n                     
   57,\n                        33,\n                        66,\n                        113,\n                        -19,\n                        -50,\n                        66,\n                        90,\n                        -90,\n                        -44,\n                        66,\n                        -103,\n                        86,\n                        63,\n                        66,\n                        -116,\n                        123,\n                        30,\n                        66,\n                        -72,\n                        116,\n                        -1,\n                        66,\n                        -107,\n                        -77,\n                        108,\n                        66,\n                        -104,\n                        54,\n                        -4,\n                        66,\n                        -128,\n                        27,\n                        -101,\n                        66,\n                        75,\n                        -43,\n                        61,\n                        66,\n                        -110,\n                        -79,\n                        -93,\n                        66,\n                        106,\n                        -66,\n                        114,\n                        66,\n                        88,\n                        -25,\n                        -58,\n                        66,\n                        70,\n                        -89,\n                        24,\n                        66,\n                        -120,\n                        -124,\n                        -97,\n                        66,\n                        -110,\n                        98,\n                        -1,\n                        66,\n                        -106,\n                        -16,\n                        9,\n                        66,\n                        -112,\n     
                   -44,\n                        10,\n                        66,\n                        114,\n                        38,\n                        -21,\n                        66,\n                        -96,\n                        125,\n                        67,\n                        66,\n                        107,\n                        71,\n                        92,\n                        66,\n                        -81,\n                        -26,\n                        55,\n                        66,\n                        -60,\n                        20,\n                        9,\n                        66,\n                        -60,\n                        49,\n                        -105,\n                        66,\n                        97,\n                        39,\n                        16,\n                        66,\n                        -114,\n                        50,\n                        105,\n                        66,\n                        68,\n                        -36,\n                        -20,\n                        66,\n                        -124,\n                        -56,\n                        39,\n                        66,\n                        -69,\n                        -59,\n                        -14,\n                        66,\n                        115,\n                        92,\n                        -17,\n                        66,\n                        -66,\n                        -104,\n                        118,\n                        66,\n                        69,\n                        -63,\n                        26,\n                        66,\n                        -93,\n                        85,\n                        -52,\n                        66,\n                        -105,\n                        94,\n                        8,\n                        66,\n                        
-67,\n                        -89,\n                        5,\n                        66,\n                        101,\n                        -109,\n                        55,\n                        66,\n                        108,\n                        -81,\n                        18,\n                        66,\n                        -59,\n                        72,\n                        -86,\n                        66,\n                        -92,\n                        -66,\n                        102,\n                        66,\n                        90,\n                        43,\n                        -55,\n                        66,\n                        -102,\n                        -121,\n                        -50,\n                        66,\n                        -61,\n                        115,\n                        -36,\n                        66,\n                        -62,\n                        34,\n                        -112,\n                        66,\n                        -71,\n                        -32,\n                        -61,\n                        66,\n                        -65,\n                        -20,\n                        24,\n                        66,\n                        -104,\n                        -102,\n                        -123,\n                        66,\n                        -99,\n                        -90,\n                        -43,\n                        66,\n                        -94,\n                        34,\n                        56,\n                        66,\n                        105,\n                        -38,\n                        -79,\n                        66,\n                        -101,\n                        21,\n                        -96,\n                        66,\n                        -113,\n                        63,\n                        121,\n                        66,\n     
                   78,\n                        115,\n                        36,\n                        66,\n                        -68,\n                        -119,\n                        -77,\n                        66,\n                        81,\n                        -69,\n                        -40,\n                        66,\n                        -119,\n                        42,\n                        127,\n                        66,\n                        -123,\n                        -60,\n                        123,\n                        66,\n                        -76,\n                        -35,\n                        -55,\n                        66,\n                        104,\n                        16,\n                        1,\n                        66,\n                        -77,\n                        9,\n                        -31,\n                        66,\n                        -68,\n                        -51,\n                        122,\n                        66,\n                        -96,\n                        43,\n                        89,\n                        66,\n                        -120,\n                        73,\n                        -2,\n                        66,\n                        -117,\n                        28,\n                        81,\n                        66,\n                        -78,\n                        35,\n                        108,\n                        66,\n                        -63,\n                        100,\n                        56,\n                        66,\n                        71,\n                        24,\n                        112,\n                        66,\n                        -107,\n                        -35,\n                        10,\n                        66,\n                        -67,\n                        61,\n                        77,\n                        
65,\n                        -21,\n                        -95,\n                        -6,\n                        66,\n                        -109,\n                        -66,\n                        55,\n                        66,\n                        -71,\n                        36,\n                        -32,\n                        66,\n                        -63,\n                        60,\n                        50,\n                        66,\n                        -70,\n                        102,\n                        17,\n                        66,\n                        -104,\n                        104,\n                        57,\n                        66,\n                        -73,\n                        28,\n                        62,\n                        66,\n                        47,\n                        -8,\n                        -81,\n                        66,\n                        -100,\n                        -95,\n                        98,\n                        66,\n                        -95,\n                        81,\n                        93,\n                        66,\n                        92,\n                        13,\n                        71,\n                        66,\n                        -66,\n                        -54,\n                        122,\n                        66,\n                        -63,\n                        77,\n                        50,\n                        66,\n                        -99,\n                        -82,\n                        -97,\n                        66,\n                        92,\n                        74,\n                        -122,\n                        66,\n                        -109,\n                        -118,\n                        -27,\n                        66,\n                        76,\n                        115,\n                        67,\n                  
      66,\n                        -71,\n                        -36,\n                        21,\n                        66,\n                        -111,\n                        71,\n                        -8,\n                        66,\n                        -112,\n                        28,\n                        -97,\n                        66,\n                        -63,\n                        -60,\n                        41,\n                        66,\n                        -119,\n                        -55,\n                        -26,\n                        66,\n                        89,\n                        77,\n                        77,\n                        66,\n                        -109,\n                        -16,\n                        8,\n                        66,\n                        77,\n                        13,\n                        41,\n                        66,\n                        100,\n                        -82,\n                        -88,\n                        66,\n                        -62,\n                        -94,\n                        -42,\n                        66,\n                        -111,\n                        -120,\n                        108,\n                        66,\n                        -116,\n                        -87,\n                        -74,\n                        66,\n                        -85,\n                        -71,\n                        114,\n                        66,\n                        -65,\n                        -107,\n                        5,\n                        66,\n                        -65,\n                        55,\n                        -72,\n                        66,\n                        93,\n                        46,\n                        -12,\n                        66,\n                        82,\n                        82,\n                        55,\n        
                66,\n                        81,\n                        -112,\n                        8,\n                        66,\n                        -90,\n                        97,\n                        -110,\n                        66,\n                        -106,\n                        124,\n                        -113,\n                        66,\n                        80,\n                        99,\n                        -46,\n                        66,\n                        -106,\n                        71,\n                        -109,\n                        66,\n                        99,\n                        -91,\n                        -57,\n                        66,\n                        -98,\n                        -77,\n                        -87,\n                        66,\n                        -61,\n                        -33,\n                        -29,\n                        66,\n                        84,\n                        -59,\n                        -104,\n                        66,\n                        91,\n                        -116,\n                        25,\n                        66,\n                        -111,\n                        -48,\n                        -125,\n                        66,\n                        86,\n                        -124,\n                        -119,\n                        65,\n                        88,\n                        -3,\n                        18,\n                        66,\n                        126,\n                        -53,\n                        -55,\n                        66,\n                        -122,\n                        76,\n                        40,\n                        66,\n                        -108,\n                        -4,\n                        -37,\n                        66,\n                        -91,\n                        -95,\n                   
     -35,\n                        66,\n                        115,\n                        -121,\n                        49,\n                        66,\n                        -92,\n                        -55,\n                        102,\n                        66,\n                        96,\n                        69,\n                        82,\n                        66,\n                        96,\n                        -1,\n                        98,\n                        66,\n                        -88,\n                        -72,\n                        76,\n                        66,\n                        -67,\n                        30,\n                        -29,\n                        66,\n                        -64,\n                        -70,\n                        80,\n                        66,\n                        -118,\n                        -105,\n                        7,\n                        66,\n                        80,\n                        -97,\n                        111,\n                        66,\n                        -100,\n                        -56,\n                        -21,\n                        66,\n                        -71,\n                        41,\n                        -91,\n                        66,\n                        -114,\n                        44,\n                        -88,\n                        66,\n                        103,\n                        -116,\n                        31,\n                        66,\n                        69,\n                        62,\n                        -4,\n                        66,\n                        86,\n                        -94,\n                        -120,\n                        66,\n                        -66,\n                        124,\n                        -51,\n                        66,\n                        -66,\n                        -93,\n         
               -86,\n                        66,\n                        -116,\n                        -101,\n                        -78,\n                        66,\n                        -103,\n                        33,\n                        102,\n                        66,\n                        -79,\n                        -114,\n                        -114,\n                        66,\n                        -74,\n                        -52,\n                        -69,\n                        66,\n                        85,\n                        16,\n                        -30,\n                        66,\n                        -89,\n                        22,\n                        36,\n                        66,\n                        -68,\n                        -27,\n                        19,\n                        66,\n                        -111,\n                        -31,\n                        -102,\n                        66,\n                        86,\n                        -99,\n                        -21,\n                        66,\n                        72,\n                        123,\n                        -8,\n                        66,\n                        -100,\n                        77,\n                        -26,\n                        66,\n                        99,\n                        97,\n                        -105,\n                        66,\n                        -113,\n                        -38,\n                        -9,\n                        66,\n                        -64,\n                        -46,\n                        -26,\n                        66,\n                        70,\n                        7,\n                        83,\n                        66,\n                        -82,\n                        -56,\n                        -22,\n                        66,\n                        -117,\n                      
  53,\n                        89,\n                        66,\n                        -117,\n                        -17,\n                        80,\n                        66,\n                        -79,\n                        61,\n                        77,\n                        66,\n                        -84,\n                        -79,\n                        98,\n                        66,\n                        -95,\n                        -50,\n                        63,\n                        66,\n                        -120,\n                        62,\n                        72,\n                        66,\n                        76,\n                        98,\n                        43,\n                        66,\n                        -80,\n                        43,\n                        43,\n                        66,\n                        -108,\n                        -42,\n                        47,\n                        66,\n                        -62,\n                        69,\n                        -86,\n                        66,\n                        85,\n                        103,\n                        74,\n                        66,\n                        90,\n                        -95,\n                        88,\n                        66,\n                        102,\n                        57,\n                        45,\n                        66,\n                        -62,\n                        124,\n                        -80,\n                        66,\n                        -66,\n                        -57,\n                        -54,\n                        66,\n                        89,\n                        -1,\n                        22,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n   
                     0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n               
         0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 226,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1119035087,\n                        586452526,\n                        731768003,\n                        1112101646,\n                        983611634,\n                        730190915,\n                        712121219,\n                        583259686,\n                        755627417,\n                        712477904,\n                        1102533682,\n                        65339941,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                     
   255,\n                        1162084315,\n                        774289529,\n                        1117434518,\n                        1155167963,\n                        581684128,\n                        729935063,\n                        602647289,\n                        624965108,\n                        983615629,\n                        586682179,\n                        581307889,\n                        64592683,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 28,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 28,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 6333024226119915415,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        725305810,\n                        707364282,\n                        574150990,\n                        392820601,\n                        524205869,\n                        100595622,\n                        601299947,\n                        363825915,\n                        
521203237,\n                        716237767,\n                        503184678,\n                        116342333,\n                        975918205,\n                        443637058,\n                        780005205,\n                        529975137,\n                        124696053,\n                        327093930,\n                        460105249,\n                        325913157,\n                        591047777,\n                        473073847,\n                        213079766,\n                        1033567925,\n                        66516678,\n                        259783970,\n                        401528123,\n                        737008446,\n                        439068133,\n                        514641389,\n                        757262194,\n                        857007535,\n                        513349057,\n                        1016106223,\n                        371975618,\n                        765893559,\n                        799241414,\n                        989324881,\n                        1050789593,\n                        23,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        66,\n                        -69,\n                        50,\n                        -36,\n                        68,\n                        -15,\n                        -80,\n                        -76,\n                        68,\n                        69,\n                        -46,\n                        2,\n                        68,\n                        -115,\n                        115,\n                        3,\n                        66,\n                        88,\n                        47,\n                        -2,\n                        66,\n                        -107,\n                        -60,\n                        59,\n                       
 66,\n                        -67,\n                        123,\n                        105,\n                        66,\n                        -104,\n                        -96,\n                        -74,\n                        66,\n                        -107,\n                        -88,\n                        10,\n                        66,\n                        108,\n                        -38,\n                        41,\n                        66,\n                        -67,\n                        102,\n                        -49,\n                        66,\n                        -116,\n                        -64,\n                        111,\n                        66,\n                        -64,\n                        -72,\n                        -109,\n                        66,\n                        -79,\n                        -46,\n                        72,\n                        66,\n                        -68,\n                        -64,\n                        28,\n                        66,\n                        -105,\n                        69,\n                        69,\n                        66,\n                        71,\n                        87,\n                        84,\n                        66,\n                        -111,\n                        9,\n                        36,\n                        66,\n                        -122,\n                        126,\n                        90,\n                        66,\n                        81,\n                        -107,\n                        -9,\n                        66,\n                        -98,\n                        28,\n                        51,\n                        66,\n                        78,\n                        105,\n                        -19,\n                        66,\n                        -60,\n                        43,\n                        32,\n            
            66,\n                        119,\n                        -80,\n                        -59,\n                        66,\n                        -97,\n                        -37,\n                        -48,\n                        66,\n                        -112,\n                        39,\n                        78,\n                        66,\n                        124,\n                        90,\n                        -61,\n                        66,\n                        -93,\n                        -110,\n                        5,\n                        66,\n                        78,\n                        31,\n                        77,\n                        66,\n                        96,\n                        -14,\n                        -70,\n                        66,\n                        -81,\n                        -13,\n                        -74,\n                        66,\n                        -99,\n                        -28,\n                        -40,\n                        66,\n                        -106,\n                        81,\n                        -6,\n                        66,\n                        -95,\n                        27,\n                        104,\n                        66,\n                        89,\n                        -123,\n                        -2,\n                        66,\n                        -59,\n                        -50,\n                        68,\n                        66,\n                        95,\n                        30,\n                        -110,\n                        66,\n                        104,\n                        40,\n                        -12,\n                        66,\n                        -63,\n                        62,\n                        54,\n                        66,\n                        -83,\n                        50,\n                        -54,\n     
                   66,\n                        -122,\n                        25,\n                        60,\n                        66,\n                        -109,\n                        -50,\n                        -117,\n                        66,\n                        96,\n                        -22,\n                        -102,\n                        66,\n                        104,\n                        -115,\n                        90,\n                        66,\n                        -74,\n                        63,\n                        -60,\n                        66,\n                        79,\n                        26,\n                        39,\n                        66,\n                        -70,\n                        -62,\n                        -58,\n                        66,\n                        -80,\n                        -33,\n                        22,\n                        66,\n                        -109,\n                        67,\n                        68,\n                        66,\n                        -122,\n                        60,\n                        22,\n                        66,\n                        84,\n                        49,\n                        -17,\n                        66,\n                        -116,\n                        -84,\n                        -52,\n                        66,\n                        57,\n                        -22,\n                        119,\n                        66,\n                        46,\n                        11,\n                        18,\n                        66,\n                        68,\n                        26,\n                        -122,\n                        66,\n                        -70,\n                        -16,\n                        -100,\n                        66,\n                        -62,\n                        -117,\n                      
  66,\n                        66,\n                        -114,\n                        -111,\n                        -128,\n                        66,\n                        77,\n                        -18,\n                        11,\n                        66,\n                        -125,\n                        78,\n                        101,\n                        66,\n                        -64,\n                        68,\n                        100,\n                        66,\n                        -118,\n                        57,\n                        -17,\n                        66,\n                        84,\n                        29,\n                        -32,\n                        66,\n                        101,\n                        -90,\n                        2,\n                        65,\n                        43,\n                        10,\n                        -122,\n                        66,\n                        -79,\n                        63,\n                        -112,\n                        66,\n                        -92,\n                        91,\n                        84,\n                        66,\n                        -121,\n                        -114,\n                        -24,\n                        66,\n                        89,\n                        55,\n                        -15,\n                        66,\n                        -76,\n                        110,\n                        -54,\n                        66,\n                        -63,\n                        32,\n                        -43,\n                        66,\n                        76,\n                        38,\n                        -97,\n                        66,\n                        -97,\n                        119,\n                        86,\n                        66,\n                        83,\n                        -121,\n           
             50,\n                        66,\n                        -97,\n                        -13,\n                        -126,\n                        66,\n                        -68,\n                        126,\n                        -78,\n                        66,\n                        -66,\n                        101,\n                        -86,\n                        66,\n                        -100,\n                        45,\n                        83,\n                        66,\n                        -84,\n                        -57,\n                        95,\n                        66,\n                        -74,\n                        100,\n                        -97,\n                        66,\n                        89,\n                        -109,\n                        77,\n                        66,\n                        -77,\n                        -85,\n                        -125,\n                        66,\n                        -112,\n                        121,\n                        -42,\n                        66,\n                        -115,\n                        54,\n                        111,\n                        66,\n                        -124,\n                        95,\n                        -96,\n                        66,\n                        -61,\n                        -106,\n                        -75,\n                        66,\n                        110,\n                        -5,\n                        119,\n                        66,\n                        -123,\n                        -9,\n                        -118,\n                        66,\n                        118,\n                        78,\n                        73,\n                        66,\n                        70,\n                        51,\n                        101,\n                        66,\n                        -102,\n                     
   -35,\n                        -93,\n                        66,\n                        91,\n                        72,\n                        -91,\n                        66,\n                        -98,\n                        -60,\n                        -107,\n                        66,\n                        -114,\n                        114,\n                        -28,\n                        66,\n                        -105,\n                        -13,\n                        26,\n                        66,\n                        83,\n                        -62,\n                        -75,\n                        66,\n                        -122,\n                        -91,\n                        54,\n                        66,\n                        72,\n                        -60,\n                        -75,\n                        66,\n                        -105,\n                        -91,\n                        -116,\n                        66,\n                        -120,\n                        97,\n                        2,\n                        66,\n                        -78,\n                        -27,\n                        -65,\n                        66,\n                        83,\n                        -8,\n                        66,\n                        66,\n                        -70,\n                        101,\n                        19,\n                        66,\n                        -109,\n                        66,\n                        -113,\n                        66,\n                        74,\n                        -24,\n                        -60,\n                        66,\n                        -65,\n                        20,\n                        -109,\n                        66,\n                        108,\n                        12,\n                        -38,\n                        66,\n                        -110,\n    
                    -39,\n                        49,\n                        66,\n                        -111,\n                        118,\n                        -25,\n                        66,\n                        -99,\n                        -96,\n                        -79,\n                        66,\n                        -120,\n                        -105,\n                        62,\n                        66,\n                        -107,\n                        33,\n                        -35,\n                        66,\n                        -61,\n                        -76,\n                        29,\n                        66,\n                        -113,\n                        -2,\n                        17,\n                        66,\n                        -115,\n                        -122,\n                        -75,\n                        66,\n                        -74,\n                        122,\n                        18,\n                        66,\n                        -67,\n                        -113,\n                        -102,\n                        66,\n                        -59,\n                        -92,\n                        15,\n                        66,\n                        -63,\n                        -28,\n                        31,\n                        66,\n                        -101,\n                        107,\n                        12,\n                        66,\n                        -97,\n                        -84,\n                        -77,\n                        66,\n                        109,\n                        46,\n                        -4,\n                        66,\n                        87,\n                        -119,\n                        -33,\n                        66,\n                        -114,\n                        -96,\n                        91,\n                        66,\n             
           71,\n                        -73,\n                        -79,\n                        66,\n                        -115,\n                        110,\n                        36,\n                        66,\n                        88,\n                        33,\n                        118,\n                        66,\n                        -103,\n                        35,\n                        -28,\n                        66,\n                        -62,\n                        57,\n                        64,\n                        66,\n                        -111,\n                        -72,\n                        7,\n                        66,\n                        91,\n                        -62,\n                        -95,\n                        66,\n                        -70,\n                        -70,\n                        -60,\n                        66,\n                        -62,\n                        -25,\n                        -127,\n                        66,\n                        -57,\n                        -7,\n                        74,\n                        66,\n                        -112,\n                        26,\n                        -80,\n                        66,\n                        -82,\n                        -77,\n                        76,\n                        66,\n                        127,\n                        -56,\n                        -31,\n                        66,\n                        -59,\n                        9,\n                        -13,\n                        66,\n                        -114,\n                        -18,\n                        47,\n                        66,\n                        -128,\n                        -27,\n                        -53,\n                        66,\n                        97,\n                        67,\n                        112,\n                        66,\n  
                      -103,\n                        -43,\n                        60,\n                        66,\n                        -108,\n                        16,\n                        48,\n                        66,\n                        -72,\n                        -44,\n                        96,\n                        66,\n                        -76,\n                        -9,\n                        -9,\n                        66,\n                        -63,\n                        -1,\n                        -28,\n                        66,\n                        -117,\n                        45,\n                        103,\n                        66,\n                        -91,\n                        37,\n                        54,\n                        66,\n                        77,\n                        102,\n                        -80,\n                        66,\n                        -105,\n                        91,\n                        25,\n                        66,\n                        -80,\n                        -73,\n                        -104,\n                        66,\n                        -106,\n                        53,\n                        42,\n                        66,\n                        85,\n                        -116,\n                        -103,\n                        66,\n                        77,\n                        125,\n                        -116,\n                        66,\n                        78,\n                        -122,\n                        -86,\n                        66,\n                        97,\n                        106,\n                        58,\n                        66,\n                        105,\n                        -87,\n                        10,\n                        66,\n                        -113,\n                        67,\n                        -41,\n                   
     66,\n                        -128,\n                        99,\n                        46,\n                        66,\n                        -111,\n                        -1,\n                        62,\n                        66,\n                        -86,\n                        5,\n                        25,\n                        66,\n                        83,\n                        -79,\n                        24,\n                        66,\n                        -65,\n                        -11,\n                        -106,\n                        66,\n                        -111,\n                        -95,\n                        51,\n                        66,\n                        -65,\n                        -62,\n                        -89,\n                        66,\n                        90,\n                        -49,\n                        119,\n                        66,\n                        -75,\n                        90,\n                        20,\n                        66,\n                        -113,\n                        67,\n                        73,\n                        66,\n                        -107,\n                        -113,\n                        31,\n                        66,\n                        85,\n                        60,\n                        -127,\n                        66,\n                        -114,\n                        -90,\n                        -111,\n                        66,\n                        97,\n                        -9,\n                        0,\n                        66,\n                        -63,\n                        12,\n                        120,\n                        66,\n                        -118,\n                        114,\n                        -27,\n                        66,\n                        -111,\n                        101,\n                        12,\n        
                66,\n                        74,\n                        -10,\n                        62,\n                        66,\n                        82,\n                        -28,\n                        109,\n                        66,\n                        -84,\n                        -115,\n                        -127,\n                        66,\n                        -61,\n                        -115,\n                        65,\n                        66,\n                        87,\n                        34,\n                        -103,\n                        66,\n                        -92,\n                        39,\n                        -59,\n                        66,\n                        83,\n                        -115,\n                        -99,\n                        66,\n                        -126,\n                        -111,\n                        88,\n                        66,\n                        -112,\n                        -75,\n                        -30,\n                        66,\n                        -64,\n                        100,\n                        -57,\n                        66,\n                        -73,\n                        18,\n                        15,\n                        66,\n                        -127,\n                        -72,\n                        -101,\n                        66,\n                        -115,\n                        124,\n                        63,\n                        66,\n                        -102,\n                        -121,\n                        40,\n                        66,\n                        -101,\n                        92,\n                        -82,\n                        66,\n                        -101,\n                        -90,\n                        -14,\n                        66,\n                        -124,\n                        4,\n               
         17,\n                        66,\n                        -112,\n                        -43,\n                        114,\n                        66,\n                        -68,\n                        -34,\n                        24,\n                        66,\n                        -100,\n                        -44,\n                        -106,\n                        66,\n                        -62,\n                        -66,\n                        -104,\n                        66,\n                        -105,\n                        -66,\n                        106,\n                        66,\n                        84,\n                        113,\n                        -79,\n                        66,\n                        69,\n                        17,\n                        66,\n                        66,\n                        69,\n                        -125,\n                        37,\n                        66,\n                        86,\n                        -91,\n                        40,\n                        66,\n                        -105,\n                        -40,\n                        75,\n                        66,\n                        -98,\n                        -126,\n                        113,\n                        66,\n                        -67,\n                        119,\n                        -123,\n                        66,\n                        -60,\n                        -2,\n                        123,\n                        66,\n                        -70,\n                        33,\n                        -23,\n                        66,\n                        -114,\n                        89,\n                        121,\n                        66,\n                        -121,\n                        -124,\n                        -106,\n                        66,\n                        -70,\n                       
 -26,\n                        -121,\n                        66,\n                        107,\n                        105,\n                        -51,\n                        66,\n                        92,\n                        -114,\n                        33,\n                        66,\n                        -107,\n                        -32,\n                        24,\n                        66,\n                        -63,\n                        -107,\n                        -37,\n                        66,\n                        -116,\n                        -80,\n                        95,\n                        66,\n                        -79,\n                        -90,\n                        -58,\n                        66,\n                        -70,\n                        106,\n                        -32,\n                        66,\n                        -63,\n                        -102,\n                        70,\n                        66,\n                        -66,\n                        56,\n                        -36,\n                        66,\n                        -101,\n                        -9,\n                        -97,\n                        66,\n                        -79,\n                        37,\n                        76,\n                        66,\n                        -92,\n                        91,\n                        21,\n                        66,\n                        88,\n                        7,\n                        107,\n                        66,\n                        -120,\n                        -43,\n                        -74,\n                        66,\n                        -62,\n                        -118,\n                        -39,\n                        66,\n                        -128,\n                        -13,\n                        80,\n                        66,\n                        83,\n      
                  120,\n                        -24,\n                        66,\n                        99,\n                        -90,\n                        101,\n                        66,\n                        -123,\n                        27,\n                        114,\n                        66,\n                        -117,\n                        -70,\n                        126,\n                        66,\n                        -96,\n                        -69,\n                        -9,\n                        66,\n                        -95,\n                        48,\n                        105,\n                        66,\n                        93,\n                        120,\n                        -57,\n                        66,\n                        -93,\n                        104,\n                        44,\n                        66,\n                        73,\n                        15,\n                        53,\n                        66,\n                        81,\n                        -113,\n                        -13,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n              
          0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 235,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1031526980,\n                        1160647460,\n                        624953951,\n                        1147293544,\n                        985265089,\n                        629206132,\n                        717259129,\n                        631289606,\n                        
597605485,\n                        1155152623,\n                        586091902,\n                        586526062,\n                        1093,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1162261427,\n                        1157469749,\n                        629166770,\n                        1104313957,\n                        601859371,\n                        1140806993,\n                        729488185,\n                        712397668,\n                        640915378,\n                        1017098248,\n                        586149910,\n                        602647168,\n                        1174,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 19,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 19,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 7119250912786373213,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                
        255,\n                        371250761,\n                        54467019,\n                        526247358,\n                        733308751,\n                        274331871,\n                        123272041,\n                        90887897,\n                        799383353,\n                        212970423,\n                        765642462,\n                        607554857,\n                        599727953,\n                        1042733287,\n                        900966834,\n                        1067826743,\n                        47425093,\n                        366059497,\n                        212403539,\n                        865144499,\n                        475104463,\n                        317101918,\n                        571837785,\n                        477616993,\n                        762113089,\n                        880712445,\n                        588639557,\n                        249354317,\n                        37284826,\n                        873768693,\n                        764752943,\n                        662042155,\n                        492263526,\n                        45852887,\n                        595420995,\n                        934720617,\n                        121212257,\n                        778082150,\n                        33389169,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        67,\n                        -6,\n                        -78,\n                        -67,\n                        69,\n                        34,\n                        72,\n                        124,\n                        66,\n                        -127,\n                        86,\n                        -4,\n                        66,\n                        -110,\n     
                   98,\n                        63,\n                        68,\n                        -17,\n                        28,\n                        -118,\n                        66,\n                        74,\n                        -114,\n                        11,\n                        66,\n                        111,\n                        93,\n                        -34,\n                        69,\n                        124,\n                        120,\n                        100,\n                        66,\n                        -74,\n                        -124,\n                        -41,\n                        66,\n                        -67,\n                        -41,\n                        56,\n                        66,\n                        107,\n                        98,\n                        33,\n                        66,\n                        107,\n                        -11,\n                        121,\n                        66,\n                        -69,\n                        -1,\n                        -72,\n                        66,\n                        -115,\n                        110,\n                        -108,\n                        66,\n                        45,\n                        -80,\n                        34,\n                        66,\n                        77,\n                        -105,\n                        97,\n                        66,\n                        -122,\n                        28,\n                        -3,\n                        66,\n                        84,\n                        75,\n                        104,\n                        66,\n                        91,\n                        126,\n                        -22,\n                        66,\n                        -61,\n                        -112,\n                        -26,\n                        66,\n                       
 -109,\n                        -39,\n                        76,\n                        66,\n                        -70,\n                        -11,\n                        82,\n                        66,\n                        72,\n                        -14,\n                        8,\n                        66,\n                        -89,\n                        45,\n                        -106,\n                        66,\n                        72,\n                        3,\n                        -108,\n                        66,\n                        -73,\n                        -109,\n                        -85,\n                        66,\n                        -66,\n                        125,\n                        -111,\n                        66,\n                        124,\n                        99,\n                        57,\n                        66,\n                        6,\n                        -126,\n                        114,\n                        65,\n                        22,\n                        20,\n                        -49,\n                        66,\n                        -97,\n                        -20,\n                        -91,\n                        66,\n                        113,\n                        -83,\n                        15,\n                        66,\n                        -96,\n                        -76,\n                        -70,\n                        66,\n                        -90,\n                        42,\n                        54,\n                        66,\n                        -117,\n                        -94,\n                        -65,\n                        66,\n                        108,\n                        69,\n                        9,\n                        66,\n                        -127,\n                        114,\n                        111,\n                        66,\n             
           -63,\n                        -35,\n                        -21,\n                        66,\n                        -121,\n                        22,\n                        101,\n                        66,\n                        -104,\n                        94,\n                        88,\n                        66,\n                        -86,\n                        126,\n                        90,\n                        66,\n                        -73,\n                        20,\n                        -21,\n                        66,\n                        -104,\n                        -29,\n                        -42,\n                        66,\n                        -110,\n                        -18,\n                        -128,\n                        66,\n                        70,\n                        45,\n                        -78,\n                        66,\n                        126,\n                        -16,\n                        -29,\n                        66,\n                        -60,\n                        -109,\n                        52,\n                        66,\n                        70,\n                        114,\n                        19,\n                        66,\n                        127,\n                        64,\n                        -27,\n                        66,\n                        -108,\n                        -115,\n                        -29,\n                        66,\n                        -83,\n                        61,\n                        114,\n                        66,\n                        4,\n                        -28,\n                        -1,\n                        66,\n                        70,\n                        -114,\n                        -16,\n                        66,\n                        -68,\n                        -101,\n                        116,\n                        
66,\n                        -70,\n                        31,\n                        -122,\n                        66,\n                        -65,\n                        31,\n                        15,\n                        66,\n                        -107,\n                        -124,\n                        -40,\n                        66,\n                        -106,\n                        -49,\n                        -122,\n                        66,\n                        -76,\n                        16,\n                        -90,\n                        66,\n                        -60,\n                        99,\n                        -92,\n                        66,\n                        -118,\n                        -58,\n                        -112,\n                        66,\n                        -118,\n                        -118,\n                        -46,\n                        66,\n                        84,\n                        119,\n                        111,\n                        66,\n                        -98,\n                        19,\n                        -84,\n                        66,\n                        -126,\n                        106,\n                        75,\n                        66,\n                        -70,\n                        -59,\n                        31,\n                        66,\n                        -123,\n                        -77,\n                        101,\n                        66,\n                        -62,\n                        62,\n                        -128,\n                        66,\n                        72,\n                        66,\n                        109,\n                        66,\n                        -64,\n                        82,\n                        43,\n                        66,\n                        75,\n                        18,\n                        21,\n      
                  66,\n                        -108,\n                        42,\n                        -46,\n                        66,\n                        99,\n                        -57,\n                        99,\n                        66,\n                        127,\n                        -64,\n                        60,\n                        66,\n                        -67,\n                        109,\n                        81,\n                        66,\n                        -126,\n                        -114,\n                        123,\n                        66,\n                        -67,\n                        -33,\n                        8,\n                        66,\n                        121,\n                        45,\n                        -54,\n                        66,\n                        -78,\n                        51,\n                        5,\n                        66,\n                        -102,\n                        5,\n                        -85,\n                        66,\n                        -65,\n                        -34,\n                        -89,\n                        66,\n                        75,\n                        100,\n                        -17,\n                        66,\n                        68,\n                        51,\n                        -59,\n                        66,\n                        -66,\n                        117,\n                        98,\n                        66,\n                        93,\n                        -43,\n                        -50,\n                        66,\n                        -107,\n                        -127,\n                        -54,\n                        66,\n                        111,\n                        -56,\n                        -9,\n                        66,\n                        121,\n                        -11,\n                        
42,\n                        66,\n                        -69,\n                        -11,\n                        104,\n                        66,\n                        68,\n                        58,\n                        -4,\n                        66,\n                        -108,\n                        28,\n                        56,\n                        66,\n                        -60,\n                        -39,\n                        -21,\n                        66,\n                        -115,\n                        -123,\n                        63,\n                        66,\n                        -122,\n                        -73,\n                        -91,\n                        66,\n                        -113,\n                        -4,\n                        -92,\n                        66,\n                        -104,\n                        -44,\n                        -82,\n                        66,\n                        -106,\n                        -14,\n                        3,\n                        66,\n                        103,\n                        2,\n                        -62,\n                        66,\n                        99,\n                        -33,\n                        -120,\n                        66,\n                        113,\n                        22,\n                        -92,\n                        66,\n                        -112,\n                        3,\n                        39,\n                        66,\n                        -65,\n                        -67,\n                        -21,\n                        66,\n                        86,\n                        -25,\n                        42,\n                        66,\n                        -66,\n                        19,\n                        6,\n                        66,\n                        -116,\n                        27,\n              
          -92,\n                        66,\n                        -87,\n                        -118,\n                        -82,\n                        66,\n                        -60,\n                        35,\n                        -15,\n                        66,\n                        -90,\n                        -62,\n                        -108,\n                        66,\n                        98,\n                        98,\n                        47,\n                        66,\n                        -111,\n                        0,\n                        76,\n                        66,\n                        82,\n                        86,\n                        21,\n                        66,\n                        -85,\n                        80,\n                        -63,\n                        66,\n                        -114,\n                        -35,\n                        -124,\n                        66,\n                        -110,\n                        49,\n                        -84,\n                        66,\n                        102,\n                        -52,\n                        -121,\n                        66,\n                        -65,\n                        -1,\n                        121,\n                        66,\n                        -120,\n                        87,\n                        81,\n                        66,\n                        80,\n                        -36,\n                        -51,\n                        66,\n                        -93,\n                        -114,\n                        -59,\n                        66,\n                        -65,\n                        126,\n                        -33,\n                        66,\n                        -73,\n                        -49,\n                        -20,\n                        66,\n                        -84,\n                        
-97,\n                        -52,\n                        66,\n                        -115,\n                        -67,\n                        -18,\n                        66,\n                        121,\n                        83,\n                        -74,\n                        66,\n                        -70,\n                        -43,\n                        13,\n                        66,\n                        -93,\n                        -45,\n                        -72,\n                        66,\n                        -101,\n                        98,\n                        -98,\n                        66,\n                        -73,\n                        -24,\n                        -42,\n                        66,\n                        115,\n                        77,\n                        -94,\n                        66,\n                        75,\n                        -127,\n                        12,\n                        66,\n                        -104,\n                        117,\n                        -77,\n                        66,\n                        -106,\n                        -24,\n                        112,\n                        66,\n                        -118,\n                        13,\n                        126,\n                        66,\n                        83,\n                        80,\n                        115,\n                        66,\n                        -68,\n                        90,\n                        -69,\n                        65,\n                        -43,\n                        -122,\n                        94,\n                        66,\n                        110,\n                        96,\n                        23,\n                        66,\n                        -63,\n                        47,\n                        -64,\n                        66,\n                        -121,\n       
                 -3,\n                        34,\n                        66,\n                        -81,\n                        44,\n                        100,\n                        66,\n                        72,\n                        46,\n                        -73,\n                        66,\n                        -93,\n                        -53,\n                        55,\n                        66,\n                        -79,\n                        -100,\n                        -48,\n                        66,\n                        -76,\n                        86,\n                        105,\n                        66,\n                        122,\n                        -84,\n                        -49,\n                        66,\n                        119,\n                        -119,\n                        122,\n                        66,\n                        -83,\n                        -83,\n                        -10,\n                        66,\n                        -120,\n                        15,\n                        -42,\n                        66,\n                        121,\n                        56,\n                        -59,\n                        66,\n                        -61,\n                        65,\n                        13,\n                        66,\n                        -99,\n                        -65,\n                        64,\n                        66,\n                        -82,\n                        -38,\n                        60,\n                        66,\n                        125,\n                        -43,\n                        -105,\n                        66,\n                        92,\n                        6,\n                        -93,\n                        66,\n                        -125,\n                        88,\n                        5,\n                        66,\n                        
-118,\n                        -119,\n                        6,\n                        66,\n                        -103,\n                        -109,\n                        32,\n                        66,\n                        -66,\n                        -111,\n                        -62,\n                        66,\n                        -83,\n                        120,\n                        13,\n                        66,\n                        -103,\n                        -60,\n                        -63,\n                        66,\n                        -109,\n                        100,\n                        72,\n                        66,\n                        86,\n                        74,\n                        81,\n                        66,\n                        -87,\n                        -104,\n                        -89,\n                        66,\n                        -84,\n                        -61,\n                        -41,\n                        66,\n                        -67,\n                        -70,\n                        -111,\n                        66,\n                        -121,\n                        -106,\n                        -98,\n                        66,\n                        100,\n                        -73,\n                        107,\n                        66,\n                        -121,\n                        5,\n                        66,\n                        66,\n                        -103,\n                        40,\n                        -42,\n                        66,\n                        71,\n                        -98,\n                        43,\n                        66,\n                        -98,\n                        122,\n                        82,\n                        66,\n                        -103,\n                        -96,\n                        112,\n                        66,\n  
                      -120,\n                        119,\n                        -51,\n                        66,\n                        -61,\n                        -94,\n                        -31,\n                        66,\n                        69,\n                        61,\n                        84,\n                        66,\n                        -99,\n                        90,\n                        -2,\n                        66,\n                        -108,\n                        90,\n                        -85,\n                        66,\n                        -87,\n                        -52,\n                        -56,\n                        66,\n                        -119,\n                        -40,\n                        -127,\n                        66,\n                        -65,\n                        11,\n                        -108,\n                        66,\n                        71,\n                        70,\n                        -106,\n                        66,\n                        -117,\n                        -110,\n                        17,\n                        66,\n                        -105,\n                        3,\n                        117,\n                        66,\n                        84,\n                        -83,\n                        -128,\n                        66,\n                        72,\n                        -52,\n                        -81,\n                        66,\n                        93,\n                        -68,\n                        -109,\n                        66,\n                        -66,\n                        66,\n                        75,\n                        66,\n                        85,\n                        -105,\n                        37,\n                        66,\n                        -80,\n                        73,\n                        -8,\n                 
       66,\n                        -71,\n                        -117,\n                        61,\n                        66,\n                        -107,\n                        -115,\n                        -103,\n                        66,\n                        -77,\n                        -92,\n                        23,\n                        66,\n                        87,\n                        -90,\n                        61,\n                        66,\n                        -79,\n                        -68,\n                        -4,\n                        66,\n                        -71,\n                        -58,\n                        -122,\n                        66,\n                        -127,\n                        38,\n                        6,\n                        66,\n                        110,\n                        -118,\n                        -128,\n                        66,\n                        -121,\n                        -118,\n                        35,\n                        66,\n                        101,\n                        12,\n                        -94,\n                        66,\n                        -74,\n                        102,\n                        -68,\n                        66,\n                        -107,\n                        85,\n                        -70,\n                        66,\n                        -63,\n                        -64,\n                        102,\n                        66,\n                        70,\n                        91,\n                        -42,\n                        66,\n                        -116,\n                        -50,\n                        74,\n                        66,\n                        -103,\n                        39,\n                        -30,\n                        66,\n                        87,\n                        105,\n                        
-23,\n                        66,\n                        -115,\n                        -100,\n                        51,\n                        66,\n                        -116,\n                        -55,\n                        -45,\n                        66,\n                        89,\n                        100,\n                        61,\n                        66,\n                        76,\n                        -58,\n                        25,\n                        66,\n                        -109,\n                        80,\n                        100,\n                        66,\n                        76,\n                        -73,\n                        8,\n                        66,\n                        111,\n                        -52,\n                        -41,\n                        66,\n                        83,\n                        0,\n                        -41,\n                        66,\n                        82,\n                        52,\n                        32,\n                        66,\n                        106,\n                        24,\n                        17,\n                        66,\n                        -67,\n                        -88,\n                        -96,\n                        66,\n                        75,\n                        65,\n                        -53,\n                        66,\n                        -116,\n                        -16,\n                        50,\n                        66,\n                        -109,\n                        -63,\n                        1,\n                        66,\n                        -63,\n                        -14,\n                        124,\n                        66,\n                        80,\n                        -113,\n                        1,\n                        66,\n                        -113,\n                        2,\n                    
    99,\n                        66,\n                        76,\n                        18,\n                        84,\n                        66,\n                        -66,\n                        -46,\n                        -40,\n                        66,\n                        -63,\n                        70,\n                        -4,\n                        66,\n                        100,\n                        50,\n                        -44,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n         
               0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 227,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1031500007,\n                        
758875145,\n                        1155325478,\n                        715844138,\n                        1112243794,\n                        596273638,\n                        1142353088,\n                        729940802,\n                        1011620069,\n                        1016981216,\n                        1097935118,\n                        212845067,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1031497735,\n                        1142890465,\n                        1147827020,\n                        1104681797,\n                        983510630,\n                        586707592,\n                        1112651468,\n                        987879955,\n                        640359319,\n                        1012138873,\n                        753580870,\n                        208304176,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 27,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 27,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -5390658293349841947,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                
\"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        790717654,\n                        588904867,\n                        474147898,\n                        228253029,\n                        120528719,\n                        338146909,\n                        531166966,\n                        60143701,\n                        882735958,\n                        787914407,\n                        1012267954,\n                        525558851,\n                        367957971,\n                        622143046,\n                        761484757,\n                        904996546,\n                        371112575,\n                        128625089,\n                        571927037,\n                        77573037,\n                        1054648271,\n                        796185802,\n                        333035051,\n                        102548827,\n                        733134645,\n                        599582421,\n                        460564471,\n                        534890677,\n                        530100033,\n                        535661734,\n                        198900787,\n                        619286362,\n                        213686742,\n                        584439022,\n                        534562089,\n                        708441634,\n                        311475017,\n                        99,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        67,\n                        -93,\n                        -27,\n                        69,\n           
             47,\n                        80,\n                        -19,\n                        66,\n                        -86,\n                        71,\n                        56,\n                        66,\n                        -92,\n                        98,\n                        -46,\n                        66,\n                        -72,\n                        5,\n                        40,\n                        66,\n                        85,\n                        121,\n                        30,\n                        66,\n                        55,\n                        116,\n                        -7,\n                        66,\n                        -98,\n                        -102,\n                        -121,\n                        66,\n                        -73,\n                        -67,\n                        -52,\n                        66,\n                        -114,\n                        48,\n                        -72,\n                        66,\n                        -120,\n                        -113,\n                        -62,\n                        66,\n                        28,\n                        -59,\n                        -58,\n                        66,\n                        -64,\n                        -121,\n                        -55,\n                        66,\n                        -120,\n                        -28,\n                        72,\n                        66,\n                        45,\n                        113,\n                        49,\n                        66,\n                        69,\n                        -76,\n                        59,\n                        65,\n                        94,\n                        -96,\n                        -120,\n                        66,\n                        -62,\n                        85,\n                        100,\n                        66,\n 
                       -110,\n                        76,\n                        0,\n                        66,\n                        81,\n                        3,\n                        20,\n                        66,\n                        122,\n                        -99,\n                        -100,\n                        66,\n                        69,\n                        104,\n                        74,\n                        66,\n                        -102,\n                        16,\n                        30,\n                        66,\n                        -84,\n                        120,\n                        -87,\n                        66,\n                        56,\n                        -26,\n                        88,\n                        66,\n                        -77,\n                        68,\n                        54,\n                        66,\n                        78,\n                        -28,\n                        33,\n                        66,\n                        -72,\n                        20,\n                        -59,\n                        66,\n                        -70,\n                        96,\n                        121,\n                        66,\n                        71,\n                        -72,\n                        -94,\n                        66,\n                        -111,\n                        46,\n                        37,\n                        66,\n                        -88,\n                        34,\n                        -63,\n                        66,\n                        -98,\n                        125,\n                        126,\n                        66,\n                        85,\n                        56,\n                        37,\n                        66,\n                        -69,\n                        45,\n                        -44,\n                        66,\n 
                       -64,\n                        -40,\n                        63,\n                        66,\n                        -68,\n                        4,\n                        51,\n                        66,\n                        111,\n                        -71,\n                        62,\n                        66,\n                        -108,\n                        -8,\n                        20,\n                        66,\n                        -120,\n                        55,\n                        126,\n                        66,\n                        -80,\n                        116,\n                        39,\n                        66,\n                        79,\n                        -58,\n                        84,\n                        66,\n                        -110,\n                        -120,\n                        -14,\n                        66,\n                        -69,\n                        54,\n                        -119,\n                        66,\n                        -67,\n                        9,\n                        -123,\n                        66,\n                        89,\n                        -48,\n                        65,\n                        66,\n                        -123,\n                        -102,\n                        -39,\n                        66,\n                        -93,\n                        105,\n                        -89,\n                        66,\n                        -87,\n                        118,\n                        47,\n                        66,\n                        -80,\n                        -91,\n                        -99,\n                        66,\n                        -71,\n                        -105,\n                        -92,\n                        66,\n                        -84,\n                        113,\n                        15,\n                
        66,\n                        -97,\n                        -12,\n                        104,\n                        66,\n                        -63,\n                        -51,\n                        123,\n                        66,\n                        89,\n                        -121,\n                        52,\n                        66,\n                        -117,\n                        23,\n                        -43,\n                        66,\n                        117,\n                        -9,\n                        -120,\n                        66,\n                        -114,\n                        -120,\n                        96,\n                        66,\n                        -126,\n                        113,\n                        -40,\n                        66,\n                        100,\n                        -76,\n                        32,\n                        66,\n                        -74,\n                        100,\n                        8,\n                        66,\n                        -104,\n                        26,\n                        120,\n                        66,\n                        -81,\n                        18,\n                        36,\n                        66,\n                        101,\n                        -122,\n                        87,\n                        66,\n                        -99,\n                        -66,\n                        -83,\n                        66,\n                        -73,\n                        58,\n                        -56,\n                        66,\n                        79,\n                        -12,\n                        -5,\n                        66,\n                        -68,\n                        18,\n                        117,\n                        66,\n                        -118,\n                        -5,\n                        -72,\n 
                       66,\n                        -64,\n                        38,\n                        120,\n                        66,\n                        -107,\n                        51,\n                        57,\n                        66,\n                        75,\n                        92,\n                        -118,\n                        66,\n                        74,\n                        27,\n                        -62,\n                        66,\n                        -74,\n                        -71,\n                        25,\n                        66,\n                        -97,\n                        111,\n                        -18,\n                        66,\n                        -116,\n                        -119,\n                        -84,\n                        66,\n                        -62,\n                        -58,\n                        31,\n                        66,\n                        -79,\n                        6,\n                        96,\n                        66,\n                        -77,\n                        -112,\n                        37,\n                        66,\n                        -79,\n                        18,\n                        -74,\n                        66,\n                        -107,\n                        8,\n                        54,\n                        66,\n                        -69,\n                        -64,\n                        106,\n                        66,\n                        -100,\n                        31,\n                        45,\n                        66,\n                        -62,\n                        -43,\n                        -100,\n                        66,\n                        -116,\n                        -53,\n                        0,\n                        66,\n                        -64,\n                        52,\n                     
   75,\n                        66,\n                        -113,\n                        -81,\n                        63,\n                        66,\n                        -70,\n                        -85,\n                        -71,\n                        66,\n                        -59,\n                        -62,\n                        -11,\n                        66,\n                        -59,\n                        -120,\n                        43,\n                        66,\n                        -64,\n                        2,\n                        -55,\n                        66,\n                        -77,\n                        76,\n                        76,\n                        66,\n                        -112,\n                        -27,\n                        56,\n                        66,\n                        -67,\n                        -49,\n                        108,\n                        66,\n                        77,\n                        -32,\n                        75,\n                        66,\n                        -64,\n                        61,\n                        112,\n                        66,\n                        84,\n                        -109,\n                        -96,\n                        66,\n                        98,\n                        1,\n                        -32,\n                        66,\n                        -87,\n                        -25,\n                        50,\n                        66,\n                        -121,\n                        -80,\n                        -102,\n                        66,\n                        -105,\n                        -43,\n                        104,\n                        66,\n                        94,\n                        -70,\n                        113,\n                        66,\n                        -122,\n                        92,\n        
                -117,\n                        66,\n                        -75,\n                        -96,\n                        109,\n                        66,\n                        -72,\n                        104,\n                        -93,\n                        66,\n                        -114,\n                        -60,\n                        102,\n                        66,\n                        -60,\n                        -53,\n                        -107,\n                        66,\n                        105,\n                        125,\n                        97,\n                        66,\n                        -89,\n                        115,\n                        -65,\n                        66,\n                        91,\n                        -58,\n                        96,\n                        66,\n                        -60,\n                        92,\n                        107,\n                        66,\n                        -115,\n                        29,\n                        -64,\n                        66,\n                        -108,\n                        8,\n                        59,\n                        66,\n                        -112,\n                        -108,\n                        -31,\n                        66,\n                        -102,\n                        42,\n                        -38,\n                        66,\n                        -102,\n                        126,\n                        -9,\n                        66,\n                        -70,\n                        -25,\n                        65,\n                        66,\n                        86,\n                        -86,\n                        42,\n                        66,\n                        -108,\n                        116,\n                        38,\n                        66,\n                        -76,\n                   
     -18,\n                        -26,\n                        66,\n                        99,\n                        30,\n                        -68,\n                        66,\n                        -78,\n                        -115,\n                        69,\n                        66,\n                        -109,\n                        -68,\n                        -93,\n                        66,\n                        -118,\n                        -120,\n                        -87,\n                        66,\n                        -127,\n                        -68,\n                        55,\n                        66,\n                        74,\n                        71,\n                        108,\n                        66,\n                        -93,\n                        -91,\n                        -42,\n                        66,\n                        -76,\n                        -3,\n                        -70,\n                        66,\n                        -110,\n                        1,\n                        1,\n                        66,\n                        -114,\n                        -34,\n                        52,\n                        66,\n                        85,\n                        -85,\n                        -101,\n                        66,\n                        79,\n                        65,\n                        -100,\n                        66,\n                        94,\n                        -37,\n                        88,\n                        66,\n                        -99,\n                        92,\n                        -69,\n                        66,\n                        -94,\n                        83,\n                        67,\n                        66,\n                        99,\n                        -113,\n                        -27,\n                        66,\n                        -105,\n      
                  -113,\n                        46,\n                        66,\n                        -108,\n                        120,\n                        -100,\n                        66,\n                        82,\n                        -29,\n                        -63,\n                        66,\n                        -68,\n                        64,\n                        40,\n                        66,\n                        -97,\n                        -52,\n                        -122,\n                        66,\n                        -113,\n                        23,\n                        -110,\n                        66,\n                        -116,\n                        -100,\n                        22,\n                        66,\n                        76,\n                        93,\n                        23,\n                        66,\n                        -116,\n                        13,\n                        6,\n                        66,\n                        124,\n                        80,\n                        124,\n                        66,\n                        99,\n                        -81,\n                        78,\n                        66,\n                        -118,\n                        -127,\n                        -114,\n                        66,\n                        116,\n                        -94,\n                        74,\n                        66,\n                        -124,\n                        95,\n                        -56,\n                        66,\n                        -120,\n                        46,\n                        10,\n                        66,\n                        -68,\n                        95,\n                        -113,\n                        66,\n                        -101,\n                        6,\n                        -14,\n                        66,\n                   
     -116,\n                        -48,\n                        27,\n                        66,\n                        93,\n                        115,\n                        124,\n                        66,\n                        -113,\n                        112,\n                        -44,\n                        66,\n                        110,\n                        37,\n                        61,\n                        66,\n                        71,\n                        67,\n                        -46,\n                        66,\n                        -121,\n                        79,\n                        -53,\n                        66,\n                        114,\n                        66,\n                        -83,\n                        66,\n                        71,\n                        41,\n                        52,\n                        66,\n                        -115,\n                        -28,\n                        72,\n                        66,\n                        -98,\n                        121,\n                        -36,\n                        66,\n                        -102,\n                        -105,\n                        -30,\n                        66,\n                        -65,\n                        77,\n                        -113,\n                        66,\n                        72,\n                        105,\n                        -100,\n                        66,\n                        -68,\n                        -11,\n                        56,\n                        66,\n                        88,\n                        55,\n                        123,\n                        66,\n                        -124,\n                        -98,\n                        112,\n                        66,\n                        -69,\n                        -5,\n                        21,\n                        66,\n      
                  79,\n                        -84,\n                        -43,\n                        66,\n                        -106,\n                        74,\n                        -97,\n                        66,\n                        -102,\n                        -5,\n                        -1,\n                        66,\n                        93,\n                        107,\n                        88,\n                        66,\n                        -65,\n                        78,\n                        25,\n                        66,\n                        -119,\n                        -121,\n                        112,\n                        66,\n                        96,\n                        94,\n                        105,\n                        66,\n                        90,\n                        -13,\n                        -119,\n                        66,\n                        -62,\n                        86,\n                        -8,\n                        66,\n                        94,\n                        85,\n                        -100,\n                        66,\n                        80,\n                        41,\n                        -75,\n                        66,\n                        -114,\n                        90,\n                        -23,\n                        66,\n                        90,\n                        27,\n                        -41,\n                        66,\n                        -97,\n                        -48,\n                        -42,\n                        66,\n                        -115,\n                        49,\n                        -77,\n                        66,\n                        -94,\n                        49,\n                        -43,\n                        66,\n                        -67,\n                        -17,\n                        -30,\n                        
66,\n                        -75,\n                        -101,\n                        -111,\n                        66,\n                        89,\n                        91,\n                        -40,\n                        66,\n                        70,\n                        -49,\n                        -65,\n                        66,\n                        -76,\n                        23,\n                        -115,\n                        66,\n                        -65,\n                        -118,\n                        -90,\n                        66,\n                        -75,\n                        16,\n                        68,\n                        66,\n                        -78,\n                        -81,\n                        -119,\n                        66,\n                        -68,\n                        70,\n                        25,\n                        66,\n                        -100,\n                        -88,\n                        63,\n                        66,\n                        73,\n                        124,\n                        -73,\n                        66,\n                        -72,\n                        48,\n                        -99,\n                        66,\n                        -71,\n                        68,\n                        -27,\n                        66,\n                        74,\n                        88,\n                        -80,\n                        66,\n                        -108,\n                        -17,\n                        86,\n                        66,\n                        114,\n                        90,\n                        8,\n                        66,\n                        -97,\n                        -40,\n                        87,\n                        66,\n                        -116,\n                        88,\n                        -81,\n             
           66,\n                        -110,\n                        120,\n                        34,\n                        66,\n                        -116,\n                        14,\n                        11,\n                        66,\n                        -96,\n                        18,\n                        -10,\n                        66,\n                        -100,\n                        -68,\n                        116,\n                        66,\n                        -122,\n                        -128,\n                        110,\n                        66,\n                        78,\n                        -56,\n                        -13,\n                        66,\n                        -78,\n                        74,\n                        56,\n                        66,\n                        -101,\n                        -59,\n                        -46,\n                        66,\n                        -115,\n                        120,\n                        84,\n                        66,\n                        76,\n                        84,\n                        -19,\n                        66,\n                        77,\n                        -110,\n                        -49,\n                        66,\n                        -102,\n                        55,\n                        89,\n                        66,\n                        -111,\n                        -86,\n                        10,\n                        66,\n                        -72,\n                        19,\n                        -25,\n                        66,\n                        -76,\n                        55,\n                        51,\n                        66,\n                        -108,\n                        -22,\n                        115,\n                        66,\n                        -101,\n                        -50,\n                        
75,\n                        66,\n                        -105,\n                        -57,\n                        70,\n                        66,\n                        81,\n                        70,\n                        -53,\n                        66,\n                        75,\n                        42,\n                        100,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                  
      0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n          
          \"size\": 224,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1112237405,\n                        1157475338,\n                        970168472,\n                        730019626,\n                        1028095307,\n                        716905132,\n                        1118616631,\n                        597253418,\n                        1116904262,\n                        716670170,\n                        624177455,\n                        7174807,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1117066864,\n                        1157299163,\n                        583500320,\n                        1018060645,\n                        769821569,\n                        758697997,\n                        602646439,\n                        772646333,\n                        1098289409,\n                        1104127810,\n                        582728054,\n                        7371553,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 30,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 30,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -721376606247191852,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n 
               \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        993471586,\n                        768789051,\n                        802720882,\n                        240230183,\n                        724497905,\n                        876430671,\n                        1034626670,\n                        130002661,\n                        791844159,\n                        380193277,\n                        468146233,\n                        706381885,\n                        58403877,\n                        396199281,\n                        446029754,\n                        531492421,\n                        195618473,\n                        757852245,\n                        229068193,\n                        920984815,\n                        708438567,\n                        506430822,\n                        258858533,\n                        895073403,\n                        53792599,\n                        804555757,\n                        465537135,\n                        582404898,\n                        707302726,\n                        515423065,\n                        375700174,\n                        880518827,\n                        1059512290,\n                        875783977,\n                        1055211223,\n                        313476475,\n                        868188026,\n                        1288133,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    
],\n                    \"cutValueData\": [\n                        67,\n                        -63,\n                        5,\n                        -26,\n                        65,\n                        35,\n                        -92,\n                        -54,\n                        65,\n                        -101,\n                        -102,\n                        -126,\n                        66,\n                        -100,\n                        110,\n                        38,\n                        65,\n                        -37,\n                        42,\n                        -108,\n                        66,\n                        -118,\n                        83,\n                        97,\n                        65,\n                        44,\n                        -85,\n                        -22,\n                        66,\n                        -122,\n                        123,\n                        -28,\n                        66,\n                        -91,\n                        -111,\n                        0,\n                        66,\n                        -112,\n                        -44,\n                        -23,\n                        66,\n                        37,\n                        -20,\n                        -119,\n                        66,\n                        -66,\n                        -54,\n                        -75,\n                        66,\n                        -102,\n                        86,\n                        -14,\n                        66,\n                        71,\n                        -79,\n                        -58,\n                        66,\n                        79,\n                        -107,\n                        117,\n                        66,\n                        117,\n                        -81,\n                        54,\n                        66,\n                        
-109,\n                        -70,\n                        4,\n                        66,\n                        100,\n                        -8,\n                        -98,\n                        66,\n                        11,\n                        -7,\n                        -122,\n                        66,\n                        -90,\n                        61,\n                        -51,\n                        66,\n                        100,\n                        39,\n                        16,\n                        66,\n                        76,\n                        -29,\n                        -76,\n                        66,\n                        -105,\n                        57,\n                        80,\n                        66,\n                        124,\n                        41,\n                        43,\n                        66,\n                        -110,\n                        50,\n                        96,\n                        66,\n                        16,\n                        -30,\n                        117,\n                        66,\n                        -106,\n                        32,\n                        -98,\n                        66,\n                        -117,\n                        -60,\n                        11,\n                        66,\n                        -63,\n                        7,\n                        23,\n                        66,\n                        62,\n                        -20,\n                        87,\n                        66,\n                        113,\n                        -112,\n                        85,\n                        66,\n                        -72,\n                        -39,\n                        118,\n                        66,\n                        -107,\n                        -95,\n                        47,\n                        66,\n                  
      -67,\n                        91,\n                        117,\n                        66,\n                        93,\n                        66,\n                        17,\n                        66,\n                        -79,\n                        3,\n                        100,\n                        66,\n                        -74,\n                        54,\n                        -63,\n                        66,\n                        124,\n                        -7,\n                        -87,\n                        66,\n                        -124,\n                        -73,\n                        -68,\n                        66,\n                        -59,\n                        -108,\n                        -73,\n                        66,\n                        -64,\n                        39,\n                        3,\n                        66,\n                        -93,\n                        -122,\n                        -86,\n                        66,\n                        -99,\n                        -63,\n                        -58,\n                        66,\n                        93,\n                        -36,\n                        102,\n                        66,\n                        104,\n                        16,\n                        -56,\n                        66,\n                        114,\n                        -32,\n                        16,\n                        66,\n                        109,\n                        24,\n                        -27,\n                        66,\n                        -125,\n                        125,\n                        -104,\n                        66,\n                        93,\n                        -120,\n                        -13,\n                        66,\n                        -113,\n                        -21,\n                        -57,\n                        66,\n    
                    -124,\n                        -21,\n                        -107,\n                        66,\n                        -122,\n                        20,\n                        111,\n                        66,\n                        -119,\n                        10,\n                        10,\n                        66,\n                        72,\n                        90,\n                        -7,\n                        66,\n                        -119,\n                        -88,\n                        36,\n                        66,\n                        68,\n                        115,\n                        -8,\n                        66,\n                        -70,\n                        -68,\n                        -81,\n                        66,\n                        -70,\n                        -127,\n                        78,\n                        66,\n                        -61,\n                        -101,\n                        -79,\n                        66,\n                        76,\n                        127,\n                        -78,\n                        66,\n                        -108,\n                        104,\n                        -40,\n                        66,\n                        -106,\n                        13,\n                        107,\n                        66,\n                        -67,\n                        -63,\n                        115,\n                        66,\n                        -77,\n                        -78,\n                        -122,\n                        66,\n                        -88,\n                        45,\n                        -71,\n                        66,\n                        -99,\n                        -32,\n                        -126,\n                        66,\n                        -111,\n                        -67,\n                        -37,\n           
             66,\n                        -109,\n                        17,\n                        -46,\n                        66,\n                        -79,\n                        109,\n                        103,\n                        66,\n                        -95,\n                        -13,\n                        6,\n                        66,\n                        -103,\n                        -106,\n                        8,\n                        66,\n                        -89,\n                        -55,\n                        71,\n                        66,\n                        -126,\n                        -94,\n                        -56,\n                        66,\n                        -110,\n                        20,\n                        124,\n                        66,\n                        75,\n                        -29,\n                        -92,\n                        66,\n                        -73,\n                        80,\n                        69,\n                        66,\n                        84,\n                        -108,\n                        -125,\n                        66,\n                        -115,\n                        116,\n                        -56,\n                        66,\n                        -98,\n                        -84,\n                        -17,\n                        66,\n                        111,\n                        -15,\n                        120,\n                        66,\n                        -115,\n                        119,\n                        -20,\n                        66,\n                        108,\n                        119,\n                        -90,\n                        66,\n                        -111,\n                        -4,\n                        1,\n                        66,\n                        73,\n                        -104,\n                      
  -33,\n                        66,\n                        -80,\n                        -85,\n                        -115,\n                        66,\n                        -103,\n                        -121,\n                        88,\n                        66,\n                        108,\n                        -46,\n                        53,\n                        66,\n                        118,\n                        55,\n                        -70,\n                        66,\n                        -125,\n                        123,\n                        32,\n                        66,\n                        -81,\n                        106,\n                        82,\n                        66,\n                        -101,\n                        55,\n                        -120,\n                        66,\n                        -79,\n                        107,\n                        78,\n                        66,\n                        74,\n                        78,\n                        -89,\n                        66,\n                        71,\n                        -12,\n                        -53,\n                        66,\n                        -73,\n                        21,\n                        -24,\n                        66,\n                        74,\n                        42,\n                        -106,\n                        66,\n                        -118,\n                        -40,\n                        110,\n                        66,\n                        -107,\n                        2,\n                        -105,\n                        66,\n                        -114,\n                        -118,\n                        -9,\n                        66,\n                        -106,\n                        13,\n                        52,\n                        66,\n                        -65,\n                        116,\n    
                    -69,\n                        66,\n                        -106,\n                        26,\n                        23,\n                        66,\n                        -102,\n                        -112,\n                        -2,\n                        66,\n                        -62,\n                        106,\n                        104,\n                        66,\n                        -87,\n                        -30,\n                        -3,\n                        66,\n                        70,\n                        32,\n                        5,\n                        66,\n                        -62,\n                        -33,\n                        -118,\n                        66,\n                        -70,\n                        -70,\n                        50,\n                        66,\n                        -106,\n                        67,\n                        -24,\n                        66,\n                        -110,\n                        85,\n                        -73,\n                        66,\n                        95,\n                        -95,\n                        48,\n                        66,\n                        -77,\n                        96,\n                        35,\n                        66,\n                        -70,\n                        86,\n                        53,\n                        66,\n                        -59,\n                        -121,\n                        -104,\n                        66,\n                        80,\n                        -91,\n                        5,\n                        66,\n                        77,\n                        -125,\n                        -127,\n                        66,\n                        101,\n                        -121,\n                        55,\n                        66,\n                        -81,\n                      
  102,\n                        -91,\n                        66,\n                        -66,\n                        -6,\n                        -81,\n                        66,\n                        109,\n                        72,\n                        -45,\n                        66,\n                        78,\n                        71,\n                        58,\n                        66,\n                        -115,\n                        68,\n                        -4,\n                        66,\n                        -67,\n                        77,\n                        54,\n                        66,\n                        118,\n                        77,\n                        -49,\n                        66,\n                        126,\n                        -52,\n                        -43,\n                        66,\n                        -93,\n                        -47,\n                        -110,\n                        66,\n                        -62,\n                        -6,\n                        -102,\n                        66,\n                        75,\n                        -91,\n                        -92,\n                        66,\n                        -99,\n                        68,\n                        -126,\n                        66,\n                        115,\n                        51,\n                        -23,\n                        66,\n                        -66,\n                        99,\n                        0,\n                        66,\n                        96,\n                        -118,\n                        117,\n                        66,\n                        -115,\n                        -15,\n                        12,\n                        66,\n                        -115,\n                        -18,\n                        -64,\n                        66,\n                        88,\n            
            -89,\n                        -71,\n                        66,\n                        81,\n                        -50,\n                        4,\n                        66,\n                        -65,\n                        111,\n                        -67,\n                        66,\n                        100,\n                        15,\n                        -22,\n                        66,\n                        98,\n                        124,\n                        92,\n                        66,\n                        110,\n                        -115,\n                        53,\n                        66,\n                        105,\n                        11,\n                        61,\n                        66,\n                        96,\n                        -115,\n                        29,\n                        66,\n                        -107,\n                        38,\n                        45,\n                        66,\n                        -88,\n                        17,\n                        -126,\n                        66,\n                        -119,\n                        69,\n                        -66,\n                        66,\n                        -64,\n                        6,\n                        60,\n                        66,\n                        101,\n                        119,\n                        -101,\n                        66,\n                        -106,\n                        25,\n                        -65,\n                        66,\n                        80,\n                        -127,\n                        -40,\n                        66,\n                        -109,\n                        87,\n                        44,\n                        66,\n                        -110,\n                        84,\n                        -43,\n                        66,\n                        104,\n  
                      -33,\n                        122,\n                        66,\n                        -63,\n                        -59,\n                        71,\n                        66,\n                        -110,\n                        -5,\n                        39,\n                        66,\n                        89,\n                        127,\n                        -80,\n                        66,\n                        89,\n                        -91,\n                        -96,\n                        66,\n                        83,\n                        105,\n                        28,\n                        66,\n                        120,\n                        -38,\n                        50,\n                        66,\n                        -81,\n                        -39,\n                        -21,\n                        66,\n                        -123,\n                        73,\n                        -109,\n                        66,\n                        105,\n                        -69,\n                        75,\n                        66,\n                        -105,\n                        49,\n                        -107,\n                        66,\n                        -72,\n                        -108,\n                        -104,\n                        66,\n                        -105,\n                        -6,\n                        50,\n                        66,\n                        -67,\n                        -114,\n                        -106,\n                        66,\n                        -109,\n                        -46,\n                        -1,\n                        66,\n                        93,\n                        80,\n                        -14,\n                        66,\n                        -125,\n                        -103,\n                        6,\n                        66,\n              
          -66,\n                        -84,\n                        -24,\n                        66,\n                        -73,\n                        -73,\n                        -94,\n                        66,\n                        -114,\n                        32,\n                        -98,\n                        66,\n                        -120,\n                        123,\n                        72,\n                        66,\n                        -63,\n                        -10,\n                        63,\n                        66,\n                        -97,\n                        -95,\n                        90,\n                        66,\n                        -112,\n                        -99,\n                        -49,\n                        66,\n                        -61,\n                        117,\n                        0,\n                        66,\n                        -64,\n                        -94,\n                        27,\n                        66,\n                        -112,\n                        46,\n                        -97,\n                        66,\n                        86,\n                        19,\n                        -23,\n                        66,\n                        93,\n                        110,\n                        -79,\n                        66,\n                        -62,\n                        26,\n                        -127,\n                        66,\n                        -68,\n                        -107,\n                        -30,\n                        66,\n                        -78,\n                        65,\n                        -30,\n                        66,\n                        -104,\n                        -107,\n                        87,\n                        66,\n                        -79,\n                        77,\n                        34,\n                        
66,\n                        84,\n                        102,\n                        58,\n                        66,\n                        119,\n                        -69,\n                        84,\n                        66,\n                        -111,\n                        -88,\n                        0,\n                        66,\n                        -116,\n                        -115,\n                        48,\n                        66,\n                        77,\n                        -110,\n                        2,\n                        66,\n                        89,\n                        109,\n                        -105,\n                        66,\n                        -71,\n                        110,\n                        109,\n                        66,\n                        -81,\n                        60,\n                        -76,\n                        66,\n                        73,\n                        -85,\n                        -32,\n                        66,\n                        111,\n                        71,\n                        -108,\n                        66,\n                        -79,\n                        -82,\n                        -83,\n                        66,\n                        -59,\n                        -45,\n                        -72,\n                        66,\n                        73,\n                        -117,\n                        25,\n                        66,\n                        -102,\n                        10,\n                        -7,\n                        66,\n                        -109,\n                        30,\n                        -103,\n                        66,\n                        -105,\n                        96,\n                        -110,\n                        66,\n                        -72,\n                        -48,\n                        53,\n         
               66,\n                        86,\n                        -76,\n                        74,\n                        66,\n                        -73,\n                        31,\n                        5,\n                        66,\n                        87,\n                        -68,\n                        83,\n                        66,\n                        -65,\n                        -13,\n                        98,\n                        66,\n                        120,\n                        -74,\n                        6,\n                        66,\n                        -70,\n                        117,\n                        45,\n                        66,\n                        -63,\n                        -56,\n                        -125,\n                        66,\n                        119,\n                        28,\n                        92,\n                        66,\n                        88,\n                        67,\n                        98,\n                        66,\n                        90,\n                        -59,\n                        -77,\n                        66,\n                        -107,\n                        -106,\n                        77,\n                        66,\n                        -102,\n                        -64,\n                        -45,\n                        66,\n                        -62,\n                        -108,\n                        79,\n                        66,\n                        -101,\n                        13,\n                        116,\n                        66,\n                        -62,\n                        19,\n                        -89,\n                        66,\n                        97,\n                        -112,\n                        27,\n                        66,\n                        82,\n                        -54,\n                        86,\n    
                    66,\n                        -75,\n                        -17,\n                        -29,\n                        66,\n                        115,\n                        117,\n                        -95,\n                        66,\n                        -104,\n                        109,\n                        -93,\n                        66,\n                        -110,\n                        -25,\n                        -14,\n                        66,\n                        -64,\n                        33,\n                        -4,\n                        66,\n                        -102,\n                        -9,\n                        48,\n                        66,\n                        -128,\n                        -10,\n                        28,\n                        66,\n                        -109,\n                        -23,\n                        93,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                 
       0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n 
                       0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 227,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        758363846,\n                        1033114000,\n                        602653985,\n                        1147673933,\n                        975107957,\n                        624945118,\n                        1030986049,\n                        1032580345,\n                        1018529530,\n                        1030985788,\n                        1027542172,\n                        238358821,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1160487538,\n                        1033121209,\n                        1114411118,\n                        1147831316,\n                        1026543992,\n                        629748499,\n                        970154401,\n                        595538690,\n                        1142595601,\n                        1017155110,\n                        726748519,\n                        195313423,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 27,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 27,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 3064446777312786653,\n                
\"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        525536174,\n                        127760097,\n                        320911086,\n                        171375735,\n                        917425314,\n                        226089954,\n                        637343330,\n                        1016292691,\n                        1017764653,\n                        627886389,\n                        334409029,\n                        1071285746,\n                        465505389,\n                        236438983,\n                        1038801102,\n                        35575509,\n                        64149302,\n                        396273830,\n                        264093143,\n                        324760629,\n                        572319814,\n                        118798133,\n                        265658314,\n                        34915875,\n                        381288249,\n                        207036323,\n                        43732163,\n                        96458681,\n                        321687411,\n                        481089206,\n                        196128550,\n                        459990567,\n                        1038163145,\n                        215293857,\n                        996015725,\n    
                    866553421,\n                        872072570,\n                        509917042,\n                        10,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -66,\n                        32,\n                        -18,\n                        67,\n                        121,\n                        65,\n                        77,\n                        68,\n                        1,\n                        39,\n                        -22,\n                        69,\n                        117,\n                        -56,\n                        -3,\n                        66,\n                        -82,\n                        115,\n                        20,\n                        66,\n                        86,\n                        57,\n                        -3,\n                        66,\n                        -105,\n                        119,\n                        -57,\n                        65,\n                        -75,\n                        123,\n                        49,\n                        66,\n                        -88,\n                        11,\n                        -54,\n                        66,\n                        -92,\n                        -123,\n                        -72,\n                        66,\n                        -113,\n                        32,\n                        20,\n                        66,\n                        51,\n                        -88,\n                        3,\n                        66,\n                        -89,\n                        77,\n                        32,\n                        66,\n                        -128,\n                        -88,\n                        -74,\n                        66,\n            
            124,\n                        124,\n                        125,\n                        65,\n                        -51,\n                        -13,\n                        69,\n                        66,\n                        -84,\n                        -78,\n                        60,\n                        66,\n                        -88,\n                        42,\n                        104,\n                        66,\n                        -118,\n                        51,\n                        105,\n                        66,\n                        76,\n                        -6,\n                        16,\n                        66,\n                        89,\n                        33,\n                        65,\n                        66,\n                        -110,\n                        -91,\n                        -56,\n                        66,\n                        90,\n                        -26,\n                        -123,\n                        66,\n                        -101,\n                        -55,\n                        -16,\n                        66,\n                        -65,\n                        51,\n                        -58,\n                        66,\n                        -88,\n                        108,\n                        -2,\n                        66,\n                        110,\n                        26,\n                        85,\n                        66,\n                        -111,\n                        -25,\n                        -96,\n                        66,\n                        -96,\n                        -6,\n                        33,\n                        66,\n                        -119,\n                        111,\n                        126,\n                        66,\n                        -84,\n                        -47,\n                        4,\n                        66,\n  
                      94,\n                        99,\n                        80,\n                        66,\n                        -82,\n                        35,\n                        -84,\n                        65,\n                        -128,\n                        -5,\n                        122,\n                        66,\n                        84,\n                        -2,\n                        -84,\n                        66,\n                        -94,\n                        33,\n                        20,\n                        66,\n                        -79,\n                        -85,\n                        35,\n                        65,\n                        109,\n                        105,\n                        -27,\n                        66,\n                        -116,\n                        -43,\n                        17,\n                        66,\n                        -71,\n                        -9,\n                        46,\n                        66,\n                        81,\n                        -116,\n                        21,\n                        66,\n                        -90,\n                        -93,\n                        124,\n                        66,\n                        85,\n                        -84,\n                        34,\n                        66,\n                        -74,\n                        53,\n                        0,\n                        66,\n                        -102,\n                        66,\n                        78,\n                        66,\n                        -97,\n                        99,\n                        3,\n                        66,\n                        -121,\n                        123,\n                        97,\n                        66,\n                        -71,\n                        18,\n                        -25,\n                        66,\n 
                       -115,\n                        -71,\n                        -55,\n                        66,\n                        -113,\n                        124,\n                        26,\n                        66,\n                        -64,\n                        -41,\n                        21,\n                        66,\n                        79,\n                        86,\n                        46,\n                        66,\n                        -91,\n                        99,\n                        74,\n                        66,\n                        -82,\n                        101,\n                        -27,\n                        66,\n                        -126,\n                        -50,\n                        3,\n                        66,\n                        -117,\n                        -52,\n                        -111,\n                        66,\n                        -64,\n                        -19,\n                        100,\n                        66,\n                        -108,\n                        74,\n                        -14,\n                        66,\n                        -71,\n                        7,\n                        -62,\n                        66,\n                        -74,\n                        75,\n                        -66,\n                        66,\n                        -94,\n                        5,\n                        93,\n                        66,\n                        -90,\n                        -119,\n                        37,\n                        66,\n                        -128,\n                        -28,\n                        3,\n                        66,\n                        -101,\n                        -124,\n                        7,\n                        66,\n                        -73,\n                        101,\n                        -6,\n                   
     66,\n                        -90,\n                        105,\n                        22,\n                        66,\n                        -69,\n                        100,\n                        86,\n                        66,\n                        93,\n                        30,\n                        -97,\n                        66,\n                        -85,\n                        101,\n                        -44,\n                        66,\n                        -117,\n                        -83,\n                        -115,\n                        66,\n                        -116,\n                        -98,\n                        -63,\n                        66,\n                        112,\n                        49,\n                        15,\n                        66,\n                        -102,\n                        -98,\n                        -117,\n                        66,\n                        95,\n                        -107,\n                        60,\n                        66,\n                        95,\n                        -53,\n                        -79,\n                        66,\n                        -67,\n                        69,\n                        47,\n                        66,\n                        113,\n                        103,\n                        -111,\n                        66,\n                        -103,\n                        42,\n                        96,\n                        66,\n                        115,\n                        48,\n                        63,\n                        66,\n                        -64,\n                        -109,\n                        -58,\n                        66,\n                        -117,\n                        -36,\n                        -121,\n                        66,\n                        101,\n                        -120,\n                        
63,\n                        66,\n                        -100,\n                        89,\n                        -24,\n                        66,\n                        97,\n                        -37,\n                        -62,\n                        66,\n                        -87,\n                        -31,\n                        -126,\n                        66,\n                        -81,\n                        32,\n                        -110,\n                        66,\n                        -61,\n                        82,\n                        61,\n                        66,\n                        -101,\n                        -108,\n                        -114,\n                        66,\n                        -60,\n                        110,\n                        -93,\n                        66,\n                        -84,\n                        -69,\n                        94,\n                        66,\n                        -94,\n                        -38,\n                        -95,\n                        66,\n                        -60,\n                        -61,\n                        123,\n                        66,\n                        -106,\n                        120,\n                        -128,\n                        66,\n                        -116,\n                        110,\n                        27,\n                        66,\n                        -99,\n                        113,\n                        65,\n                        66,\n                        -91,\n                        106,\n                        71,\n                        66,\n                        -72,\n                        -38,\n                        -19,\n                        66,\n                        -96,\n                        -89,\n                        52,\n                        66,\n                        -122,\n                        116,\n  
                      -2,\n                        66,\n                        -100,\n                        94,\n                        44,\n                        66,\n                        -108,\n                        60,\n                        -50,\n                        66,\n                        -97,\n                        9,\n                        -128,\n                        66,\n                        -72,\n                        40,\n                        26,\n                        66,\n                        -107,\n                        -8,\n                        -19,\n                        66,\n                        -78,\n                        2,\n                        -71,\n                        66,\n                        -124,\n                        95,\n                        1,\n                        66,\n                        -126,\n                        79,\n                        -16,\n                        66,\n                        94,\n                        -105,\n                        40,\n                        66,\n                        72,\n                        -114,\n                        -106,\n                        66,\n                        -69,\n                        17,\n                        127,\n                        66,\n                        81,\n                        56,\n                        -77,\n                        66,\n                        80,\n                        119,\n                        -48,\n                        66,\n                        -127,\n                        -86,\n                        -89,\n                        66,\n                        89,\n                        60,\n                        -27,\n                        66,\n                        -104,\n                        106,\n                        65,\n                        66,\n                        -106,\n                      
  21,\n                        -115,\n                        66,\n                        -97,\n                        -25,\n                        -105,\n                        66,\n                        -84,\n                        -5,\n                        -108,\n                        66,\n                        -94,\n                        44,\n                        -91,\n                        66,\n                        -113,\n                        70,\n                        106,\n                        66,\n                        -83,\n                        -119,\n                        39,\n                        66,\n                        -74,\n                        54,\n                        42,\n                        66,\n                        -78,\n                        -10,\n                        127,\n                        66,\n                        -107,\n                        -81,\n                        110,\n                        66,\n                        -127,\n                        113,\n                        -95,\n                        66,\n                        -116,\n                        3,\n                        -47,\n                        66,\n                        -113,\n                        17,\n                        58,\n                        66,\n                        -123,\n                        75,\n                        13,\n                        66,\n                        -123,\n                        61,\n                        126,\n                        66,\n                        -117,\n                        -46,\n                        69,\n                        66,\n                        -105,\n                        -51,\n                        42,\n                        66,\n                        89,\n                        3,\n                        68,\n                        66,\n                        -75,\n      
                  84,\n                        -47,\n                        66,\n                        -64,\n                        72,\n                        51,\n                        66,\n                        83,\n                        -91,\n                        73,\n                        66,\n                        87,\n                        99,\n                        -47,\n                        66,\n                        -109,\n                        122,\n                        33,\n                        66,\n                        99,\n                        -103,\n                        -56,\n                        66,\n                        72,\n                        124,\n                        57,\n                        66,\n                        -106,\n                        58,\n                        -29,\n                        66,\n                        -125,\n                        -57,\n                        75,\n                        66,\n                        -98,\n                        -67,\n                        117,\n                        66,\n                        -107,\n                        -127,\n                        5,\n                        66,\n                        -111,\n                        -17,\n                        86,\n                        66,\n                        -111,\n                        18,\n                        104,\n                        66,\n                        -102,\n                        -63,\n                        -123,\n                        66,\n                        111,\n                        -5,\n                        -33,\n                        66,\n                        69,\n                        90,\n                        37,\n                        66,\n                        75,\n                        -34,\n                        41,\n                        66,\n                        
73,\n                        -14,\n                        -69,\n                        66,\n                        79,\n                        50,\n                        -11,\n                        66,\n                        -122,\n                        -38,\n                        9,\n                        66,\n                        93,\n                        -124,\n                        -57,\n                        66,\n                        -61,\n                        116,\n                        -110,\n                        66,\n                        -117,\n                        -70,\n                        -79,\n                        66,\n                        -59,\n                        10,\n                        36,\n                        66,\n                        68,\n                        -74,\n                        38,\n                        66,\n                        -69,\n                        46,\n                        -54,\n                        66,\n                        107,\n                        36,\n                        78,\n                        66,\n                        -74,\n                        13,\n                        -14,\n                        66,\n                        -101,\n                        56,\n                        5,\n                        66,\n                        -110,\n                        34,\n                        -41,\n                        66,\n                        -107,\n                        -41,\n                        -128,\n                        66,\n                        -102,\n                        -77,\n                        -105,\n                        66,\n                        -119,\n                        76,\n                        125,\n                        66,\n                        122,\n                        49,\n                        94,\n                        66,\n           
             82,\n                        92,\n                        94,\n                        66,\n                        -64,\n                        82,\n                        -9,\n                        66,\n                        79,\n                        98,\n                        89,\n                        66,\n                        91,\n                        13,\n                        -114,\n                        66,\n                        82,\n                        -113,\n                        -7,\n                        66,\n                        -89,\n                        55,\n                        -97,\n                        66,\n                        -76,\n                        11,\n                        27,\n                        66,\n                        -118,\n                        -2,\n                        -4,\n                        66,\n                        -84,\n                        53,\n                        -89,\n                        66,\n                        -110,\n                        -74,\n                        -127,\n                        66,\n                        -108,\n                        -38,\n                        -14,\n                        66,\n                        -118,\n                        88,\n                        -99,\n                        66,\n                        -80,\n                        -1,\n                        -106,\n                        66,\n                        -76,\n                        -99,\n                        29,\n                        66,\n                        -74,\n                        -79,\n                        65,\n                        66,\n                        -95,\n                        105,\n                        -83,\n                        66,\n                        68,\n                        -88,\n                        -98,\n                        66,\n   
                     -122,\n                        25,\n                        17,\n                        66,\n                        84,\n                        -50,\n                        63,\n                        66,\n                        -109,\n                        51,\n                        -8,\n                        66,\n                        78,\n                        84,\n                        9,\n                        66,\n                        -103,\n                        127,\n                        114,\n                        66,\n                        -112,\n                        75,\n                        -94,\n                        66,\n                        -109,\n                        -9,\n                        22,\n                        66,\n                        -62,\n                        25,\n                        65,\n                        66,\n                        -104,\n                        -8,\n                        -54,\n                        66,\n                        -101,\n                        -126,\n                        7,\n                        66,\n                        -92,\n                        8,\n                        -79,\n                        66,\n                        87,\n                        9,\n                        96,\n                        66,\n                        -65,\n                        -14,\n                        -40,\n                        66,\n                        -75,\n                        74,\n                        -5,\n                        66,\n                        -79,\n                        -121,\n                        117,\n                        66,\n                        -121,\n                        115,\n                        -43,\n                        66,\n                        -110,\n                        -122,\n                        -27,\n                       
 66,\n                        77,\n                        -59,\n                        -87,\n                        66,\n                        -63,\n                        -19,\n                        102,\n                        66,\n                        -121,\n                        -67,\n                        3,\n                        66,\n                        -64,\n                        59,\n                        -91,\n                        66,\n                        -119,\n                        -6,\n                        -65,\n                        66,\n                        87,\n                        -45,\n                        -115,\n                        66,\n                        102,\n                        -23,\n                        60,\n                        66,\n                        81,\n                        -81,\n                        -5,\n                        66,\n                        -118,\n                        -114,\n                        -26,\n                        66,\n                        -95,\n                        38,\n                        -22,\n                        66,\n                        -111,\n                        -49,\n                        -90,\n                        66,\n                        -65,\n                        -45,\n                        100,\n                        66,\n                        94,\n                        -52,\n                        -115,\n                        66,\n                        -106,\n                        38,\n                        -127,\n                        66,\n                        -65,\n                        -89,\n                        26,\n                        66,\n                        -109,\n                        -8,\n                        122,\n                        66,\n                        -59,\n                        107,\n                        -43,\n    
                    66,\n                        93,\n                        17,\n                        50,\n                        66,\n                        -105,\n                        77,\n                        43,\n                        66,\n                        -114,\n                        -119,\n                        -61,\n                        66,\n                        101,\n                        -35,\n                        -74,\n                        66,\n                        -116,\n                        -71,\n                        -42,\n                        66,\n                        -60,\n                        -20,\n                        -53,\n                        66,\n                        91,\n                        49,\n                        81,\n                        66,\n                        -99,\n                        45,\n                        -100,\n                        66,\n                        -112,\n                        -21,\n                        83,\n                        66,\n                        -59,\n                        -111,\n                        -36,\n                        66,\n                        113,\n                        73,\n                        -21,\n                        66,\n                        -69,\n                        97,\n                        -9,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                      
  0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n      
                  0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 229,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        759940217,\n                        597851188,\n                        710862452,\n                        1147299142,\n                        1018214702,\n                        1141329659,\n                        638607614,\n                        626310868,\n                        724876739,\n                        629196647,\n                        753337382,\n                        754052459,\n                        1,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        759953299,\n                        1118623282,\n                        770038325,\n                        1013253622,\n                        1142418806,\n                        1013979832,\n                        645521126,\n                        1032858751,\n                        600324118,\n                        725328427,\n                        1097770954,\n                        581668981,\n                        1,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 25,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 
25,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 3783375162460323258,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        242072258,\n                        191800929,\n                        203791159,\n                        454404915,\n                        505204563,\n                        656883261,\n                        519542867,\n                        195778297,\n                        775481585,\n                        735769639,\n                        476420433,\n                        761706846,\n                        922314183,\n                        630279338,\n                        737627169,\n                        446614269,\n                        603625393,\n                        664086629,\n                        1008561203,\n                        184010425,\n                        857847133,\n                        357600482,\n                        400676778,\n                        55664323,\n                        1034286397,\n                        237479118,\n                        215256139,\n                        311630131,\n              
          1045764134,\n                        259042911,\n                        68938075,\n                        170233683,\n                        1065034913,\n                        124249926,\n                        578258254,\n                        1031211085,\n                        267216079,\n                        2239,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        24,\n                        -67,\n                        33,\n                        69,\n                        83,\n                        124,\n                        99,\n                        69,\n                        119,\n                        -25,\n                        -72,\n                        66,\n                        78,\n                        49,\n                        -116,\n                        69,\n                        -124,\n                        22,\n                        -128,\n                        66,\n                        27,\n                        123,\n                        82,\n                        66,\n                        28,\n                        93,\n                        -95,\n                        66,\n                        23,\n                        59,\n                        -80,\n                        66,\n                        57,\n                        65,\n                        9,\n                        66,\n                        -126,\n                        -52,\n                        43,\n                        66,\n                        -99,\n                        44,\n                        95,\n                        66,\n                        -105,\n                        49,\n                        -75,\n                        66,\n            
            125,\n                        -88,\n                        -53,\n                        66,\n                        -95,\n                        82,\n                        36,\n                        66,\n                        -88,\n                        -11,\n                        90,\n                        66,\n                        113,\n                        -60,\n                        -93,\n                        66,\n                        -94,\n                        -57,\n                        123,\n                        66,\n                        -76,\n                        -65,\n                        103,\n                        66,\n                        -128,\n                        -45,\n                        -101,\n                        66,\n                        89,\n                        -56,\n                        19,\n                        66,\n                        -65,\n                        -103,\n                        -28,\n                        66,\n                        125,\n                        83,\n                        -20,\n                        66,\n                        -107,\n                        23,\n                        22,\n                        66,\n                        122,\n                        40,\n                        38,\n                        66,\n                        83,\n                        -67,\n                        -43,\n                        66,\n                        -90,\n                        -113,\n                        13,\n                        66,\n                        95,\n                        -68,\n                        36,\n                        66,\n                        -85,\n                        55,\n                        94,\n                        66,\n                        -109,\n                        8,\n                        -121,\n                        
66,\n                        80,\n                        -109,\n                        13,\n                        66,\n                        -104,\n                        -21,\n                        -120,\n                        66,\n                        -114,\n                        -90,\n                        -98,\n                        66,\n                        109,\n                        -8,\n                        -62,\n                        66,\n                        -67,\n                        -114,\n                        -60,\n                        66,\n                        -77,\n                        14,\n                        -53,\n                        66,\n                        91,\n                        95,\n                        -18,\n                        66,\n                        113,\n                        29,\n                        -117,\n                        66,\n                        -63,\n                        -77,\n                        62,\n                        66,\n                        -68,\n                        98,\n                        -82,\n                        66,\n                        82,\n                        -6,\n                        48,\n                        66,\n                        -114,\n                        26,\n                        126,\n                        66,\n                        121,\n                        46,\n                        49,\n                        66,\n                        -99,\n                        103,\n                        46,\n                        66,\n                        119,\n                        -34,\n                        33,\n                        66,\n                        -107,\n                        -59,\n                        -123,\n                        66,\n                        -63,\n                        58,\n                        57,\n           
             66,\n                        -59,\n                        10,\n                        -118,\n                        66,\n                        -106,\n                        -12,\n                        -84,\n                        66,\n                        -116,\n                        29,\n                        -97,\n                        64,\n                        -19,\n                        90,\n                        39,\n                        66,\n                        -80,\n                        81,\n                        -23,\n                        66,\n                        -119,\n                        16,\n                        50,\n                        66,\n                        -118,\n                        66,\n                        -114,\n                        66,\n                        104,\n                        -24,\n                        107,\n                        66,\n                        85,\n                        105,\n                        -41,\n                        66,\n                        -125,\n                        120,\n                        55,\n                        66,\n                        -106,\n                        12,\n                        126,\n                        66,\n                        -112,\n                        94,\n                        -30,\n                        66,\n                        -115,\n                        -21,\n                        62,\n                        66,\n                        -108,\n                        82,\n                        -90,\n                        66,\n                        -128,\n                        95,\n                        -128,\n                        66,\n                        -72,\n                        90,\n                        -4,\n                        66,\n                        -74,\n                        -72,\n                      
  78,\n                        66,\n                        83,\n                        124,\n                        -105,\n                        66,\n                        -82,\n                        -117,\n                        59,\n                        66,\n                        -60,\n                        0,\n                        23,\n                        66,\n                        -71,\n                        -117,\n                        -66,\n                        66,\n                        -67,\n                        -46,\n                        -26,\n                        66,\n                        -72,\n                        -84,\n                        114,\n                        66,\n                        -103,\n                        109,\n                        103,\n                        66,\n                        -63,\n                        -99,\n                        32,\n                        66,\n                        -62,\n                        82,\n                        -49,\n                        66,\n                        64,\n                        -14,\n                        127,\n                        66,\n                        -70,\n                        -68,\n                        69,\n                        66,\n                        -128,\n                        -54,\n                        54,\n                        66,\n                        -78,\n                        -122,\n                        -14,\n                        66,\n                        88,\n                        -36,\n                        -48,\n                        66,\n                        -127,\n                        -104,\n                        -108,\n                        66,\n                        -81,\n                        66,\n                        -46,\n                        66,\n                        124,\n                        -97,\n   
                     116,\n                        66,\n                        82,\n                        8,\n                        -45,\n                        66,\n                        -66,\n                        18,\n                        -67,\n                        66,\n                        -106,\n                        -68,\n                        -94,\n                        66,\n                        -73,\n                        115,\n                        -22,\n                        66,\n                        -115,\n                        -116,\n                        -29,\n                        66,\n                        -120,\n                        -44,\n                        -4,\n                        66,\n                        -128,\n                        -65,\n                        -110,\n                        66,\n                        -79,\n                        -99,\n                        -36,\n                        66,\n                        76,\n                        93,\n                        -45,\n                        66,\n                        -121,\n                        -1,\n                        -20,\n                        66,\n                        -108,\n                        85,\n                        -118,\n                        66,\n                        117,\n                        -86,\n                        -49,\n                        66,\n                        -73,\n                        60,\n                        -25,\n                        66,\n                        -123,\n                        94,\n                        -123,\n                        66,\n                        -106,\n                        115,\n                        -28,\n                        66,\n                        -107,\n                        -118,\n                        -39,\n                        66,\n                        -108,\n      
                  107,\n                        -89,\n                        66,\n                        -123,\n                        112,\n                        119,\n                        66,\n                        -124,\n                        -44,\n                        -80,\n                        66,\n                        -105,\n                        -105,\n                        -120,\n                        66,\n                        126,\n                        -28,\n                        101,\n                        66,\n                        -119,\n                        106,\n                        126,\n                        66,\n                        -102,\n                        59,\n                        -89,\n                        66,\n                        102,\n                        67,\n                        61,\n                        66,\n                        -59,\n                        -59,\n                        -120,\n                        66,\n                        -70,\n                        75,\n                        -26,\n                        66,\n                        -127,\n                        29,\n                        -82,\n                        66,\n                        -126,\n                        36,\n                        -30,\n                        66,\n                        86,\n                        96,\n                        41,\n                        66,\n                        -107,\n                        11,\n                        18,\n                        66,\n                        70,\n                        99,\n                        92,\n                        66,\n                        -65,\n                        8,\n                        51,\n                        66,\n                        -109,\n                        75,\n                        86,\n                        66,\n                    
    -78,\n                        -121,\n                        103,\n                        66,\n                        -96,\n                        108,\n                        -43,\n                        66,\n                        -117,\n                        45,\n                        26,\n                        66,\n                        -120,\n                        -126,\n                        99,\n                        66,\n                        103,\n                        -7,\n                        -40,\n                        66,\n                        78,\n                        -34,\n                        -63,\n                        66,\n                        -122,\n                        -128,\n                        33,\n                        66,\n                        -110,\n                        -120,\n                        74,\n                        66,\n                        -65,\n                        -88,\n                        108,\n                        66,\n                        -108,\n                        -125,\n                        126,\n                        66,\n                        -128,\n                        126,\n                        -96,\n                        66,\n                        -81,\n                        -101,\n                        -95,\n                        66,\n                        -126,\n                        -9,\n                        -48,\n                        66,\n                        -65,\n                        56,\n                        -104,\n                        66,\n                        105,\n                        7,\n                        -55,\n                        66,\n                        108,\n                        3,\n                        -85,\n                        66,\n                        -107,\n                        43,\n                        -85,\n                        
66,\n                        -110,\n                        67,\n                        35,\n                        66,\n                        -76,\n                        126,\n                        -119,\n                        66,\n                        -66,\n                        84,\n                        -115,\n                        66,\n                        -118,\n                        111,\n                        -55,\n                        66,\n                        36,\n                        -11,\n                        -11,\n                        66,\n                        90,\n                        -16,\n                        -14,\n                        66,\n                        -66,\n                        50,\n                        -39,\n                        66,\n                        96,\n                        2,\n                        42,\n                        66,\n                        107,\n                        92,\n                        -39,\n                        66,\n                        -66,\n                        -60,\n                        -19,\n                        66,\n                        70,\n                        79,\n                        16,\n                        66,\n                        -65,\n                        -98,\n                        39,\n                        66,\n                        -110,\n                        63,\n                        50,\n                        66,\n                        -114,\n                        94,\n                        -58,\n                        66,\n                        -101,\n                        -6,\n                        -45,\n                        66,\n                        -106,\n                        -125,\n                        112,\n                        66,\n                        -120,\n                        -58,\n                        45,\n           
             66,\n                        84,\n                        -101,\n                        -90,\n                        66,\n                        -63,\n                        76,\n                        -48,\n                        66,\n                        -79,\n                        -40,\n                        -31,\n                        66,\n                        -67,\n                        -116,\n                        58,\n                        66,\n                        -67,\n                        -100,\n                        -93,\n                        66,\n                        -121,\n                        -53,\n                        -17,\n                        66,\n                        75,\n                        -55,\n                        -106,\n                        66,\n                        -61,\n                        19,\n                        -128,\n                        66,\n                        78,\n                        -38,\n                        -97,\n                        66,\n                        103,\n                        -99,\n                        45,\n                        66,\n                        -66,\n                        84,\n                        -26,\n                        66,\n                        -124,\n                        36,\n                        -8,\n                        66,\n                        -110,\n                        -76,\n                        35,\n                        66,\n                        -105,\n                        113,\n                        97,\n                        66,\n                        -61,\n                        -36,\n                        56,\n                        66,\n                        117,\n                        -116,\n                        121,\n                        66,\n                        -116,\n                        -48,\n                    
    93,\n                        66,\n                        -68,\n                        58,\n                        81,\n                        66,\n                        -81,\n                        103,\n                        -54,\n                        66,\n                        -112,\n                        68,\n                        25,\n                        66,\n                        -106,\n                        -37,\n                        107,\n                        66,\n                        -78,\n                        -32,\n                        -54,\n                        66,\n                        -118,\n                        -58,\n                        28,\n                        66,\n                        -66,\n                        74,\n                        112,\n                        66,\n                        -69,\n                        9,\n                        22,\n                        66,\n                        -100,\n                        -43,\n                        -66,\n                        66,\n                        106,\n                        -12,\n                        112,\n                        66,\n                        87,\n                        46,\n                        -25,\n                        66,\n                        -80,\n                        -48,\n                        -103,\n                        66,\n                        73,\n                        125,\n                        12,\n                        66,\n                        -109,\n                        112,\n                        77,\n                        66,\n                        73,\n                        77,\n                        -75,\n                        66,\n                        115,\n                        99,\n                        -56,\n                        66,\n                        110,\n                        -108,\n        
                -106,\n                        66,\n                        -90,\n                        101,\n                        110,\n                        66,\n                        -68,\n                        -10,\n                        -70,\n                        66,\n                        70,\n                        25,\n                        -94,\n                        66,\n                        -115,\n                        -22,\n                        -3,\n                        66,\n                        -76,\n                        -60,\n                        -42,\n                        66,\n                        109,\n                        -101,\n                        -52,\n                        66,\n                        -101,\n                        -42,\n                        -117,\n                        66,\n                        99,\n                        -47,\n                        33,\n                        66,\n                        84,\n                        -121,\n                        116,\n                        66,\n                        -63,\n                        -50,\n                        -57,\n                        66,\n                        -112,\n                        -90,\n                        -11,\n                        66,\n                        -119,\n                        23,\n                        -98,\n                        66,\n                        -107,\n                        35,\n                        117,\n                        66,\n                        -112,\n                        55,\n                        -78,\n                        66,\n                        -70,\n                        99,\n                        126,\n                        66,\n                        108,\n                        -93,\n                        28,\n                        66,\n                        92,\n                 
       -15,\n                        -21,\n                        66,\n                        -64,\n                        14,\n                        -112,\n                        66,\n                        -72,\n                        -35,\n                        48,\n                        66,\n                        -112,\n                        -39,\n                        39,\n                        66,\n                        74,\n                        18,\n                        -45,\n                        66,\n                        -70,\n                        -124,\n                        -50,\n                        66,\n                        89,\n                        33,\n                        -10,\n                        66,\n                        -61,\n                        -2,\n                        -26,\n                        66,\n                        -80,\n                        107,\n                        6,\n                        66,\n                        -127,\n                        73,\n                        -119,\n                        66,\n                        112,\n                        -18,\n                        11,\n                        66,\n                        86,\n                        -13,\n                        80,\n                        66,\n                        -109,\n                        -52,\n                        30,\n                        66,\n                        -116,\n                        36,\n                        -82,\n                        66,\n                        -72,\n                        -97,\n                        4,\n                        66,\n                        -71,\n                        15,\n                        55,\n                        66,\n                        -63,\n                        32,\n                        -108,\n                        66,\n                        77,\n        
                -29,\n                        16,\n                        66,\n                        -61,\n                        -15,\n                        -114,\n                        66,\n                        85,\n                        58,\n                        -110,\n                        66,\n                        -72,\n                        89,\n                        -96,\n                        66,\n                        -110,\n                        -62,\n                        -5,\n                        66,\n                        -60,\n                        92,\n                        -26,\n                        66,\n                        -62,\n                        45,\n                        -14,\n                        66,\n                        93,\n                        68,\n                        -20,\n                        66,\n                        78,\n                        31,\n                        88,\n                        66,\n                        -104,\n                        33,\n                        -84,\n                        66,\n                        -83,\n                        -99,\n                        116,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n           
             0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                       
 0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 225,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        602397242,\n                        985291288,\n                        1117381055,\n                        631286267,\n                        1159871039,\n                        769877906,\n                        710290934,\n                        586643026,\n                        767911738,\n                        984495190,\n                        970343104,\n                        21583228,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        760295146,\n                        983690483,\n                        711067073,\n                        640852154,\n                        1141474031,\n                        729666032,\n                        1011663907,\n                        716671138,\n                        1143049792,\n                        587764751,\n                        1025973418,\n                        23295112,\n             
           0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 29,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 29,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -3471042119123364184,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        741444190,\n                        938385206,\n                        316895023,\n                        44092536,\n                        450799169,\n                        377427443,\n                        638928997,\n                        452949597,\n                        512919603,\n                        337998883,\n                        903566707,\n                        622771047,\n                        756776681,\n                        731166579,\n                        481097137,\n                        606252761,\n                        756853481,\n                        368532603,\n                        661968943,\n                        203769137,\n                        744733669,\n            
            382914295,\n                        524069309,\n                        483703645,\n                        890564083,\n                        69856686,\n                        724561007,\n                        592623267,\n                        1012898549,\n                        94819938,\n                        113187541,\n                        497646006,\n                        195005658,\n                        1063779809,\n                        1010747097,\n                        1000913278,\n                        316377073,\n                        1052735469,\n                        26727,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        17,\n                        -116,\n                        45,\n                        68,\n                        -43,\n                        -11,\n                        48,\n                        68,\n                        -60,\n                        -8,\n                        49,\n                        66,\n                        68,\n                        -98,\n                        42,\n                        65,\n                        -6,\n                        -13,\n                        -69,\n                        69,\n                        113,\n                        -21,\n                        124,\n                        66,\n                        -61,\n                        -83,\n                        26,\n                        66,\n                        -102,\n                        51,\n                        39,\n                        66,\n                        121,\n                        83,\n                        116,\n                        66,\n                        80,\n                        -102,\n                        -53,\n         
               66,\n                        -64,\n                        77,\n                        90,\n                        66,\n                        -127,\n                        -103,\n                        -65,\n                        66,\n                        107,\n                        -113,\n                        15,\n                        66,\n                        -117,\n                        80,\n                        76,\n                        66,\n                        124,\n                        101,\n                        78,\n                        66,\n                        -79,\n                        -110,\n                        -8,\n                        69,\n                        104,\n                        110,\n                        -126,\n                        66,\n                        -96,\n                        27,\n                        8,\n                        64,\n                        -68,\n                        -60,\n                        119,\n                        66,\n                        92,\n                        -34,\n                        79,\n                        66,\n                        97,\n                        -88,\n                        56,\n                        66,\n                        -118,\n                        -39,\n                        -12,\n                        66,\n                        -64,\n                        75,\n                        12,\n                        66,\n                        -110,\n                        -80,\n                        90,\n                        66,\n                        -99,\n                        74,\n                        -85,\n                        66,\n                        -63,\n                        124,\n                        -114,\n                        66,\n                        -108,\n                        -63,\n                        
79,\n                        66,\n                        -94,\n                        -44,\n                        -28,\n                        66,\n                        -116,\n                        115,\n                        99,\n                        66,\n                        -88,\n                        18,\n                        -68,\n                        66,\n                        115,\n                        -123,\n                        20,\n                        66,\n                        113,\n                        -17,\n                        95,\n                        66,\n                        -124,\n                        -42,\n                        81,\n                        66,\n                        -84,\n                        110,\n                        72,\n                        66,\n                        106,\n                        12,\n                        65,\n                        66,\n                        93,\n                        115,\n                        -80,\n                        66,\n                        -106,\n                        -18,\n                        0,\n                        66,\n                        93,\n                        -31,\n                        -75,\n                        66,\n                        -76,\n                        115,\n                        -16,\n                        66,\n                        -87,\n                        114,\n                        -80,\n                        66,\n                        -112,\n                        -12,\n                        60,\n                        66,\n                        90,\n                        -114,\n                        117,\n                        66,\n                        -100,\n                        0,\n                        74,\n                        66,\n                        -72,\n                        127,\n            
            75,\n                        66,\n                        -102,\n                        30,\n                        -8,\n                        66,\n                        -77,\n                        52,\n                        -74,\n                        66,\n                        78,\n                        45,\n                        82,\n                        66,\n                        -116,\n                        59,\n                        26,\n                        66,\n                        110,\n                        113,\n                        -88,\n                        66,\n                        -107,\n                        -44,\n                        -65,\n                        66,\n                        -62,\n                        -121,\n                        22,\n                        66,\n                        -104,\n                        110,\n                        99,\n                        66,\n                        -100,\n                        -17,\n                        57,\n                        65,\n                        55,\n                        -102,\n                        -8,\n                        66,\n                        89,\n                        113,\n                        116,\n                        66,\n                        -104,\n                        40,\n                        -46,\n                        66,\n                        -110,\n                        90,\n                        18,\n                        66,\n                        -72,\n                        109,\n                        -36,\n                        66,\n                        -95,\n                        -34,\n                        10,\n                        66,\n                        -67,\n                        -14,\n                        -110,\n                        66,\n                        78,\n                        
-105,\n                        -42,\n                        66,\n                        86,\n                        -48,\n                        79,\n                        66,\n                        -99,\n                        69,\n                        -16,\n                        66,\n                        -76,\n                        98,\n                        105,\n                        66,\n                        -98,\n                        76,\n                        -41,\n                        66,\n                        -59,\n                        24,\n                        -96,\n                        66,\n                        78,\n                        -40,\n                        -50,\n                        66,\n                        116,\n                        7,\n                        -62,\n                        66,\n                        -61,\n                        122,\n                        -126,\n                        66,\n                        -91,\n                        113,\n                        3,\n                        66,\n                        -107,\n                        -15,\n                        94,\n                        66,\n                        -60,\n                        31,\n                        0,\n                        66,\n                        -104,\n                        -18,\n                        -65,\n                        66,\n                        114,\n                        116,\n                        66,\n                        66,\n                        82,\n                        76,\n                        45,\n                        66,\n                        -63,\n                        108,\n                        108,\n                        66,\n                        -105,\n                        -6,\n                        17,\n                        66,\n                        -61,\n                 
       92,\n                        104,\n                        66,\n                        115,\n                        34,\n                        -77,\n                        66,\n                        87,\n                        55,\n                        -126,\n                        66,\n                        -108,\n                        -49,\n                        89,\n                        66,\n                        -124,\n                        113,\n                        96,\n                        66,\n                        -103,\n                        72,\n                        15,\n                        66,\n                        -104,\n                        -53,\n                        105,\n                        66,\n                        -115,\n                        -56,\n                        -79,\n                        66,\n                        -122,\n                        -120,\n                        80,\n                        66,\n                        -122,\n                        -21,\n                        53,\n                        66,\n                        -99,\n                        -51,\n                        -97,\n                        66,\n                        -59,\n                        -16,\n                        -126,\n                        66,\n                        -69,\n                        -49,\n                        39,\n                        65,\n                        -50,\n                        112,\n                        -107,\n                        66,\n                        -93,\n                        -9,\n                        -33,\n                        66,\n                        93,\n                        105,\n                        -103,\n                        66,\n                        -102,\n                        -16,\n                        -64,\n                        66,\n                        
-84,\n                        118,\n                        -65,\n                        66,\n                        -69,\n                        115,\n                        24,\n                        66,\n                        -100,\n                        106,\n                        19,\n                        66,\n                        118,\n                        35,\n                        -124,\n                        66,\n                        -74,\n                        98,\n                        -50,\n                        66,\n                        -104,\n                        -50,\n                        46,\n                        66,\n                        -94,\n                        24,\n                        92,\n                        66,\n                        -60,\n                        -118,\n                        -28,\n                        66,\n                        84,\n                        107,\n                        -56,\n                        66,\n                        -128,\n                        118,\n                        101,\n                        66,\n                        113,\n                        68,\n                        -128,\n                        66,\n                        -75,\n                        -113,\n                        -104,\n                        66,\n                        74,\n                        8,\n                        109,\n                        66,\n                        -72,\n                        -113,\n                        26,\n                        66,\n                        106,\n                        -62,\n                        -99,\n                        66,\n                        -116,\n                        -24,\n                        -120,\n                        66,\n                        -68,\n                        -5,\n                        -100,\n                        66,\n   
                     -87,\n                        -80,\n                        77,\n                        66,\n                        118,\n                        6,\n                        26,\n                        66,\n                        81,\n                        -55,\n                        -128,\n                        66,\n                        -103,\n                        -88,\n                        41,\n                        66,\n                        -113,\n                        44,\n                        36,\n                        66,\n                        -112,\n                        -19,\n                        11,\n                        66,\n                        -61,\n                        19,\n                        72,\n                        66,\n                        -70,\n                        33,\n                        15,\n                        66,\n                        -73,\n                        110,\n                        -103,\n                        66,\n                        -105,\n                        -65,\n                        55,\n                        66,\n                        98,\n                        44,\n                        26,\n                        66,\n                        -72,\n                        21,\n                        -93,\n                        66,\n                        73,\n                        79,\n                        31,\n                        66,\n                        -67,\n                        -43,\n                        92,\n                        66,\n                        -71,\n                        12,\n                        -102,\n                        66,\n                        91,\n                        -127,\n                        125,\n                        66,\n                        111,\n                        1,\n                        104,\n                        
66,\n                        127,\n                        58,\n                        94,\n                        66,\n                        -109,\n                        50,\n                        79,\n                        66,\n                        -122,\n                        -41,\n                        97,\n                        66,\n                        90,\n                        -59,\n                        -64,\n                        66,\n                        -84,\n                        70,\n                        78,\n                        66,\n                        -112,\n                        112,\n                        -61,\n                        66,\n                        -71,\n                        90,\n                        -95,\n                        66,\n                        -120,\n                        85,\n                        111,\n                        66,\n                        99,\n                        59,\n                        126,\n                        66,\n                        80,\n                        -93,\n                        76,\n                        66,\n                        -108,\n                        6,\n                        64,\n                        66,\n                        -63,\n                        -35,\n                        53,\n                        66,\n                        -85,\n                        -41,\n                        31,\n                        66,\n                        52,\n                        84,\n                        -100,\n                        66,\n                        -107,\n                        -83,\n                        20,\n                        66,\n                        -72,\n                        112,\n                        -41,\n                        66,\n                        87,\n                        36,\n                        10,\n                   
     66,\n                        75,\n                        -115,\n                        80,\n                        66,\n                        93,\n                        6,\n                        125,\n                        66,\n                        -113,\n                        84,\n                        -61,\n                        66,\n                        -121,\n                        52,\n                        41,\n                        66,\n                        -63,\n                        85,\n                        -74,\n                        66,\n                        -75,\n                        -54,\n                        123,\n                        66,\n                        -120,\n                        -72,\n                        -40,\n                        66,\n                        92,\n                        44,\n                        6,\n                        66,\n                        -128,\n                        -54,\n                        23,\n                        66,\n                        -74,\n                        -108,\n                        -105,\n                        66,\n                        -69,\n                        -43,\n                        25,\n                        66,\n                        73,\n                        49,\n                        -98,\n                        66,\n                        85,\n                        -34,\n                        -27,\n                        66,\n                        81,\n                        -55,\n                        48,\n                        66,\n                        127,\n                        82,\n                        29,\n                        66,\n                        -80,\n                        64,\n                        63,\n                        66,\n                        -101,\n                        49,\n                        -6,\n              
          66,\n                        -128,\n                        51,\n                        58,\n                        66,\n                        -97,\n                        28,\n                        63,\n                        66,\n                        -109,\n                        -100,\n                        -80,\n                        66,\n                        57,\n                        -25,\n                        5,\n                        66,\n                        -110,\n                        54,\n                        105,\n                        66,\n                        -100,\n                        -50,\n                        23,\n                        66,\n                        -123,\n                        -76,\n                        -20,\n                        66,\n                        109,\n                        84,\n                        113,\n                        66,\n                        -69,\n                        17,\n                        74,\n                        66,\n                        116,\n                        -122,\n                        -127,\n                        66,\n                        -122,\n                        -48,\n                        -115,\n                        66,\n                        -74,\n                        88,\n                        34,\n                        66,\n                        -72,\n                        38,\n                        78,\n                        66,\n                        83,\n                        -82,\n                        -81,\n                        66,\n                        -105,\n                        2,\n                        -10,\n                        66,\n                        -111,\n                        -24,\n                        -42,\n                        66,\n                        -64,\n                        37,\n                        
-8,\n                        66,\n                        -65,\n                        40,\n                        78,\n                        66,\n                        -122,\n                        -61,\n                        126,\n                        66,\n                        -65,\n                        84,\n                        -40,\n                        66,\n                        -82,\n                        78,\n                        39,\n                        66,\n                        -59,\n                        29,\n                        -11,\n                        66,\n                        100,\n                        96,\n                        127,\n                        66,\n                        99,\n                        -37,\n                        91,\n                        66,\n                        -65,\n                        -42,\n                        91,\n                        66,\n                        -108,\n                        -79,\n                        27,\n                        66,\n                        -60,\n                        44,\n                        -106,\n                        66,\n                        -70,\n                        17,\n                        27,\n                        66,\n                        -83,\n                        9,\n                        80,\n                        66,\n                        -70,\n                        -42,\n                        -54,\n                        66,\n                        -98,\n                        -86,\n                        64,\n                        66,\n                        -69,\n                        -46,\n                        -31,\n                        66,\n                        76,\n                        89,\n                        -114,\n                        66,\n                        74,\n                        -39,\n                   
     -127,\n                        66,\n                        -99,\n                        70,\n                        -54,\n                        66,\n                        -96,\n                        83,\n                        102,\n                        66,\n                        -109,\n                        15,\n                        116,\n                        66,\n                        84,\n                        47,\n                        111,\n                        66,\n                        69,\n                        105,\n                        -26,\n                        66,\n                        85,\n                        -67,\n                        100,\n                        66,\n                        -72,\n                        116,\n                        -81,\n                        66,\n                        74,\n                        126,\n                        43,\n                        66,\n                        -103,\n                        -18,\n                        57,\n                        66,\n                        -64,\n                        2,\n                        77,\n                        66,\n                        -108,\n                        47,\n                        94,\n                        66,\n                        -107,\n                        -123,\n                        -49,\n                        66,\n                        101,\n                        11,\n                        -7,\n                        66,\n                        -67,\n                        126,\n                        -126,\n                        66,\n                        -61,\n                        -46,\n                        -40,\n                        66,\n                        89,\n                        -92,\n                        -109,\n                        66,\n                        -75,\n                        27,\n       
                 79,\n                        66,\n                        -103,\n                        -24,\n                        98,\n                        66,\n                        -70,\n                        120,\n                        -71,\n                        66,\n                        -116,\n                        125,\n                        -77,\n                        66,\n                        -109,\n                        -78,\n                        -107,\n                        66,\n                        114,\n                        100,\n                        33,\n                        66,\n                        -106,\n                        72,\n                        112,\n                        66,\n                        104,\n                        -87,\n                        60,\n                        66,\n                        -110,\n                        44,\n                        -123,\n                        66,\n                        -105,\n                        -39,\n                        -66,\n                        66,\n                        -110,\n                        86,\n                        104,\n                        66,\n                        100,\n                        53,\n                        93,\n                        66,\n                        -125,\n                        -18,\n                        -65,\n                        66,\n                        -78,\n                        -113,\n                        -84,\n                        66,\n                        91,\n                        29,\n                        -115,\n                        66,\n                        84,\n                        54,\n                        -3,\n                        66,\n                        88,\n                        98,\n                        -19,\n                        66,\n                        82,\n                   
     -28,\n                        -89,\n                        66,\n                        -73,\n                        52,\n                        69,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                      
  0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 231,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1162241054,\n                        1114254304,\n                        1104602657,\n                        727010285,\n                        626549009,\n                        1140765443,\n                        989461384,\n                        625774706,\n                        1103245001,\n                        753917179,\n                        1013378243,\n                        710454697,\n                        16,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        602653108,\n                        989300059,\n                        1118977784,\n                        582930724,\n                        600526102,\n        
                1098282644,\n                        624243316,\n                        639138976,\n                        1104607445,\n                        1013454742,\n                        626323955,\n                        581150426,\n                        13,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 23,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 23,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -8641322014209361855,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        132573906,\n                        535248293,\n                        870950241,\n                        975039569,\n                        173225913,\n                        349495291,\n                        117793841,\n                        623462321,\n                        984259509,\n                        460237761,\n                        490000187,\n                        450074085,\n                        976857025,\n                        662434898,\n  
                      493005906,\n                        306087154,\n                        330993590,\n                        750497519,\n                        1042614487,\n                        662472669,\n                        309630133,\n                        311799482,\n                        1050507890,\n                        647199571,\n                        590923353,\n                        1055718205,\n                        363952701,\n                        354200681,\n                        715860198,\n                        522750030,\n                        728614695,\n                        529617982,\n                        728286510,\n                        603296801,\n                        317525931,\n                        188020851,\n                        748530730,\n                        31043,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        44,\n                        95,\n                        -90,\n                        69,\n                        88,\n                        110,\n                        111,\n                        68,\n                        -73,\n                        -20,\n                        78,\n                        67,\n                        -20,\n                        40,\n                        -108,\n                        69,\n                        2,\n                        27,\n                        30,\n                        64,\n                        -91,\n                        -48,\n                        101,\n                        66,\n                        29,\n                        91,\n                        118,\n                        66,\n                        -128,\n                        -34,\n                      
  -19,\n                        65,\n                        -86,\n                        -37,\n                        22,\n                        66,\n                        -97,\n                        121,\n                        -34,\n                        66,\n                        -59,\n                        54,\n                        74,\n                        66,\n                        -121,\n                        60,\n                        83,\n                        66,\n                        -107,\n                        22,\n                        125,\n                        66,\n                        -117,\n                        -114,\n                        -51,\n                        66,\n                        -100,\n                        104,\n                        80,\n                        66,\n                        86,\n                        95,\n                        -55,\n                        66,\n                        -81,\n                        85,\n                        107,\n                        65,\n                        -51,\n                        114,\n                        80,\n                        66,\n                        -111,\n                        -29,\n                        29,\n                        66,\n                        -66,\n                        -17,\n                        -10,\n                        66,\n                        99,\n                        106,\n                        -29,\n                        66,\n                        9,\n                        68,\n                        -32,\n                        66,\n                        -107,\n                        -59,\n                        -48,\n                        66,\n                        -104,\n                        117,\n                        1,\n                        66,\n                        -86,\n                        117,\n         
               -83,\n                        66,\n                        -121,\n                        -74,\n                        16,\n                        66,\n                        -89,\n                        116,\n                        37,\n                        66,\n                        -76,\n                        -81,\n                        33,\n                        66,\n                        -107,\n                        -48,\n                        -1,\n                        66,\n                        -116,\n                        68,\n                        -125,\n                        66,\n                        123,\n                        102,\n                        -58,\n                        66,\n                        -123,\n                        111,\n                        16,\n                        66,\n                        90,\n                        119,\n                        84,\n                        66,\n                        -100,\n                        47,\n                        125,\n                        66,\n                        -114,\n                        -119,\n                        1,\n                        66,\n                        -65,\n                        116,\n                        -60,\n                        66,\n                        -122,\n                        -3,\n                        -41,\n                        66,\n                        -93,\n                        -48,\n                        95,\n                        66,\n                        -103,\n                        100,\n                        -25,\n                        66,\n                        -82,\n                        -94,\n                        -26,\n                        64,\n                        -87,\n                        -103,\n                        42,\n                        66,\n                        106,\n                   
     71,\n                        -105,\n                        66,\n                        -127,\n                        125,\n                        89,\n                        66,\n                        -89,\n                        -99,\n                        -61,\n                        66,\n                        -119,\n                        1,\n                        -45,\n                        66,\n                        -74,\n                        6,\n                        113,\n                        66,\n                        -63,\n                        58,\n                        112,\n                        66,\n                        -80,\n                        -73,\n                        25,\n                        66,\n                        -112,\n                        124,\n                        -61,\n                        66,\n                        63,\n                        -31,\n                        -53,\n                        66,\n                        -68,\n                        -106,\n                        83,\n                        66,\n                        -108,\n                        -88,\n                        45,\n                        66,\n                        -67,\n                        -37,\n                        -91,\n                        66,\n                        122,\n                        -108,\n                        61,\n                        66,\n                        -108,\n                        -44,\n                        -14,\n                        66,\n                        -66,\n                        125,\n                        -111,\n                        66,\n                        -63,\n                        -87,\n                        107,\n                        66,\n                        -103,\n                        -15,\n                        99,\n                        66,\n                        
-82,\n                        38,\n                        78,\n                        66,\n                        -99,\n                        -1,\n                        -69,\n                        66,\n                        80,\n                        22,\n                        -64,\n                        66,\n                        -107,\n                        71,\n                        -101,\n                        66,\n                        109,\n                        -54,\n                        -97,\n                        66,\n                        -110,\n                        60,\n                        13,\n                        66,\n                        76,\n                        39,\n                        -81,\n                        66,\n                        -61,\n                        -40,\n                        -80,\n                        66,\n                        -122,\n                        -80,\n                        4,\n                        66,\n                        98,\n                        -32,\n                        7,\n                        66,\n                        -111,\n                        5,\n                        -79,\n                        66,\n                        113,\n                        -104,\n                        96,\n                        66,\n                        -110,\n                        67,\n                        -86,\n                        66,\n                        -104,\n                        -76,\n                        77,\n                        66,\n                        -109,\n                        -70,\n                        -32,\n                        66,\n                        -61,\n                        -37,\n                        25,\n                        66,\n                        -106,\n                        35,\n                        -18,\n                        66,\n             
           82,\n                        99,\n                        93,\n                        66,\n                        92,\n                        -105,\n                        -102,\n                        66,\n                        -111,\n                        10,\n                        121,\n                        66,\n                        -61,\n                        80,\n                        99,\n                        66,\n                        -72,\n                        -113,\n                        -67,\n                        66,\n                        -105,\n                        -26,\n                        -78,\n                        66,\n                        74,\n                        29,\n                        112,\n                        66,\n                        72,\n                        -113,\n                        -34,\n                        66,\n                        91,\n                        34,\n                        -25,\n                        66,\n                        -72,\n                        -1,\n                        96,\n                        66,\n                        -62,\n                        -127,\n                        3,\n                        66,\n                        76,\n                        40,\n                        -52,\n                        66,\n                        -108,\n                        61,\n                        -65,\n                        66,\n                        -70,\n                        -111,\n                        -92,\n                        66,\n                        -64,\n                        -9,\n                        -39,\n                        66,\n                        -66,\n                        -123,\n                        -1,\n                        66,\n                        108,\n                        57,\n                        -22,\n                        66,\n  
                      -119,\n                        -102,\n                        -122,\n                        66,\n                        -97,\n                        110,\n                        -123,\n                        66,\n                        70,\n                        -128,\n                        -106,\n                        66,\n                        -121,\n                        80,\n                        23,\n                        66,\n                        -82,\n                        43,\n                        81,\n                        66,\n                        -113,\n                        -87,\n                        -47,\n                        66,\n                        82,\n                        -57,\n                        41,\n                        66,\n                        -115,\n                        89,\n                        -90,\n                        66,\n                        85,\n                        -35,\n                        -120,\n                        66,\n                        -117,\n                        50,\n                        11,\n                        66,\n                        102,\n                        -48,\n                        -6,\n                        66,\n                        91,\n                        76,\n                        -55,\n                        66,\n                        93,\n                        23,\n                        -32,\n                        66,\n                        83,\n                        114,\n                        -123,\n                        66,\n                        127,\n                        92,\n                        121,\n                        66,\n                        -81,\n                        -14,\n                        -64,\n                        66,\n                        68,\n                        99,\n                        -41,\n                
        66,\n                        -64,\n                        108,\n                        92,\n                        66,\n                        -73,\n                        13,\n                        -75,\n                        66,\n                        -85,\n                        108,\n                        123,\n                        66,\n                        -67,\n                        -55,\n                        -11,\n                        66,\n                        100,\n                        123,\n                        -98,\n                        66,\n                        126,\n                        73,\n                        68,\n                        66,\n                        -65,\n                        -19,\n                        -64,\n                        66,\n                        -95,\n                        -124,\n                        122,\n                        66,\n                        -123,\n                        56,\n                        114,\n                        66,\n                        70,\n                        -114,\n                        -127,\n                        66,\n                        69,\n                        19,\n                        82,\n                        66,\n                        -108,\n                        94,\n                        1,\n                        66,\n                        -110,\n                        -73,\n                        -33,\n                        66,\n                        -106,\n                        -71,\n                        -13,\n                        66,\n                        -116,\n                        110,\n                        74,\n                        66,\n                        80,\n                        115,\n                        -66,\n                        66,\n                        -109,\n                        16,\n                        
81,\n                        66,\n                        -60,\n                        -27,\n                        -67,\n                        66,\n                        -109,\n                        64,\n                        -112,\n                        66,\n                        101,\n                        -54,\n                        -109,\n                        66,\n                        125,\n                        39,\n                        -39,\n                        66,\n                        -102,\n                        -118,\n                        49,\n                        66,\n                        -95,\n                        120,\n                        120,\n                        66,\n                        -61,\n                        4,\n                        8,\n                        66,\n                        84,\n                        118,\n                        95,\n                        66,\n                        -76,\n                        -34,\n                        -58,\n                        66,\n                        -80,\n                        -50,\n                        -57,\n                        66,\n                        -106,\n                        40,\n                        -23,\n                        66,\n                        91,\n                        -72,\n                        102,\n                        66,\n                        88,\n                        -95,\n                        117,\n                        66,\n                        -87,\n                        115,\n                        65,\n                        66,\n                        -84,\n                        114,\n                        49,\n                        66,\n                        -64,\n                        -40,\n                        -84,\n                        66,\n                        -114,\n                        -109,\n        
                -4,\n                        66,\n                        68,\n                        62,\n                        -47,\n                        66,\n                        -120,\n                        94,\n                        53,\n                        66,\n                        -62,\n                        53,\n                        -101,\n                        66,\n                        -123,\n                        -2,\n                        117,\n                        66,\n                        -91,\n                        -2,\n                        28,\n                        66,\n                        89,\n                        45,\n                        71,\n                        66,\n                        -106,\n                        -28,\n                        122,\n                        66,\n                        -120,\n                        33,\n                        58,\n                        66,\n                        -100,\n                        -58,\n                        -22,\n                        66,\n                        -61,\n                        8,\n                        -3,\n                        66,\n                        -104,\n                        -42,\n                        -78,\n                        66,\n                        -59,\n                        61,\n                        104,\n                        66,\n                        -128,\n                        42,\n                        -100,\n                        66,\n                        -110,\n                        100,\n                        69,\n                        66,\n                        -122,\n                        -111,\n                        -47,\n                        66,\n                        -89,\n                        88,\n                        -69,\n                        66,\n                        -64,\n                        
-63,\n                        59,\n                        66,\n                        77,\n                        -39,\n                        115,\n                        66,\n                        -78,\n                        -7,\n                        93,\n                        66,\n                        -106,\n                        89,\n                        -58,\n                        66,\n                        78,\n                        46,\n                        61,\n                        66,\n                        87,\n                        -4,\n                        -28,\n                        66,\n                        -107,\n                        4,\n                        20,\n                        66,\n                        -111,\n                        -53,\n                        -37,\n                        66,\n                        -59,\n                        -9,\n                        -58,\n                        66,\n                        -72,\n                        81,\n                        31,\n                        66,\n                        91,\n                        103,\n                        -118,\n                        66,\n                        -70,\n                        -108,\n                        -81,\n                        66,\n                        -71,\n                        111,\n                        48,\n                        66,\n                        -81,\n                        35,\n                        -121,\n                        66,\n                        -122,\n                        -85,\n                        19,\n                        66,\n                        -61,\n                        121,\n                        49,\n                        66,\n                        -88,\n                        -13,\n                        16,\n                        66,\n                        -89,\n                
        -110,\n                        83,\n                        66,\n                        -113,\n                        24,\n                        108,\n                        66,\n                        -81,\n                        -28,\n                        90,\n                        66,\n                        77,\n                        -102,\n                        -29,\n                        66,\n                        72,\n                        90,\n                        -44,\n                        66,\n                        -120,\n                        -94,\n                        119,\n                        66,\n                        -118,\n                        -80,\n                        -63,\n                        66,\n                        77,\n                        -72,\n                        -40,\n                        66,\n                        -65,\n                        41,\n                        -66,\n                        66,\n                        -101,\n                        -16,\n                        58,\n                        66,\n                        -67,\n                        23,\n                        -81,\n                        66,\n                        -120,\n                        -103,\n                        -32,\n                        66,\n                        -118,\n                        -87,\n                        33,\n                        66,\n                        -63,\n                        -101,\n                        -58,\n                        66,\n                        -112,\n                        74,\n                        -128,\n                        66,\n                        106,\n                        -66,\n                        0,\n                        66,\n                        -64,\n                        47,\n                        62,\n                        66,\n                        
-105,\n                        -117,\n                        -119,\n                        66,\n                        -105,\n                        79,\n                        -106,\n                        66,\n                        -104,\n                        -124,\n                        120,\n                        66,\n                        -62,\n                        -9,\n                        -86,\n                        66,\n                        -115,\n                        125,\n                        86,\n                        66,\n                        -116,\n                        62,\n                        117,\n                        66,\n                        -115,\n                        -103,\n                        -73,\n                        66,\n                        -113,\n                        121,\n                        118,\n                        66,\n                        38,\n                        36,\n                        118,\n                        66,\n                        71,\n                        -32,\n                        -24,\n                        66,\n                        -100,\n                        -53,\n                        -83,\n                        66,\n                        93,\n                        41,\n                        21,\n                        66,\n                        -115,\n                        -26,\n                        -78,\n                        66,\n                        116,\n                        31,\n                        93,\n                        66,\n                        -64,\n                        -46,\n                        -61,\n                        66,\n                        -68,\n                        25,\n                        54,\n                        66,\n                        -99,\n                        -105,\n                        -45,\n                        
66,\n                        87,\n                        -44,\n                        26,\n                        66,\n                        -125,\n                        -3,\n                        -76,\n                        66,\n                        -73,\n                        -16,\n                        101,\n                        66,\n                        -106,\n                        46,\n                        2,\n                        66,\n                        91,\n                        -53,\n                        -108,\n                        66,\n                        -108,\n                        13,\n                        35,\n                        66,\n                        -67,\n                        89,\n                        32,\n                        66,\n                        -123,\n                        -124,\n                        -5,\n                        66,\n                        76,\n                        -59,\n                        -39,\n                        66,\n                        78,\n                        61,\n                        119,\n                        66,\n                        -123,\n                        -44,\n                        -23,\n                        66,\n                        -68,\n                        -88,\n                        109,\n                        66,\n                        86,\n                        51,\n                        64,\n                        66,\n                        -75,\n                        -87,\n                        -62,\n                        66,\n                        -69,\n                        -22,\n                        -29,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n        
                0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 225,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        583495001,\n                        1162202407,\n                        602398130,\n                        1160646646,\n                        602119403,\n                        975106741,\n                        1160467126,\n                        982978898,\n                        596269241,\n                        588040204,\n                        625791545,\n                        21605011,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n               
         -1,\n                        1,\n                        255,\n                        755649931,\n                        767932000,\n                        597674267,\n                        1025946878,\n                        601943065,\n                        755470705,\n                        1141291529,\n                        582990422,\n                        595745080,\n                        711927458,\n                        597271643,\n                        22253821,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 29,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 29,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 3689043991782272360,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        262758222,\n                        1005436393,\n                        917574752,\n                        1066177649,\n                        514159657,\n                        59980517,\n                        
332782026,\n                        358988389,\n                        228128063,\n                        735937598,\n                        322887361,\n                        83008051,\n                        1045599270,\n                        648394923,\n                        656314069,\n                        606527421,\n                        129042914,\n                        733469031,\n                        189992371,\n                        191841351,\n                        184476717,\n                        795006183,\n                        1059144738,\n                        56047987,\n                        232466402,\n                        1013951987,\n                        366041271,\n                        232328641,\n                        659909335,\n                        904439593,\n                        880277601,\n                        268277061,\n                        456726518,\n                        1071208101,\n                        878024191,\n                        521440453,\n                        443865535,\n                        383727041,\n                        11001765,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        54,\n                        -78,\n                        126,\n                        69,\n                        25,\n                        9,\n                        53,\n                        66,\n                        6,\n                        106,\n                        52,\n                        66,\n                        -63,\n                        -7,\n                        70,\n                        66,\n                        -76,\n                        44,\n                        98,\n                        65,\n                        50,\n                  
      -39,\n                        17,\n                        66,\n                        -82,\n                        -27,\n                        -120,\n                        66,\n                        31,\n                        81,\n                        -124,\n                        66,\n                        -113,\n                        -106,\n                        82,\n                        66,\n                        116,\n                        -126,\n                        3,\n                        66,\n                        -99,\n                        -122,\n                        -120,\n                        66,\n                        -108,\n                        -49,\n                        82,\n                        64,\n                        -99,\n                        65,\n                        -83,\n                        66,\n                        79,\n                        118,\n                        96,\n                        66,\n                        -98,\n                        -49,\n                        -103,\n                        66,\n                        -68,\n                        -82,\n                        10,\n                        66,\n                        109,\n                        70,\n                        106,\n                        66,\n                        91,\n                        24,\n                        31,\n                        66,\n                        -115,\n                        -43,\n                        17,\n                        66,\n                        74,\n                        -78,\n                        -18,\n                        66,\n                        -97,\n                        -33,\n                        -39,\n                        66,\n                        -95,\n                        -81,\n                        106,\n                        65,\n                        19,\n   
                     122,\n                        -54,\n                        66,\n                        72,\n                        81,\n                        100,\n                        66,\n                        -105,\n                        19,\n                        80,\n                        66,\n                        -119,\n                        68,\n                        89,\n                        66,\n                        -83,\n                        -84,\n                        77,\n                        66,\n                        -68,\n                        -88,\n                        -93,\n                        66,\n                        -74,\n                        37,\n                        -9,\n                        66,\n                        -125,\n                        25,\n                        -116,\n                        66,\n                        -83,\n                        25,\n                        -23,\n                        66,\n                        91,\n                        88,\n                        4,\n                        66,\n                        -66,\n                        122,\n                        -106,\n                        66,\n                        -60,\n                        48,\n                        121,\n                        66,\n                        -119,\n                        -1,\n                        -104,\n                        66,\n                        -112,\n                        -10,\n                        -101,\n                        66,\n                        -76,\n                        78,\n                        5,\n                        66,\n                        -70,\n                        92,\n                        -124,\n                        66,\n                        -63,\n                        -75,\n                        -101,\n                        66,\n                    
    85,\n                        1,\n                        41,\n                        66,\n                        -101,\n                        97,\n                        93,\n                        66,\n                        36,\n                        119,\n                        116,\n                        66,\n                        -100,\n                        112,\n                        -59,\n                        66,\n                        96,\n                        52,\n                        -114,\n                        66,\n                        -71,\n                        3,\n                        15,\n                        66,\n                        80,\n                        124,\n                        80,\n                        66,\n                        -78,\n                        122,\n                        32,\n                        66,\n                        -71,\n                        -28,\n                        -10,\n                        66,\n                        76,\n                        52,\n                        -15,\n                        66,\n                        -109,\n                        92,\n                        -120,\n                        66,\n                        -109,\n                        28,\n                        -30,\n                        66,\n                        83,\n                        -9,\n                        -10,\n                        66,\n                        -110,\n                        83,\n                        -31,\n                        66,\n                        -84,\n                        109,\n                        20,\n                        66,\n                        -83,\n                        74,\n                        -40,\n                        66,\n                        -111,\n                        75,\n                        -41,\n                        66,\n              
          -98,\n                        81,\n                        -15,\n                        66,\n                        72,\n                        4,\n                        3,\n                        66,\n                        -110,\n                        0,\n                        97,\n                        66,\n                        -90,\n                        -45,\n                        -110,\n                        66,\n                        -119,\n                        57,\n                        16,\n                        66,\n                        -65,\n                        -23,\n                        -96,\n                        66,\n                        89,\n                        -29,\n                        -122,\n                        66,\n                        -102,\n                        58,\n                        20,\n                        66,\n                        95,\n                        -60,\n                        114,\n                        66,\n                        -98,\n                        61,\n                        35,\n                        66,\n                        101,\n                        -28,\n                        -14,\n                        66,\n                        -97,\n                        45,\n                        -52,\n                        66,\n                        -80,\n                        -19,\n                        51,\n                        66,\n                        -91,\n                        -58,\n                        25,\n                        66,\n                        94,\n                        -113,\n                        81,\n                        66,\n                        -67,\n                        -15,\n                        123,\n                        66,\n                        -67,\n                        69,\n                        -78,\n                        66,\n        
                -91,\n                        -70,\n                        -64,\n                        66,\n                        -121,\n                        -114,\n                        -66,\n                        66,\n                        -103,\n                        48,\n                        4,\n                        66,\n                        -119,\n                        -89,\n                        -26,\n                        66,\n                        104,\n                        -59,\n                        45,\n                        66,\n                        81,\n                        2,\n                        96,\n                        66,\n                        124,\n                        -110,\n                        -74,\n                        66,\n                        -78,\n                        36,\n                        7,\n                        66,\n                        79,\n                        -12,\n                        127,\n                        66,\n                        -61,\n                        109,\n                        78,\n                        66,\n                        91,\n                        45,\n                        -1,\n                        66,\n                        -107,\n                        82,\n                        -70,\n                        66,\n                        -68,\n                        -49,\n                        126,\n                        66,\n                        113,\n                        126,\n                        47,\n                        66,\n                        -100,\n                        -112,\n                        -29,\n                        66,\n                        -111,\n                        50,\n                        -68,\n                        66,\n                        121,\n                        -99,\n                        -64,\n                        
66,\n                        -99,\n                        -80,\n                        13,\n                        66,\n                        -117,\n                        11,\n                        -62,\n                        66,\n                        80,\n                        43,\n                        -33,\n                        66,\n                        -111,\n                        70,\n                        15,\n                        66,\n                        -77,\n                        -57,\n                        -11,\n                        66,\n                        -85,\n                        -31,\n                        -59,\n                        66,\n                        -78,\n                        96,\n                        67,\n                        66,\n                        104,\n                        48,\n                        70,\n                        66,\n                        -100,\n                        -122,\n                        34,\n                        66,\n                        -90,\n                        83,\n                        -41,\n                        66,\n                        88,\n                        -35,\n                        -13,\n                        66,\n                        71,\n                        84,\n                        -34,\n                        66,\n                        69,\n                        -76,\n                        103,\n                        66,\n                        75,\n                        89,\n                        35,\n                        66,\n                        -60,\n                        -119,\n                        -96,\n                        66,\n                        99,\n                        95,\n                        -8,\n                        66,\n                        105,\n                        5,\n                        -58,\n                    
    66,\n                        -104,\n                        -120,\n                        -110,\n                        66,\n                        83,\n                        20,\n                        99,\n                        66,\n                        -99,\n                        79,\n                        14,\n                        66,\n                        68,\n                        45,\n                        24,\n                        66,\n                        -79,\n                        30,\n                        -116,\n                        66,\n                        -105,\n                        80,\n                        -20,\n                        66,\n                        -111,\n                        39,\n                        61,\n                        66,\n                        92,\n                        -77,\n                        45,\n                        66,\n                        -85,\n                        -95,\n                        124,\n                        66,\n                        -110,\n                        -119,\n                        -96,\n                        66,\n                        -62,\n                        79,\n                        -82,\n                        66,\n                        -75,\n                        54,\n                        -64,\n                        66,\n                        -115,\n                        94,\n                        -124,\n                        66,\n                        -111,\n                        26,\n                        -118,\n                        66,\n                        -122,\n                        16,\n                        -41,\n                        66,\n                        -99,\n                        4,\n                        -64,\n                        66,\n                        -118,\n                        -36,\n                        27,\n     
                   66,\n                        92,\n                        26,\n                        41,\n                        66,\n                        -103,\n                        94,\n                        -15,\n                        66,\n                        84,\n                        -99,\n                        -107,\n                        66,\n                        78,\n                        -34,\n                        -34,\n                        66,\n                        -104,\n                        -38,\n                        -95,\n                        66,\n                        -105,\n                        -99,\n                        124,\n                        66,\n                        -63,\n                        117,\n                        61,\n                        66,\n                        114,\n                        40,\n                        116,\n                        66,\n                        -59,\n                        62,\n                        -111,\n                        66,\n                        -123,\n                        -67,\n                        -7,\n                        66,\n                        -127,\n                        45,\n                        -55,\n                        66,\n                        -62,\n                        107,\n                        -119,\n                        66,\n                        -67,\n                        -13,\n                        -106,\n                        66,\n                        107,\n                        -86,\n                        -93,\n                        66,\n                        91,\n                        6,\n                        -96,\n                        66,\n                        97,\n                        32,\n                        -77,\n                        66,\n                        -94,\n                        -33,\n                   
     -29,\n                        66,\n                        -69,\n                        108,\n                        45,\n                        66,\n                        -103,\n                        -47,\n                        107,\n                        66,\n                        -103,\n                        67,\n                        40,\n                        66,\n                        -79,\n                        -77,\n                        -94,\n                        66,\n                        101,\n                        -40,\n                        67,\n                        66,\n                        -103,\n                        -7,\n                        126,\n                        66,\n                        -97,\n                        56,\n                        -83,\n                        66,\n                        -123,\n                        101,\n                        -123,\n                        66,\n                        -82,\n                        5,\n                        -69,\n                        66,\n                        72,\n                        20,\n                        -119,\n                        66,\n                        71,\n                        2,\n                        125,\n                        66,\n                        81,\n                        -87,\n                        -22,\n                        66,\n                        92,\n                        114,\n                        116,\n                        66,\n                        -82,\n                        64,\n                        92,\n                        66,\n                        -65,\n                        30,\n                        61,\n                        66,\n                        101,\n                        -49,\n                        123,\n                        66,\n                        -121,\n                        -67,\n        
                -60,\n                        66,\n                        -73,\n                        -127,\n                        -106,\n                        66,\n                        -61,\n                        -71,\n                        4,\n                        66,\n                        -107,\n                        73,\n                        -45,\n                        66,\n                        -83,\n                        -65,\n                        -57,\n                        66,\n                        -111,\n                        -13,\n                        -75,\n                        66,\n                        -81,\n                        100,\n                        2,\n                        66,\n                        113,\n                        119,\n                        4,\n                        66,\n                        -75,\n                        103,\n                        -68,\n                        66,\n                        -106,\n                        -106,\n                        -81,\n                        66,\n                        -83,\n                        119,\n                        -83,\n                        66,\n                        100,\n                        -51,\n                        44,\n                        66,\n                        -83,\n                        107,\n                        -69,\n                        66,\n                        -68,\n                        -33,\n                        -12,\n                        66,\n                        -59,\n                        77,\n                        -35,\n                        66,\n                        -106,\n                        76,\n                        3,\n                        66,\n                        -128,\n                        -28,\n                        -125,\n                        66,\n                        -109,\n                 
       107,\n                        34,\n                        66,\n                        -120,\n                        68,\n                        74,\n                        66,\n                        -95,\n                        -5,\n                        73,\n                        66,\n                        -114,\n                        6,\n                        48,\n                        66,\n                        -62,\n                        -49,\n                        77,\n                        66,\n                        -64,\n                        61,\n                        -42,\n                        66,\n                        -119,\n                        -109,\n                        46,\n                        66,\n                        74,\n                        8,\n                        -25,\n                        66,\n                        -65,\n                        29,\n                        114,\n                        66,\n                        109,\n                        -21,\n                        -50,\n                        66,\n                        81,\n                        102,\n                        43,\n                        66,\n                        -70,\n                        100,\n                        -86,\n                        66,\n                        -123,\n                        74,\n                        -103,\n                        66,\n                        -82,\n                        41,\n                        -37,\n                        66,\n                        40,\n                        26,\n                        50,\n                        66,\n                        86,\n                        -95,\n                        15,\n                        66,\n                        97,\n                        -49,\n                        51,\n                        66,\n                        79,\n               
         -107,\n                        -126,\n                        66,\n                        -78,\n                        -79,\n                        -42,\n                        66,\n                        94,\n                        86,\n                        45,\n                        66,\n                        -124,\n                        -41,\n                        3,\n                        66,\n                        -74,\n                        -27,\n                        81,\n                        66,\n                        118,\n                        -120,\n                        64,\n                        66,\n                        -105,\n                        40,\n                        -23,\n                        66,\n                        -101,\n                        -35,\n                        51,\n                        66,\n                        -96,\n                        -123,\n                        43,\n                        66,\n                        -113,\n                        101,\n                        57,\n                        66,\n                        -87,\n                        -121,\n                        86,\n                        66,\n                        -102,\n                        81,\n                        -112,\n                        66,\n                        109,\n                        -127,\n                        -38,\n                        66,\n                        105,\n                        12,\n                        34,\n                        66,\n                        68,\n                        124,\n                        78,\n                        66,\n                        -89,\n                        -117,\n                        25,\n                        66,\n                        69,\n                        73,\n                        93,\n                        66,\n                        
-94,\n                        33,\n                        35,\n                        66,\n                        -64,\n                        -71,\n                        -88,\n                        66,\n                        -107,\n                        -113,\n                        -29,\n                        66,\n                        -64,\n                        -40,\n                        83,\n                        66,\n                        76,\n                        87,\n                        -21,\n                        66,\n                        -113,\n                        -22,\n                        -25,\n                        66,\n                        -115,\n                        -108,\n                        -128,\n                        66,\n                        72,\n                        -55,\n                        4,\n                        66,\n                        72,\n                        88,\n                        53,\n                        66,\n                        -117,\n                        -93,\n                        -7,\n                        66,\n                        -61,\n                        59,\n                        -78,\n                        66,\n                        -115,\n                        -89,\n                        -7,\n                        66,\n                        107,\n                        107,\n                        39,\n                        66,\n                        -111,\n                        88,\n                        -66,\n                        66,\n                        -113,\n                        -10,\n                        -60,\n                        66,\n                        -63,\n                        -65,\n                        -8,\n                        66,\n                        -123,\n                        -14,\n                        -10,\n                        66,\n       
                 -69,\n                        75,\n                        47,\n                        66,\n                        -104,\n                        110,\n                        -37,\n                        66,\n                        83,\n                        67,\n                        2,\n                        66,\n                        -94,\n                        -6,\n                        -30,\n                        66,\n                        -98,\n                        96,\n                        97,\n                        66,\n                        77,\n                        -65,\n                        63,\n                        66,\n                        78,\n                        -99,\n                        -46,\n                        66,\n                        -59,\n                        123,\n                        109,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n   
                     0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 233,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        774779471,\n                        1100060917,\n                        715317128,\n                        588284744,\n                        1104308044,\n                        1146317395,\n                        1160407984,\n                        710802688,\n                  
      638526365,\n                        626546704,\n                        1147656587,\n                        582908873,\n                        134,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1032562858,\n                        1141355825,\n                        760491007,\n                        712130692,\n                        1032561403,\n                        1018588352,\n                        1031283211,\n                        772981298,\n                        767687206,\n                        772535596,\n                        581196676,\n                        625831187,\n                        121,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 21,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 21,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 6539383793818569112,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n            
            255,\n                        379423442,\n                        938577079,\n                        199174183,\n                        982996923,\n                        632628559,\n                        446142685,\n                        259345726,\n                        527763286,\n                        766426417,\n                        601459806,\n                        355648947,\n                        463720061,\n                        186049849,\n                        342617578,\n                        634497351,\n                        725542733,\n                        204310701,\n                        1051670899,\n                        175413837,\n                        241794773,\n                        633304679,\n                        886225718,\n                        794341030,\n                        766171102,\n                        879540979,\n                        579929779,\n                        261576805,\n                        1054049447,\n                        527619513,\n                        515292593,\n                        865857201,\n                        231853267,\n                        592661842,\n                        1060551363,\n                        754435422,\n                        657585651,\n                        303761315,\n                        108189046,\n                        39628461,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        -121,\n                        126,\n                        -2,\n                        68,\n                        47,\n                        59,\n                        68,\n                        66,\n                        -76,\n                        126,\n                        56,\n                        65,\n                  
      -118,\n                        -97,\n                        70,\n                        67,\n                        78,\n                        -110,\n                        -115,\n                        66,\n                        52,\n                        97,\n                        51,\n                        66,\n                        23,\n                        -5,\n                        -2,\n                        66,\n                        -113,\n                        -48,\n                        -119,\n                        66,\n                        112,\n                        48,\n                        -47,\n                        65,\n                        -124,\n                        3,\n                        20,\n                        66,\n                        93,\n                        -14,\n                        92,\n                        66,\n                        78,\n                        -47,\n                        1,\n                        66,\n                        55,\n                        -40,\n                        -100,\n                        66,\n                        75,\n                        45,\n                        -4,\n                        66,\n                        -68,\n                        114,\n                        48,\n                        66,\n                        -67,\n                        -125,\n                        90,\n                        66,\n                        -97,\n                        -5,\n                        -62,\n                        66,\n                        13,\n                        -84,\n                        100,\n                        66,\n                        114,\n                        20,\n                        -6,\n                        66,\n                        -115,\n                        80,\n                        -10,\n                        66,\n              
          -123,\n                        84,\n                        -62,\n                        66,\n                        -75,\n                        -47,\n                        -113,\n                        66,\n                        -103,\n                        -78,\n                        -105,\n                        66,\n                        -114,\n                        89,\n                        124,\n                        66,\n                        88,\n                        -109,\n                        -103,\n                        66,\n                        -88,\n                        99,\n                        -56,\n                        65,\n                        -79,\n                        42,\n                        27,\n                        66,\n                        -76,\n                        126,\n                        23,\n                        66,\n                        78,\n                        87,\n                        -75,\n                        66,\n                        -71,\n                        34,\n                        31,\n                        66,\n                        -116,\n                        -37,\n                        27,\n                        66,\n                        -59,\n                        83,\n                        53,\n                        66,\n                        -69,\n                        -53,\n                        85,\n                        66,\n                        120,\n                        50,\n                        3,\n                        66,\n                        -128,\n                        96,\n                        52,\n                        66,\n                        -99,\n                        113,\n                        -98,\n                        66,\n                        -77,\n                        -38,\n                        -38,\n                        66,\n  
                      -112,\n                        -21,\n                        109,\n                        66,\n                        89,\n                        11,\n                        -84,\n                        66,\n                        -67,\n                        10,\n                        34,\n                        66,\n                        102,\n                        -35,\n                        -8,\n                        66,\n                        76,\n                        121,\n                        120,\n                        66,\n                        -62,\n                        122,\n                        -15,\n                        66,\n                        -62,\n                        10,\n                        -45,\n                        66,\n                        -119,\n                        84,\n                        88,\n                        66,\n                        -73,\n                        -36,\n                        38,\n                        66,\n                        89,\n                        39,\n                        -88,\n                        66,\n                        70,\n                        69,\n                        29,\n                        66,\n                        -126,\n                        34,\n                        -60,\n                        66,\n                        -91,\n                        45,\n                        -128,\n                        66,\n                        107,\n                        8,\n                        -55,\n                        66,\n                        -107,\n                        -76,\n                        12,\n                        66,\n                        -75,\n                        103,\n                        50,\n                        66,\n                        -68,\n                        45,\n                        -102,\n                        
66,\n                        -96,\n                        50,\n                        -102,\n                        66,\n                        -104,\n                        76,\n                        -84,\n                        66,\n                        72,\n                        23,\n                        -89,\n                        66,\n                        -118,\n                        -21,\n                        -16,\n                        66,\n                        -122,\n                        -110,\n                        65,\n                        66,\n                        -113,\n                        -27,\n                        70,\n                        66,\n                        75,\n                        28,\n                        -65,\n                        66,\n                        -118,\n                        21,\n                        110,\n                        66,\n                        -116,\n                        -119,\n                        52,\n                        66,\n                        -128,\n                        -114,\n                        -67,\n                        66,\n                        78,\n                        -21,\n                        83,\n                        66,\n                        -77,\n                        -29,\n                        -69,\n                        66,\n                        -121,\n                        60,\n                        19,\n                        66,\n                        100,\n                        -87,\n                        61,\n                        66,\n                        96,\n                        104,\n                        28,\n                        66,\n                        104,\n                        37,\n                        -15,\n                        66,\n                        -90,\n                        123,\n                        2,\n         
               66,\n                        -109,\n                        8,\n                        45,\n                        66,\n                        -91,\n                        -47,\n                        -73,\n                        66,\n                        -107,\n                        89,\n                        35,\n                        66,\n                        -103,\n                        68,\n                        2,\n                        66,\n                        -105,\n                        86,\n                        121,\n                        66,\n                        -118,\n                        -7,\n                        -47,\n                        66,\n                        -108,\n                        -122,\n                        -32,\n                        66,\n                        -69,\n                        11,\n                        122,\n                        66,\n                        -114,\n                        32,\n                        -30,\n                        66,\n                        80,\n                        -93,\n                        -91,\n                        66,\n                        72,\n                        -14,\n                        125,\n                        66,\n                        -99,\n                        107,\n                        107,\n                        66,\n                        -76,\n                        37,\n                        -73,\n                        66,\n                        91,\n                        68,\n                        -91,\n                        66,\n                        -82,\n                        64,\n                        -33,\n                        66,\n                        -94,\n                        -32,\n                        -85,\n                        66,\n                        94,\n                        2,\n                        
-122,\n                        65,\n                        -51,\n                        -57,\n                        -55,\n                        66,\n                        -68,\n                        -44,\n                        40,\n                        66,\n                        -100,\n                        51,\n                        119,\n                        66,\n                        -70,\n                        46,\n                        12,\n                        66,\n                        -101,\n                        -60,\n                        -6,\n                        66,\n                        -100,\n                        -71,\n                        -81,\n                        65,\n                        -112,\n                        -92,\n                        98,\n                        66,\n                        -107,\n                        -23,\n                        -82,\n                        66,\n                        -106,\n                        45,\n                        -59,\n                        66,\n                        -109,\n                        16,\n                        -31,\n                        66,\n                        -64,\n                        -119,\n                        -89,\n                        66,\n                        104,\n                        -99,\n                        62,\n                        66,\n                        -66,\n                        -40,\n                        -71,\n                        66,\n                        -60,\n                        18,\n                        -104,\n                        63,\n                        -67,\n                        -118,\n                        36,\n                        66,\n                        101,\n                        -68,\n                        10,\n                        66,\n                        -65,\n                        34,\n   
                     -23,\n                        66,\n                        -76,\n                        98,\n                        86,\n                        66,\n                        -86,\n                        -99,\n                        107,\n                        66,\n                        70,\n                        23,\n                        -81,\n                        66,\n                        -107,\n                        120,\n                        -26,\n                        66,\n                        -65,\n                        105,\n                        15,\n                        66,\n                        -64,\n                        -103,\n                        -9,\n                        66,\n                        -120,\n                        -121,\n                        -14,\n                        66,\n                        82,\n                        -46,\n                        52,\n                        66,\n                        -97,\n                        24,\n                        55,\n                        66,\n                        -94,\n                        -87,\n                        40,\n                        66,\n                        -100,\n                        -27,\n                        -100,\n                        66,\n                        97,\n                        -125,\n                        44,\n                        66,\n                        -70,\n                        -127,\n                        32,\n                        66,\n                        -59,\n                        -113,\n                        118,\n                        66,\n                        -127,\n                        -52,\n                        -83,\n                        66,\n                        90,\n                        106,\n                        70,\n                        66,\n                        -117,\n               
         41,\n                        -39,\n                        66,\n                        -128,\n                        -21,\n                        93,\n                        66,\n                        -66,\n                        -60,\n                        -92,\n                        66,\n                        71,\n                        -34,\n                        65,\n                        66,\n                        -62,\n                        110,\n                        -29,\n                        66,\n                        -78,\n                        28,\n                        110,\n                        66,\n                        -100,\n                        -119,\n                        9,\n                        66,\n                        -66,\n                        119,\n                        68,\n                        66,\n                        -120,\n                        72,\n                        -85,\n                        66,\n                        -114,\n                        -126,\n                        6,\n                        66,\n                        -65,\n                        114,\n                        -100,\n                        66,\n                        -63,\n                        98,\n                        -103,\n                        66,\n                        -125,\n                        51,\n                        -114,\n                        66,\n                        109,\n                        79,\n                        50,\n                        66,\n                        -110,\n                        80,\n                        39,\n                        66,\n                        -120,\n                        100,\n                        -94,\n                        66,\n                        116,\n                        59,\n                        114,\n                        66,\n                        
-73,\n                        11,\n                        -42,\n                        66,\n                        -93,\n                        -121,\n                        94,\n                        66,\n                        -108,\n                        92,\n                        77,\n                        66,\n                        -99,\n                        64,\n                        63,\n                        66,\n                        -86,\n                        -52,\n                        -90,\n                        66,\n                        -63,\n                        45,\n                        34,\n                        66,\n                        100,\n                        53,\n                        -63,\n                        66,\n                        74,\n                        -73,\n                        84,\n                        66,\n                        -73,\n                        -32,\n                        -11,\n                        66,\n                        -128,\n                        95,\n                        -3,\n                        66,\n                        -91,\n                        -28,\n                        81,\n                        66,\n                        -59,\n                        -52,\n                        -34,\n                        66,\n                        86,\n                        -121,\n                        -62,\n                        66,\n                        -121,\n                        -35,\n                        -106,\n                        66,\n                        -111,\n                        78,\n                        60,\n                        66,\n                        -59,\n                        126,\n                        -128,\n                        66,\n                        -107,\n                        -91,\n                        49,\n                        66,\n          
              -108,\n                        78,\n                        -72,\n                        66,\n                        -106,\n                        105,\n                        24,\n                        66,\n                        113,\n                        8,\n                        22,\n                        66,\n                        -78,\n                        -31,\n                        -91,\n                        66,\n                        -73,\n                        -6,\n                        -126,\n                        66,\n                        -119,\n                        -124,\n                        -109,\n                        66,\n                        104,\n                        82,\n                        -81,\n                        66,\n                        107,\n                        -74,\n                        -104,\n                        66,\n                        -119,\n                        -99,\n                        25,\n                        66,\n                        -109,\n                        -85,\n                        48,\n                        66,\n                        117,\n                        -21,\n                        36,\n                        66,\n                        -119,\n                        44,\n                        -108,\n                        66,\n                        88,\n                        -97,\n                        31,\n                        66,\n                        -115,\n                        -75,\n                        109,\n                        66,\n                        -106,\n                        -13,\n                        116,\n                        66,\n                        -116,\n                        0,\n                        -48,\n                        66,\n                        -109,\n                        -60,\n                        79,\n                 
       66,\n                        94,\n                        104,\n                        100,\n                        66,\n                        97,\n                        -26,\n                        25,\n                        66,\n                        -103,\n                        62,\n                        79,\n                        66,\n                        -102,\n                        -66,\n                        68,\n                        66,\n                        -127,\n                        -12,\n                        85,\n                        66,\n                        -99,\n                        33,\n                        126,\n                        66,\n                        80,\n                        -104,\n                        58,\n                        66,\n                        72,\n                        -38,\n                        122,\n                        66,\n                        -113,\n                        58,\n                        -91,\n                        66,\n                        -103,\n                        -23,\n                        -31,\n                        66,\n                        91,\n                        90,\n                        105,\n                        66,\n                        77,\n                        -15,\n                        53,\n                        66,\n                        -106,\n                        -82,\n                        92,\n                        66,\n                        -115,\n                        64,\n                        102,\n                        66,\n                        77,\n                        -19,\n                        -79,\n                        66,\n                        -63,\n                        -7,\n                        -53,\n                        66,\n                        83,\n                        -95,\n                        97,\n        
                66,\n                        -126,\n                        -43,\n                        80,\n                        66,\n                        -106,\n                        48,\n                        103,\n                        66,\n                        -60,\n                        -93,\n                        42,\n                        66,\n                        -74,\n                        31,\n                        -6,\n                        66,\n                        -67,\n                        83,\n                        -41,\n                        66,\n                        88,\n                        -26,\n                        -30,\n                        66,\n                        -99,\n                        -45,\n                        21,\n                        66,\n                        -115,\n                        66,\n                        -80,\n                        66,\n                        -124,\n                        -107,\n                        -100,\n                        66,\n                        105,\n                        -53,\n                        121,\n                        66,\n                        -62,\n                        -97,\n                        58,\n                        66,\n                        -59,\n                        -101,\n                        -3,\n                        66,\n                        -115,\n                        46,\n                        -22,\n                        66,\n                        90,\n                        112,\n                        46,\n                        66,\n                        68,\n                        23,\n                        -1,\n                        66,\n                        -78,\n                        29,\n                        -72,\n                        66,\n                        -61,\n                        -17,\n                        
-63,\n                        66,\n                        -87,\n                        -9,\n                        65,\n                        66,\n                        109,\n                        125,\n                        -62,\n                        66,\n                        93,\n                        -101,\n                        83,\n                        66,\n                        -68,\n                        -101,\n                        -21,\n                        66,\n                        71,\n                        -128,\n                        -116,\n                        66,\n                        80,\n                        -122,\n                        115,\n                        66,\n                        -112,\n                        103,\n                        -65,\n                        66,\n                        90,\n                        122,\n                        -36,\n                        66,\n                        -125,\n                        -5,\n                        70,\n                        66,\n                        82,\n                        -77,\n                        34,\n                        66,\n                        -120,\n                        75,\n                        -69,\n                        66,\n                        -92,\n                        77,\n                        125,\n                        66,\n                        -107,\n                        2,\n                        100,\n                        66,\n                        -66,\n                        116,\n                        93,\n                        66,\n                        -94,\n                        97,\n                        -79,\n                        66,\n                        -97,\n                        27,\n                        -66,\n                        66,\n                        -61,\n                        60,\n           
             -59,\n                        66,\n                        88,\n                        -58,\n                        -1,\n                        66,\n                        -106,\n                        17,\n                        -88,\n                        66,\n                        -116,\n                        -89,\n                        -46,\n                        66,\n                        82,\n                        -21,\n                        -71,\n                        66,\n                        81,\n                        -97,\n                        116,\n                        66,\n                        -108,\n                        2,\n                        -89,\n                        66,\n                        -116,\n                        31,\n                        -11,\n                        66,\n                        104,\n                        -36,\n                        39,\n                        66,\n                        -118,\n                        87,\n                        113,\n                        66,\n                        -110,\n                        20,\n                        -25,\n                        66,\n                        -116,\n                        68,\n                        24,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n      
                  0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 234,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n     
                   1160640629,\n                        772987820,\n                        969321077,\n                        1013900867,\n                        640140485,\n                        1140824237,\n                        1161729995,\n                        753376669,\n                        1099489100,\n                        1157272180,\n                        581216848,\n                        600348886,\n                        365,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        990067936,\n                        731616341,\n                        1013274026,\n                        755154808,\n                        645672113,\n                        1147381025,\n                        774818054,\n                        1116902020,\n                        1160585132,\n                        769261171,\n                        600291391,\n                        624767948,\n                        392,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 20,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 20,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -7479358087348062431,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                
\"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        888224293,\n                        76652475,\n                        182159171,\n                        637983398,\n                        773400121,\n                        976854783,\n                        1067228629,\n                        892939501,\n                        526060583,\n                        599221425,\n                        49191529,\n                        853784681,\n                        787926090,\n                        750427586,\n                        191829673,\n                        504980517,\n                        752175741,\n                        228897834,\n                        745219377,\n                        366664030,\n                        921802310,\n                        233686191,\n                        321347507,\n                        606661993,\n                        499103098,\n                        535136857,\n                        802225269,\n                        451631786,\n                        1012501174,\n                        98741855,\n                        232303321,\n                        515874173,\n                        733280861,\n                        265989738,\n                        1043498569,\n                        191170399,\n                        870250978,\n                        753610413,\n                        2599,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        67,\n                        -85,\n                        37,\n           
             63,\n                        67,\n                        -35,\n                        121,\n                        126,\n                        68,\n                        -32,\n                        -78,\n                        127,\n                        69,\n                        72,\n                        32,\n                        1,\n                        66,\n                        -125,\n                        -126,\n                        -16,\n                        66,\n                        -66,\n                        11,\n                        -35,\n                        66,\n                        101,\n                        8,\n                        -127,\n                        66,\n                        83,\n                        19,\n                        83,\n                        66,\n                        66,\n                        126,\n                        -69,\n                        66,\n                        82,\n                        88,\n                        -104,\n                        66,\n                        -102,\n                        60,\n                        99,\n                        66,\n                        -84,\n                        -1,\n                        66,\n                        66,\n                        82,\n                        121,\n                        108,\n                        66,\n                        -73,\n                        11,\n                        -64,\n                        66,\n                        -122,\n                        124,\n                        114,\n                        65,\n                        78,\n                        12,\n                        -39,\n                        66,\n                        -120,\n                        -123,\n                        51,\n                        66,\n                        -110,\n                        104,\n  
                      -57,\n                        66,\n                        -81,\n                        -4,\n                        -97,\n                        66,\n                        -107,\n                        -119,\n                        -75,\n                        66,\n                        -74,\n                        66,\n                        -59,\n                        66,\n                        49,\n                        -105,\n                        45,\n                        62,\n                        120,\n                        -8,\n                        98,\n                        65,\n                        -75,\n                        -13,\n                        86,\n                        66,\n                        -92,\n                        68,\n                        -71,\n                        66,\n                        -97,\n                        63,\n                        -127,\n                        66,\n                        -110,\n                        44,\n                        -119,\n                        66,\n                        -66,\n                        -68,\n                        103,\n                        66,\n                        -94,\n                        69,\n                        35,\n                        66,\n                        111,\n                        38,\n                        102,\n                        66,\n                        -111,\n                        -26,\n                        100,\n                        66,\n                        81,\n                        -65,\n                        78,\n                        66,\n                        69,\n                        -14,\n                        41,\n                        66,\n                        47,\n                        -83,\n                        -67,\n                        66,\n                        86,\n                     
   -86,\n                        -93,\n                        66,\n                        -91,\n                        -126,\n                        -123,\n                        66,\n                        -128,\n                        -90,\n                        48,\n                        66,\n                        -83,\n                        98,\n                        48,\n                        66,\n                        84,\n                        -68,\n                        88,\n                        66,\n                        -92,\n                        52,\n                        79,\n                        66,\n                        -108,\n                        -14,\n                        52,\n                        66,\n                        114,\n                        -122,\n                        -109,\n                        66,\n                        -83,\n                        7,\n                        41,\n                        66,\n                        84,\n                        -68,\n                        30,\n                        66,\n                        103,\n                        52,\n                        75,\n                        66,\n                        -101,\n                        37,\n                        -31,\n                        66,\n                        81,\n                        64,\n                        -18,\n                        66,\n                        -78,\n                        -120,\n                        15,\n                        66,\n                        73,\n                        97,\n                        119,\n                        66,\n                        -105,\n                        -124,\n                        77,\n                        64,\n                        -33,\n                        -117,\n                        -44,\n                        66,\n                        -71,\n         
               -73,\n                        95,\n                        66,\n                        -121,\n                        -72,\n                        36,\n                        66,\n                        92,\n                        121,\n                        54,\n                        66,\n                        -108,\n                        -30,\n                        7,\n                        66,\n                        -93,\n                        33,\n                        11,\n                        66,\n                        -115,\n                        -44,\n                        100,\n                        66,\n                        -67,\n                        36,\n                        -106,\n                        66,\n                        99,\n                        -79,\n                        56,\n                        66,\n                        -122,\n                        87,\n                        92,\n                        66,\n                        -119,\n                        5,\n                        -13,\n                        66,\n                        72,\n                        14,\n                        12,\n                        66,\n                        -74,\n                        58,\n                        84,\n                        66,\n                        -125,\n                        -51,\n                        -6,\n                        66,\n                        -68,\n                        -104,\n                        39,\n                        66,\n                        -122,\n                        -60,\n                        -111,\n                        66,\n                        -94,\n                        102,\n                        72,\n                        66,\n                        95,\n                        -41,\n                        -37,\n                        66,\n                        
-69,\n                        120,\n                        -57,\n                        66,\n                        106,\n                        -61,\n                        -44,\n                        66,\n                        -74,\n                        56,\n                        62,\n                        66,\n                        -122,\n                        120,\n                        119,\n                        66,\n                        -74,\n                        -39,\n                        -80,\n                        66,\n                        -86,\n                        -75,\n                        -83,\n                        66,\n                        110,\n                        76,\n                        -121,\n                        66,\n                        -109,\n                        25,\n                        98,\n                        65,\n                        15,\n                        -3,\n                        43,\n                        66,\n                        97,\n                        -25,\n                        113,\n                        66,\n                        -64,\n                        -55,\n                        -112,\n                        66,\n                        -78,\n                        -122,\n                        -23,\n                        66,\n                        80,\n                        -56,\n                        -93,\n                        66,\n                        -109,\n                        20,\n                        40,\n                        66,\n                        87,\n                        14,\n                        -9,\n                        66,\n                        -64,\n                        -13,\n                        16,\n                        66,\n                        -111,\n                        -126,\n                        -16,\n                        66,\n         
               -103,\n                        21,\n                        -54,\n                        66,\n                        -125,\n                        -84,\n                        -77,\n                        66,\n                        -71,\n                        -69,\n                        65,\n                        66,\n                        -67,\n                        60,\n                        -128,\n                        66,\n                        -103,\n                        78,\n                        121,\n                        66,\n                        -110,\n                        113,\n                        17,\n                        66,\n                        -117,\n                        -35,\n                        -10,\n                        66,\n                        -98,\n                        62,\n                        -54,\n                        66,\n                        -62,\n                        126,\n                        31,\n                        66,\n                        -98,\n                        118,\n                        -27,\n                        66,\n                        76,\n                        37,\n                        -8,\n                        66,\n                        -107,\n                        -44,\n                        -45,\n                        66,\n                        101,\n                        -63,\n                        122,\n                        66,\n                        -59,\n                        -58,\n                        29,\n                        66,\n                        -74,\n                        6,\n                        52,\n                        66,\n                        -121,\n                        -71,\n                        -70,\n                        66,\n                        -64,\n                        -50,\n                        -6,\n                      
  66,\n                        -62,\n                        -67,\n                        -39,\n                        66,\n                        -105,\n                        -123,\n                        -119,\n                        66,\n                        -100,\n                        -105,\n                        -16,\n                        66,\n                        -115,\n                        126,\n                        -47,\n                        66,\n                        -89,\n                        39,\n                        5,\n                        66,\n                        -90,\n                        -10,\n                        -54,\n                        66,\n                        -111,\n                        33,\n                        68,\n                        66,\n                        -102,\n                        -4,\n                        -58,\n                        66,\n                        -98,\n                        -48,\n                        -30,\n                        66,\n                        -71,\n                        -96,\n                        -71,\n                        66,\n                        -75,\n                        -30,\n                        -69,\n                        66,\n                        -62,\n                        25,\n                        52,\n                        66,\n                        -70,\n                        78,\n                        -102,\n                        66,\n                        -96,\n                        80,\n                        -46,\n                        66,\n                        -69,\n                        -1,\n                        34,\n                        66,\n                        -115,\n                        -115,\n                        40,\n                        66,\n                        -97,\n                        -109,\n                        
-87,\n                        66,\n                        -66,\n                        -33,\n                        -85,\n                        66,\n                        -67,\n                        45,\n                        19,\n                        66,\n                        -72,\n                        -6,\n                        46,\n                        66,\n                        -113,\n                        -6,\n                        38,\n                        66,\n                        96,\n                        -31,\n                        -41,\n                        66,\n                        118,\n                        32,\n                        53,\n                        66,\n                        92,\n                        52,\n                        48,\n                        66,\n                        85,\n                        -113,\n                        96,\n                        66,\n                        -117,\n                        -65,\n                        13,\n                        66,\n                        -94,\n                        3,\n                        -86,\n                        66,\n                        93,\n                        -45,\n                        86,\n                        66,\n                        -72,\n                        124,\n                        45,\n                        66,\n                        -67,\n                        -92,\n                        57,\n                        66,\n                        81,\n                        -91,\n                        57,\n                        66,\n                        -109,\n                        -69,\n                        112,\n                        66,\n                        78,\n                        -45,\n                        -87,\n                        66,\n                        -65,\n                        22,\n                      
  -61,\n                        66,\n                        -74,\n                        -77,\n                        -43,\n                        66,\n                        -104,\n                        93,\n                        60,\n                        66,\n                        -97,\n                        -93,\n                        -113,\n                        66,\n                        90,\n                        47,\n                        44,\n                        66,\n                        -121,\n                        126,\n                        -85,\n                        66,\n                        -106,\n                        74,\n                        -115,\n                        66,\n                        -85,\n                        -20,\n                        60,\n                        66,\n                        -64,\n                        -126,\n                        -91,\n                        66,\n                        -62,\n                        20,\n                        3,\n                        66,\n                        73,\n                        -17,\n                        122,\n                        66,\n                        -114,\n                        -113,\n                        -29,\n                        66,\n                        91,\n                        39,\n                        -52,\n                        66,\n                        75,\n                        77,\n                        47,\n                        66,\n                        -60,\n                        69,\n                        34,\n                        66,\n                        111,\n                        85,\n                        -41,\n                        66,\n                        -60,\n                        -44,\n                        -18,\n                        66,\n                        -80,\n                        -88,\n          
              -94,\n                        66,\n                        77,\n                        -4,\n                        -35,\n                        66,\n                        -79,\n                        -93,\n                        -81,\n                        66,\n                        108,\n                        85,\n                        -43,\n                        66,\n                        -95,\n                        -62,\n                        107,\n                        66,\n                        77,\n                        -8,\n                        42,\n                        66,\n                        87,\n                        92,\n                        88,\n                        66,\n                        -106,\n                        -38,\n                        43,\n                        66,\n                        -114,\n                        -20,\n                        10,\n                        66,\n                        69,\n                        100,\n                        -128,\n                        66,\n                        -61,\n                        38,\n                        -102,\n                        66,\n                        -108,\n                        63,\n                        -72,\n                        66,\n                        -63,\n                        53,\n                        87,\n                        66,\n                        -72,\n                        93,\n                        -125,\n                        66,\n                        -67,\n                        99,\n                        2,\n                        66,\n                        -112,\n                        -80,\n                        29,\n                        66,\n                        -63,\n                        -87,\n                        -45,\n                        66,\n                        -111,\n                        
102,\n                        -78,\n                        66,\n                        -68,\n                        18,\n                        -98,\n                        66,\n                        -128,\n                        64,\n                        91,\n                        66,\n                        -93,\n                        -74,\n                        48,\n                        66,\n                        -89,\n                        107,\n                        45,\n                        66,\n                        86,\n                        -97,\n                        121,\n                        66,\n                        -68,\n                        92,\n                        -67,\n                        66,\n                        77,\n                        -120,\n                        -70,\n                        66,\n                        -107,\n                        -32,\n                        27,\n                        66,\n                        -80,\n                        -125,\n                        -51,\n                        66,\n                        -61,\n                        119,\n                        106,\n                        66,\n                        -107,\n                        98,\n                        19,\n                        66,\n                        -60,\n                        -3,\n                        -116,\n                        66,\n                        -73,\n                        3,\n                        62,\n                        66,\n                        -110,\n                        -91,\n                        -87,\n                        66,\n                        -98,\n                        43,\n                        -74,\n                        66,\n                        -62,\n                        -17,\n                        79,\n                        66,\n                        -119,\n          
              63,\n                        -28,\n                        66,\n                        101,\n                        104,\n                        -81,\n                        66,\n                        84,\n                        -30,\n                        64,\n                        66,\n                        74,\n                        -32,\n                        -75,\n                        66,\n                        97,\n                        -14,\n                        37,\n                        66,\n                        93,\n                        97,\n                        -1,\n                        66,\n                        -103,\n                        -58,\n                        109,\n                        66,\n                        -67,\n                        -76,\n                        -103,\n                        66,\n                        -66,\n                        84,\n                        -46,\n                        66,\n                        -117,\n                        -75,\n                        72,\n                        66,\n                        98,\n                        58,\n                        -56,\n                        66,\n                        -111,\n                        73,\n                        -120,\n                        66,\n                        -88,\n                        -49,\n                        70,\n                        66,\n                        106,\n                        30,\n                        95,\n                        66,\n                        125,\n                        -8,\n                        -54,\n                        66,\n                        -110,\n                        69,\n                        -68,\n                        66,\n                        -102,\n                        -54,\n                        92,\n                        66,\n                        74,\n 
                       -46,\n                        -91,\n                        66,\n                        -110,\n                        -121,\n                        -71,\n                        66,\n                        -67,\n                        -98,\n                        -72,\n                        66,\n                        -103,\n                        -15,\n                        92,\n                        66,\n                        -102,\n                        33,\n                        -34,\n                        66,\n                        79,\n                        126,\n                        88,\n                        66,\n                        87,\n                        -20,\n                        -60,\n                        66,\n                        89,\n                        12,\n                        -12,\n                        66,\n                        -73,\n                        72,\n                        46,\n                        66,\n                        -108,\n                        97,\n                        66,\n                        66,\n                        -68,\n                        0,\n                        110,\n                        66,\n                        -72,\n                        -21,\n                        18,\n                        66,\n                        -96,\n                        -18,\n                        -50,\n                        66,\n                        -66,\n                        124,\n                        88,\n                        66,\n                        72,\n                        -13,\n                        -118,\n                        66,\n                        -63,\n                        27,\n                        -106,\n                        66,\n                        -101,\n                        64,\n                        -115,\n                        66,\n                 
       -115,\n                        65,\n                        14,\n                        66,\n                        -106,\n                        -57,\n                        55,\n                        66,\n                        -117,\n                        -60,\n                        -61,\n                        66,\n                        -87,\n                        -79,\n                        -57,\n                        66,\n                        106,\n                        -122,\n                        122,\n                        66,\n                        -71,\n                        46,\n                        51,\n                        66,\n                        -93,\n                        -35,\n                        117,\n                        66,\n                        -59,\n                        26,\n                        -58,\n                        66,\n                        90,\n                        2,\n                        43,\n                        66,\n                        -116,\n                        -58,\n                        102,\n                        66,\n                        -64,\n                        -19,\n                        76,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n               
         0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    
\"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 231,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        773178857,\n                        1012386262,\n                        1145699305,\n                        630754096,\n                        645638578,\n                        1147351603,\n                        1016645539,\n                        973599872,\n                        1104069010,\n                        626487287,\n                        767747537,\n                        581688418,\n                        13,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1160588371,\n                        1160658367,\n                        1016915542,\n                        645169343,\n                        639322541,\n                        760462628,\n                        1018181065,\n                        1104274363,\n                        602565614,\n                        624947605,\n                        970342468,\n                        725151335,\n                        13,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 23,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 23,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -1946928779114242913,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n            
    \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        382683842,\n                        35568365,\n                        743685422,\n                        909763832,\n                        743648325,\n                        313485543,\n                        129614911,\n                        903543093,\n                        529629177,\n                        111118042,\n                        259431401,\n                        110860469,\n                        1063904305,\n                        757033013,\n                        795981997,\n                        460770546,\n                        215414982,\n                        731319789,\n                        190227902,\n                        641649747,\n                        475746101,\n                        70375666,\n                        179386167,\n                        466999759,\n                        1059399863,\n                        593955141,\n                        223647674,\n                        1023085263,\n                        743499999,\n                        178998983,\n                        658101574,\n                        40729343,\n                        1035685738,\n                        452953662,\n                        588859707,\n                        911337771,\n                        304384234,\n                        800819963,\n                        
6227,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        116,\n                        53,\n                        -110,\n                        69,\n                        62,\n                        -16,\n                        117,\n                        69,\n                        123,\n                        61,\n                        -95,\n                        69,\n                        28,\n                        62,\n                        -26,\n                        65,\n                        19,\n                        16,\n                        -32,\n                        66,\n                        70,\n                        98,\n                        -29,\n                        65,\n                        -55,\n                        -30,\n                        -36,\n                        66,\n                        76,\n                        -76,\n                        -69,\n                        66,\n                        -99,\n                        -116,\n                        30,\n                        66,\n                        -100,\n                        -69,\n                        -78,\n                        66,\n                        49,\n                        -51,\n                        -98,\n                        66,\n                        -128,\n                        65,\n                        98,\n                        66,\n                        -64,\n                        -76,\n                        -98,\n                        66,\n                        79,\n                        65,\n                        -77,\n                        66,\n                        113,\n                        -102,\n                        -105,\n                        65,\n             
           -31,\n                        -92,\n                        -103,\n                        66,\n                        75,\n                        108,\n                        -73,\n                        66,\n                        -64,\n                        52,\n                        -12,\n                        64,\n                        -54,\n                        33,\n                        -10,\n                        66,\n                        87,\n                        -10,\n                        -8,\n                        66,\n                        -105,\n                        14,\n                        20,\n                        66,\n                        115,\n                        15,\n                        106,\n                        66,\n                        91,\n                        -3,\n                        -87,\n                        66,\n                        97,\n                        -20,\n                        119,\n                        66,\n                        -107,\n                        -126,\n                        -61,\n                        66,\n                        -86,\n                        28,\n                        3,\n                        66,\n                        72,\n                        -77,\n                        -98,\n                        66,\n                        -90,\n                        -70,\n                        64,\n                        66,\n                        -102,\n                        -67,\n                        95,\n                        66,\n                        -62,\n                        -104,\n                        -68,\n                        66,\n                        81,\n                        -88,\n                        92,\n                        66,\n                        85,\n                        70,\n                        122,\n                        66,\n     
                   -59,\n                        6,\n                        17,\n                        66,\n                        -75,\n                        115,\n                        -111,\n                        66,\n                        -69,\n                        -90,\n                        -23,\n                        66,\n                        -90,\n                        -3,\n                        59,\n                        66,\n                        83,\n                        -52,\n                        38,\n                        66,\n                        -121,\n                        -82,\n                        90,\n                        66,\n                        -102,\n                        -8,\n                        40,\n                        66,\n                        113,\n                        44,\n                        -123,\n                        66,\n                        102,\n                        -66,\n                        -45,\n                        66,\n                        79,\n                        37,\n                        -116,\n                        66,\n                        -108,\n                        21,\n                        14,\n                        66,\n                        -111,\n                        87,\n                        47,\n                        66,\n                        -61,\n                        22,\n                        -118,\n                        66,\n                        -99,\n                        93,\n                        -127,\n                        66,\n                        -109,\n                        -112,\n                        9,\n                        66,\n                        -69,\n                        16,\n                        -83,\n                        66,\n                        -127,\n                        122,\n                        -48,\n                     
   66,\n                        81,\n                        -78,\n                        112,\n                        66,\n                        -118,\n                        -13,\n                        -59,\n                        66,\n                        -76,\n                        42,\n                        -73,\n                        66,\n                        -111,\n                        -52,\n                        -42,\n                        66,\n                        100,\n                        106,\n                        39,\n                        66,\n                        -79,\n                        -105,\n                        -79,\n                        66,\n                        -57,\n                        106,\n                        -92,\n                        66,\n                        -104,\n                        81,\n                        -117,\n                        66,\n                        78,\n                        -25,\n                        -88,\n                        66,\n                        -103,\n                        117,\n                        -113,\n                        66,\n                        -118,\n                        -90,\n                        122,\n                        66,\n                        -125,\n                        47,\n                        87,\n                        66,\n                        111,\n                        -93,\n                        51,\n                        66,\n                        -92,\n                        -124,\n                        57,\n                        66,\n                        -111,\n                        6,\n                        -104,\n                        66,\n                        -114,\n                        89,\n                        -53,\n                        66,\n                        102,\n                        95,\n                        
-119,\n                        66,\n                        -109,\n                        -85,\n                        54,\n                        66,\n                        -107,\n                        54,\n                        16,\n                        66,\n                        -76,\n                        -51,\n                        74,\n                        66,\n                        99,\n                        -91,\n                        -11,\n                        66,\n                        -123,\n                        -70,\n                        -29,\n                        66,\n                        74,\n                        80,\n                        60,\n                        66,\n                        15,\n                        -14,\n                        -9,\n                        66,\n                        -92,\n                        119,\n                        -28,\n                        66,\n                        -117,\n                        9,\n                        -56,\n                        66,\n                        89,\n                        94,\n                        -83,\n                        66,\n                        -70,\n                        90,\n                        -95,\n                        66,\n                        -126,\n                        -114,\n                        -15,\n                        66,\n                        -85,\n                        36,\n                        -55,\n                        66,\n                        -97,\n                        -21,\n                        15,\n                        66,\n                        -63,\n                        -111,\n                        125,\n                        66,\n                        -61,\n                        13,\n                        7,\n                        66,\n                        -120,\n                        -32,\n            
            36,\n                        66,\n                        -71,\n                        68,\n                        126,\n                        66,\n                        124,\n                        -88,\n                        -112,\n                        66,\n                        -105,\n                        99,\n                        -75,\n                        66,\n                        -63,\n                        80,\n                        102,\n                        66,\n                        -121,\n                        -69,\n                        -82,\n                        66,\n                        83,\n                        -8,\n                        32,\n                        66,\n                        97,\n                        -23,\n                        60,\n                        66,\n                        -68,\n                        127,\n                        -29,\n                        66,\n                        86,\n                        81,\n                        -58,\n                        66,\n                        115,\n                        -54,\n                        -34,\n                        66,\n                        -112,\n                        -11,\n                        -70,\n                        66,\n                        86,\n                        -20,\n                        -71,\n                        66,\n                        -123,\n                        -106,\n                        88,\n                        66,\n                        -78,\n                        -96,\n                        -20,\n                        66,\n                        -72,\n                        112,\n                        65,\n                        66,\n                        -90,\n                        -60,\n                        -119,\n                        66,\n                        -106,\n                        
-83,\n                        10,\n                        66,\n                        -123,\n                        -69,\n                        4,\n                        66,\n                        -63,\n                        -89,\n                        46,\n                        66,\n                        -107,\n                        36,\n                        57,\n                        66,\n                        77,\n                        96,\n                        -46,\n                        66,\n                        127,\n                        81,\n                        -88,\n                        66,\n                        -80,\n                        96,\n                        -107,\n                        66,\n                        -121,\n                        51,\n                        112,\n                        66,\n                        -105,\n                        5,\n                        -7,\n                        66,\n                        -79,\n                        -10,\n                        37,\n                        66,\n                        -124,\n                        -68,\n                        -104,\n                        66,\n                        -104,\n                        38,\n                        -66,\n                        66,\n                        -119,\n                        -16,\n                        6,\n                        66,\n                        -98,\n                        29,\n                        79,\n                        66,\n                        119,\n                        -116,\n                        82,\n                        66,\n                        88,\n                        -16,\n                        57,\n                        66,\n                        -67,\n                        -102,\n                        85,\n                        66,\n                        86,\n               
         -6,\n                        -41,\n                        66,\n                        -107,\n                        54,\n                        -91,\n                        66,\n                        84,\n                        124,\n                        -82,\n                        66,\n                        119,\n                        115,\n                        -79,\n                        66,\n                        -120,\n                        -78,\n                        -18,\n                        66,\n                        -110,\n                        1,\n                        117,\n                        66,\n                        87,\n                        -27,\n                        -5,\n                        66,\n                        -70,\n                        117,\n                        -91,\n                        66,\n                        -108,\n                        -88,\n                        -114,\n                        66,\n                        -61,\n                        100,\n                        -94,\n                        66,\n                        -65,\n                        75,\n                        66,\n                        66,\n                        121,\n                        -19,\n                        -78,\n                        66,\n                        -65,\n                        85,\n                        -105,\n                        66,\n                        77,\n                        -21,\n                        33,\n                        66,\n                        68,\n                        -95,\n                        -75,\n                        66,\n                        -77,\n                        66,\n                        60,\n                        66,\n                        105,\n                        111,\n                        -84,\n                        66,\n                        
-102,\n                        -10,\n                        -11,\n                        66,\n                        -110,\n                        -26,\n                        45,\n                        66,\n                        -84,\n                        33,\n                        -44,\n                        66,\n                        110,\n                        33,\n                        -32,\n                        66,\n                        -119,\n                        -46,\n                        -2,\n                        66,\n                        84,\n                        94,\n                        -56,\n                        66,\n                        -69,\n                        -96,\n                        -126,\n                        66,\n                        -66,\n                        -106,\n                        -35,\n                        66,\n                        89,\n                        -109,\n                        107,\n                        66,\n                        -94,\n                        -26,\n                        62,\n                        66,\n                        -117,\n                        -106,\n                        -82,\n                        66,\n                        82,\n                        91,\n                        14,\n                        66,\n                        -102,\n                        16,\n                        -6,\n                        66,\n                        -60,\n                        -81,\n                        -109,\n                        66,\n                        -73,\n                        35,\n                        92,\n                        66,\n                        -74,\n                        62,\n                        -54,\n                        66,\n                        110,\n                        117,\n                        -5,\n                        66,\n        
                -107,\n                        86,\n                        -80,\n                        66,\n                        -60,\n                        11,\n                        -112,\n                        66,\n                        -61,\n                        -78,\n                        120,\n                        66,\n                        -65,\n                        78,\n                        -43,\n                        66,\n                        -73,\n                        -25,\n                        -102,\n                        66,\n                        -126,\n                        -81,\n                        -73,\n                        66,\n                        -66,\n                        -123,\n                        -73,\n                        66,\n                        -123,\n                        104,\n                        -76,\n                        66,\n                        -121,\n                        -94,\n                        -89,\n                        66,\n                        -124,\n                        74,\n                        -99,\n                        66,\n                        -116,\n                        -2,\n                        -76,\n                        66,\n                        -68,\n                        -2,\n                        -89,\n                        66,\n                        75,\n                        -120,\n                        -116,\n                        66,\n                        -65,\n                        -100,\n                        -43,\n                        66,\n                        -61,\n                        102,\n                        86,\n                        66,\n                        -78,\n                        68,\n                        -98,\n                        66,\n                        72,\n                        37,\n                        30,\n              
          66,\n                        -74,\n                        -15,\n                        -123,\n                        66,\n                        116,\n                        7,\n                        -82,\n                        66,\n                        -80,\n                        -101,\n                        6,\n                        66,\n                        -78,\n                        -91,\n                        -25,\n                        66,\n                        -114,\n                        117,\n                        -71,\n                        66,\n                        -94,\n                        -109,\n                        -32,\n                        66,\n                        -76,\n                        125,\n                        55,\n                        66,\n                        -127,\n                        55,\n                        -106,\n                        66,\n                        -85,\n                        75,\n                        32,\n                        66,\n                        74,\n                        127,\n                        -34,\n                        66,\n                        -86,\n                        67,\n                        -16,\n                        66,\n                        -61,\n                        -43,\n                        -27,\n                        66,\n                        -114,\n                        35,\n                        -85,\n                        66,\n                        -74,\n                        70,\n                        -101,\n                        66,\n                        -73,\n                        -13,\n                        -87,\n                        66,\n                        -108,\n                        3,\n                        -65,\n                        66,\n                        74,\n                        -60,\n                        
53,\n                        66,\n                        104,\n                        -108,\n                        -13,\n                        66,\n                        103,\n                        -39,\n                        10,\n                        66,\n                        110,\n                        58,\n                        50,\n                        66,\n                        88,\n                        108,\n                        110,\n                        66,\n                        -64,\n                        119,\n                        93,\n                        66,\n                        -87,\n                        104,\n                        76,\n                        66,\n                        -81,\n                        5,\n                        94,\n                        66,\n                        -116,\n                        92,\n                        13,\n                        66,\n                        -67,\n                        -25,\n                        -1,\n                        66,\n                        90,\n                        19,\n                        -18,\n                        66,\n                        77,\n                        39,\n                        18,\n                        66,\n                        -83,\n                        -33,\n                        24,\n                        66,\n                        84,\n                        -41,\n                        -115,\n                        66,\n                        -90,\n                        -78,\n                        -30,\n                        66,\n                        -62,\n                        74,\n                        -31,\n                        66,\n                        -106,\n                        -120,\n                        90,\n                        66,\n                        -94,\n                        -45,\n                  
      -96,\n                        66,\n                        124,\n                        22,\n                        -8,\n                        66,\n                        93,\n                        65,\n                        13,\n                        66,\n                        -120,\n                        57,\n                        -103,\n                        66,\n                        88,\n                        96,\n                        -37,\n                        66,\n                        -100,\n                        -23,\n                        71,\n                        66,\n                        -70,\n                        96,\n                        75,\n                        66,\n                        -71,\n                        83,\n                        -109,\n                        66,\n                        -120,\n                        -36,\n                        -41,\n                        66,\n                        -108,\n                        103,\n                        19,\n                        66,\n                        88,\n                        14,\n                        -63,\n                        66,\n                        -119,\n                        -114,\n                        57,\n                        66,\n                        -64,\n                        112,\n                        41,\n                        66,\n                        88,\n                        53,\n                        119,\n                        66,\n                        -116,\n                        -86,\n                        12,\n                        66,\n                        84,\n                        -53,\n                        -9,\n                        66,\n                        -65,\n                        44,\n                        40,\n                        66,\n                        74,\n                        -61,\n           
             71,\n                        66,\n                        -65,\n                        50,\n                        107,\n                        66,\n                        -109,\n                        82,\n                        114,\n                        66,\n                        -68,\n                        45,\n                        -110,\n                        66,\n                        -111,\n                        -61,\n                        -33,\n                        66,\n                        83,\n                        -31,\n                        124,\n                        66,\n                        77,\n                        75,\n                        122,\n                        66,\n                        -59,\n                        38,\n                        -13,\n                        66,\n                        82,\n                        10,\n                        127,\n                        66,\n                        102,\n                        -47,\n                        -61,\n                        66,\n                        69,\n                        27,\n                        66,\n                        66,\n                        -124,\n                        -31,\n                        17,\n                        66,\n                        -69,\n                        -21,\n                        40,\n                        66,\n                        -68,\n                        -75,\n                        -10,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n        
                0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 231,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1103205176,\n                        1018712510,\n                        1160588330,\n                        1140797273,\n                        759950828,\n                        597274046,\n                        1142932018,\n                        1099482568,\n                        626303302,\n                        1157212808,\n                        638762557,\n                        631089349,\n                        13,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        770037556,\n                        989995012,\n                        1027784929,\n                        1159935863,\n                        1116909284,\n                        989477531,\n                        1155822910,\n                        600459560,\n                        602053855,\n                        985046548,\n                        581842249,\n                        755445598,\n                        13,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 23,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 23,\n                    \"partialTreeStateEnabled\": true\n                },\n     
           \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -506526637525828854,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        576280926,\n                        927163981,\n                        605657545,\n                        36542934,\n                        979282387,\n                        513316351,\n                        75349067,\n                        527426881,\n                        594341451,\n                        215076597,\n                        44371942,\n                        610711145,\n                        722929218,\n                        727435191,\n                        250609621,\n                        39364477,\n                        670951999,\n                        497525985,\n                        791842027,\n                        383748555,\n                        74503847,\n                        131771513,\n                        195497686,\n                        871192506,\n                        471404001,\n                        481101102,\n                        43350835,\n                        897546731,\n                        241166303,\n                        746235349,\n                        
753518170,\n                        748063309,\n                        110970575,\n                        997932150,\n                        387185630,\n                        862779169,\n                        181844805,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -39,\n                        -78,\n                        -59,\n                        69,\n                        39,\n                        -89,\n                        22,\n                        68,\n                        -109,\n                        100,\n                        125,\n                        69,\n                        104,\n                        -38,\n                        -124,\n                        66,\n                        60,\n                        39,\n                        -25,\n                        66,\n                        -97,\n                        -128,\n                        45,\n                        66,\n                        58,\n                        -104,\n                        27,\n                        66,\n                        -77,\n                        -26,\n                        49,\n                        66,\n                        -101,\n                        -12,\n                        -99,\n                        68,\n                        -122,\n                        92,\n                        68,\n                        64,\n                        -25,\n                        -86,\n                        -18,\n                        64,\n                        104,\n                        47,\n                        -36,\n                        66,\n                        4,\n                        119,\n                        
-91,\n                        66,\n                        -59,\n                        -105,\n                        57,\n                        66,\n                        -58,\n                        -17,\n                        -11,\n                        66,\n                        1,\n                        63,\n                        -61,\n                        66,\n                        26,\n                        120,\n                        -45,\n                        66,\n                        -84,\n                        -44,\n                        -89,\n                        66,\n                        -63,\n                        -77,\n                        74,\n                        66,\n                        -92,\n                        41,\n                        -97,\n                        66,\n                        -67,\n                        16,\n                        -104,\n                        66,\n                        122,\n                        93,\n                        -5,\n                        66,\n                        -76,\n                        64,\n                        113,\n                        66,\n                        4,\n                        28,\n                        71,\n                        66,\n                        98,\n                        65,\n                        8,\n                        66,\n                        -96,\n                        55,\n                        104,\n                        66,\n                        -71,\n                        100,\n                        -87,\n                        66,\n                        -112,\n                        17,\n                        -11,\n                        66,\n                        -100,\n                        -101,\n                        109,\n                        66,\n                        126,\n                        -119,\n                
        76,\n                        66,\n                        76,\n                        -81,\n                        -1,\n                        66,\n                        78,\n                        110,\n                        -35,\n                        66,\n                        -107,\n                        -117,\n                        74,\n                        66,\n                        -125,\n                        -106,\n                        -79,\n                        66,\n                        -104,\n                        14,\n                        -44,\n                        66,\n                        110,\n                        -99,\n                        -47,\n                        66,\n                        96,\n                        -89,\n                        -11,\n                        66,\n                        -62,\n                        54,\n                        -66,\n                        66,\n                        73,\n                        0,\n                        -93,\n                        66,\n                        83,\n                        106,\n                        -57,\n                        66,\n                        91,\n                        42,\n                        -85,\n                        66,\n                        -74,\n                        -9,\n                        127,\n                        66,\n                        -97,\n                        -81,\n                        29,\n                        66,\n                        -82,\n                        -106,\n                        15,\n                        66,\n                        -101,\n                        -22,\n                        74,\n                        66,\n                        93,\n                        9,\n                        -54,\n                        66,\n                        -62,\n                        120,\n       
                 114,\n                        66,\n                        117,\n                        -108,\n                        -128,\n                        66,\n                        -123,\n                        99,\n                        -56,\n                        66,\n                        -62,\n                        47,\n                        -86,\n                        66,\n                        -108,\n                        -124,\n                        -121,\n                        66,\n                        -106,\n                        -80,\n                        78,\n                        66,\n                        -60,\n                        88,\n                        -56,\n                        66,\n                        -108,\n                        -73,\n                        -30,\n                        66,\n                        -100,\n                        54,\n                        -37,\n                        66,\n                        99,\n                        -107,\n                        -76,\n                        66,\n                        108,\n                        -109,\n                        -21,\n                        66,\n                        113,\n                        -128,\n                        80,\n                        66,\n                        -110,\n                        101,\n                        14,\n                        66,\n                        -67,\n                        -108,\n                        124,\n                        66,\n                        -78,\n                        -11,\n                        -88,\n                        66,\n                        125,\n                        85,\n                        -95,\n                        66,\n                        84,\n                        36,\n                        87,\n                        66,\n                        -72,\n            
            111,\n                        -79,\n                        66,\n                        -71,\n                        -91,\n                        21,\n                        66,\n                        -99,\n                        -86,\n                        -2,\n                        66,\n                        -97,\n                        -13,\n                        -110,\n                        66,\n                        73,\n                        28,\n                        -78,\n                        66,\n                        -88,\n                        101,\n                        34,\n                        66,\n                        -117,\n                        40,\n                        39,\n                        66,\n                        -65,\n                        -115,\n                        106,\n                        66,\n                        -81,\n                        -13,\n                        65,\n                        66,\n                        -60,\n                        125,\n                        -118,\n                        66,\n                        -79,\n                        57,\n                        71,\n                        66,\n                        -107,\n                        41,\n                        111,\n                        66,\n                        -65,\n                        18,\n                        -60,\n                        66,\n                        -106,\n                        95,\n                        102,\n                        66,\n                        -100,\n                        119,\n                        25,\n                        66,\n                        81,\n                        -41,\n                        -45,\n                        66,\n                        -120,\n                        -44,\n                        -2,\n                        66,\n                        
-122,\n                        -94,\n                        87,\n                        66,\n                        101,\n                        86,\n                        1,\n                        66,\n                        -103,\n                        72,\n                        -66,\n                        66,\n                        -98,\n                        100,\n                        -64,\n                        66,\n                        -95,\n                        -14,\n                        41,\n                        66,\n                        -65,\n                        100,\n                        4,\n                        66,\n                        87,\n                        60,\n                        100,\n                        66,\n                        107,\n                        -99,\n                        -128,\n                        66,\n                        -91,\n                        55,\n                        -38,\n                        66,\n                        98,\n                        -48,\n                        -33,\n                        66,\n                        -117,\n                        -98,\n                        104,\n                        66,\n                        108,\n                        55,\n                        121,\n                        66,\n                        -118,\n                        -73,\n                        -13,\n                        66,\n                        -111,\n                        95,\n                        19,\n                        66,\n                        -120,\n                        -65,\n                        -88,\n                        66,\n                        -108,\n                        -91,\n                        110,\n                        66,\n                        104,\n                        -61,\n                        11,\n                        66,\n         
               -90,\n                        -68,\n                        71,\n                        66,\n                        -61,\n                        -66,\n                        21,\n                        66,\n                        84,\n                        27,\n                        -48,\n                        66,\n                        113,\n                        -5,\n                        -29,\n                        66,\n                        68,\n                        45,\n                        1,\n                        66,\n                        -103,\n                        37,\n                        -34,\n                        66,\n                        70,\n                        42,\n                        -63,\n                        66,\n                        -117,\n                        -59,\n                        -38,\n                        66,\n                        79,\n                        88,\n                        -94,\n                        66,\n                        -62,\n                        119,\n                        -44,\n                        66,\n                        -73,\n                        8,\n                        9,\n                        66,\n                        80,\n                        69,\n                        36,\n                        66,\n                        127,\n                        15,\n                        34,\n                        66,\n                        100,\n                        -6,\n                        -73,\n                        66,\n                        -123,\n                        -93,\n                        -19,\n                        66,\n                        79,\n                        -26,\n                        -57,\n                        66,\n                        108,\n                        54,\n                        -105,\n                        66,\n        
                101,\n                        -65,\n                        -22,\n                        66,\n                        -64,\n                        -4,\n                        34,\n                        66,\n                        -64,\n                        -105,\n                        -101,\n                        66,\n                        123,\n                        114,\n                        46,\n                        66,\n                        -122,\n                        -50,\n                        -125,\n                        66,\n                        76,\n                        34,\n                        -116,\n                        66,\n                        77,\n                        -71,\n                        -2,\n                        66,\n                        -104,\n                        104,\n                        -79,\n                        66,\n                        -96,\n                        101,\n                        -120,\n                        66,\n                        -119,\n                        112,\n                        -16,\n                        66,\n                        115,\n                        -96,\n                        56,\n                        66,\n                        -70,\n                        68,\n                        90,\n                        66,\n                        -98,\n                        -95,\n                        -20,\n                        66,\n                        78,\n                        13,\n                        -120,\n                        66,\n                        81,\n                        72,\n                        74,\n                        66,\n                        -106,\n                        -5,\n                        45,\n                        66,\n                        -111,\n                        -95,\n                        -10,\n                    
    66,\n                        98,\n                        15,\n                        -72,\n                        66,\n                        -71,\n                        -121,\n                        92,\n                        66,\n                        -65,\n                        -58,\n                        63,\n                        66,\n                        104,\n                        15,\n                        67,\n                        66,\n                        -72,\n                        -112,\n                        47,\n                        66,\n                        -89,\n                        -1,\n                        -36,\n                        66,\n                        -113,\n                        99,\n                        -19,\n                        66,\n                        -69,\n                        103,\n                        -2,\n                        66,\n                        -125,\n                        42,\n                        118,\n                        66,\n                        -117,\n                        93,\n                        -128,\n                        66,\n                        -62,\n                        -21,\n                        -67,\n                        66,\n                        -62,\n                        -74,\n                        -93,\n                        66,\n                        -94,\n                        78,\n                        82,\n                        66,\n                        -105,\n                        108,\n                        -122,\n                        66,\n                        106,\n                        -77,\n                        40,\n                        66,\n                        72,\n                        -102,\n                        47,\n                        66,\n                        -73,\n                        -36,\n                        87,\n      
                  66,\n                        -117,\n                        -16,\n                        86,\n                        66,\n                        -61,\n                        90,\n                        -1,\n                        66,\n                        -64,\n                        103,\n                        49,\n                        66,\n                        -116,\n                        -105,\n                        77,\n                        66,\n                        -122,\n                        -58,\n                        78,\n                        66,\n                        -70,\n                        16,\n                        44,\n                        66,\n                        -73,\n                        18,\n                        -53,\n                        66,\n                        -67,\n                        68,\n                        -27,\n                        66,\n                        86,\n                        14,\n                        29,\n                        66,\n                        -107,\n                        -96,\n                        8,\n                        66,\n                        -73,\n                        -23,\n                        52,\n                        66,\n                        -64,\n                        85,\n                        88,\n                        66,\n                        -113,\n                        26,\n                        -126,\n                        66,\n                        -111,\n                        21,\n                        88,\n                        66,\n                        88,\n                        -126,\n                        8,\n                        66,\n                        100,\n                        -40,\n                        -103,\n                        66,\n                        -69,\n                        62,\n                        
-13,\n                        66,\n                        -72,\n                        79,\n                        95,\n                        66,\n                        103,\n                        -56,\n                        -51,\n                        66,\n                        -80,\n                        -58,\n                        -34,\n                        66,\n                        97,\n                        -54,\n                        46,\n                        66,\n                        -61,\n                        -1,\n                        73,\n                        66,\n                        -109,\n                        121,\n                        27,\n                        66,\n                        95,\n                        -74,\n                        -20,\n                        66,\n                        -117,\n                        -66,\n                        27,\n                        66,\n                        69,\n                        -35,\n                        -49,\n                        66,\n                        -110,\n                        -86,\n                        89,\n                        66,\n                        -77,\n                        17,\n                        -21,\n                        66,\n                        -121,\n                        -7,\n                        120,\n                        66,\n                        -118,\n                        40,\n                        -84,\n                        66,\n                        94,\n                        104,\n                        43,\n                        66,\n                        -72,\n                        -66,\n                        52,\n                        66,\n                        -63,\n                        -67,\n                        50,\n                        66,\n                        -67,\n                        -5,\n               
         -30,\n                        66,\n                        -118,\n                        108,\n                        -119,\n                        66,\n                        104,\n                        100,\n                        21,\n                        66,\n                        -72,\n                        17,\n                        -91,\n                        66,\n                        -77,\n                        -107,\n                        110,\n                        66,\n                        -94,\n                        72,\n                        101,\n                        66,\n                        -66,\n                        123,\n                        -10,\n                        66,\n                        -67,\n                        11,\n                        -111,\n                        66,\n                        -112,\n                        13,\n                        18,\n                        66,\n                        -117,\n                        75,\n                        73,\n                        66,\n                        -72,\n                        -94,\n                        -118,\n                        66,\n                        85,\n                        39,\n                        -124,\n                        66,\n                        -62,\n                        -41,\n                        12,\n                        66,\n                        -122,\n                        101,\n                        83,\n                        66,\n                        -65,\n                        -51,\n                        -89,\n                        66,\n                        -100,\n                        -38,\n                        -27,\n                        66,\n                        125,\n                        33,\n                        47,\n                        66,\n                        -66,\n                        
10,\n                        -126,\n                        66,\n                        84,\n                        31,\n                        88,\n                        66,\n                        -125,\n                        127,\n                        -101,\n                        66,\n                        -63,\n                        16,\n                        85,\n                        66,\n                        74,\n                        -101,\n                        -63,\n                        66,\n                        -109,\n                        -126,\n                        91,\n                        66,\n                        -64,\n                        -109,\n                        -111,\n                        66,\n                        -77,\n                        -98,\n                        -25,\n                        66,\n                        -64,\n                        3,\n                        -5,\n                        66,\n                        87,\n                        23,\n                        107,\n                        66,\n                        -99,\n                        -51,\n                        101,\n                        66,\n                        87,\n                        17,\n                        123,\n                        66,\n                        -124,\n                        -75,\n                        -78,\n                        66,\n                        -123,\n                        -72,\n                        -34,\n                        66,\n                        -107,\n                        66,\n                        -13,\n                        66,\n                        -86,\n                        -121,\n                        -5,\n                        66,\n                        -62,\n                        -114,\n                        -2,\n                        66,\n                        -114,\n     
                   -5,\n                        83,\n                        66,\n                        -103,\n                        77,\n                        124,\n                        66,\n                        -58,\n                        22,\n                        116,\n                        66,\n                        -66,\n                        -103,\n                        32,\n                        66,\n                        -97,\n                        -50,\n                        81,\n                        66,\n                        -118,\n                        -94,\n                        -84,\n                        66,\n                        -121,\n                        95,\n                        -72,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n  
                      0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n              
          0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 222,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1018055060,\n                        1146258376,\n                        1143063005,\n                        773243645,\n                        624258709,\n                        985054328,\n                        774052592,\n                        1155856840,\n                        629206420,\n                        626507348,\n                        624794434,\n                        819031,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1033033540,\n                        717445259,\n                        1156704290,\n                        1162081312,\n                        1146137927,\n                        1013745580,\n                        1140760097,\n                        1018568642,\n                        968619758,\n                        586117183,\n                        581682838,\n                        994003,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                  
  \"nodeFreeIndexPointer\": 32,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 32,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -26643182478891948,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        52102590,\n                        803441321,\n                        845014002,\n                        402434489,\n                        452533423,\n                        857576527,\n                        1038034607,\n                        905684145,\n                        488373930,\n                        214396250,\n                        355904417,\n                        530702145,\n                        1046798206,\n                        332791595,\n                        504888034,\n                        777824675,\n                        892833502,\n                        861728509,\n                        660034926,\n                        70744530,\n                        501807487,\n                        761722539,\n                        519539025,\n                        182281293,\n                        
353970593,\n                        187506475,\n                        578410162,\n                        1050517306,\n                        729400670,\n                        325250227,\n                        714712277,\n                        1039845225,\n                        590912757,\n                        313752750,\n                        848418633,\n                        775234665,\n                        907772981,\n                        651257981,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -18,\n                        62,\n                        -6,\n                        67,\n                        -19,\n                        -107,\n                        114,\n                        66,\n                        60,\n                        -76,\n                        113,\n                        66,\n                        -78,\n                        -82,\n                        -78,\n                        66,\n                        2,\n                        57,\n                        -79,\n                        65,\n                        -30,\n                        -89,\n                        -101,\n                        65,\n                        -108,\n                        87,\n                        79,\n                        66,\n                        -128,\n                        -29,\n                        -89,\n                        66,\n                        -74,\n                        5,\n                        -111,\n                        65,\n                        118,\n                        -110,\n                        62,\n                        66,\n                        -85,\n                        -128,\n                        -33,\n         
               65,\n                        -121,\n                        120,\n                        29,\n                        66,\n                        -75,\n                        118,\n                        68,\n                        66,\n                        114,\n                        -108,\n                        -46,\n                        63,\n                        -27,\n                        -25,\n                        62,\n                        66,\n                        106,\n                        -111,\n                        30,\n                        66,\n                        -112,\n                        101,\n                        -86,\n                        66,\n                        -105,\n                        -30,\n                        127,\n                        66,\n                        -113,\n                        -55,\n                        -35,\n                        66,\n                        -93,\n                        -101,\n                        49,\n                        66,\n                        -71,\n                        94,\n                        -56,\n                        66,\n                        124,\n                        -21,\n                        106,\n                        66,\n                        115,\n                        63,\n                        -121,\n                        66,\n                        115,\n                        75,\n                        81,\n                        66,\n                        121,\n                        4,\n                        45,\n                        66,\n                        -113,\n                        107,\n                        -73,\n                        66,\n                        91,\n                        36,\n                        -32,\n                        66,\n                        -81,\n                        14,\n                      
  -6,\n                        66,\n                        86,\n                        29,\n                        -93,\n                        66,\n                        -113,\n                        -90,\n                        67,\n                        66,\n                        99,\n                        -20,\n                        31,\n                        66,\n                        -58,\n                        -74,\n                        -105,\n                        66,\n                        -122,\n                        23,\n                        44,\n                        66,\n                        88,\n                        -99,\n                        4,\n                        66,\n                        -125,\n                        -68,\n                        14,\n                        66,\n                        -107,\n                        -60,\n                        -32,\n                        66,\n                        109,\n                        120,\n                        22,\n                        66,\n                        -99,\n                        -67,\n                        -5,\n                        66,\n                        -113,\n                        -21,\n                        113,\n                        66,\n                        -60,\n                        122,\n                        -107,\n                        66,\n                        -112,\n                        -37,\n                        8,\n                        66,\n                        -65,\n                        -102,\n                        42,\n                        66,\n                        -96,\n                        48,\n                        54,\n                        66,\n                        -126,\n                        4,\n                        66,\n                        66,\n                        -102,\n                        80,\n           
             61,\n                        66,\n                        108,\n                        56,\n                        -34,\n                        66,\n                        69,\n                        -17,\n                        -117,\n                        66,\n                        -67,\n                        -38,\n                        -21,\n                        66,\n                        -67,\n                        -68,\n                        -29,\n                        66,\n                        -112,\n                        51,\n                        70,\n                        66,\n                        -120,\n                        -81,\n                        -38,\n                        66,\n                        97,\n                        -115,\n                        -57,\n                        66,\n                        -113,\n                        -71,\n                        -47,\n                        66,\n                        -69,\n                        74,\n                        -76,\n                        66,\n                        -94,\n                        -87,\n                        -124,\n                        66,\n                        -75,\n                        -64,\n                        75,\n                        65,\n                        -73,\n                        112,\n                        16,\n                        66,\n                        -60,\n                        56,\n                        40,\n                        63,\n                        -18,\n                        63,\n                        74,\n                        66,\n                        -80,\n                        -73,\n                        -40,\n                        66,\n                        -115,\n                        -75,\n                        25,\n                        66,\n                        -117,\n                        
63,\n                        -94,\n                        66,\n                        -78,\n                        22,\n                        -32,\n                        66,\n                        -90,\n                        66,\n                        67,\n                        66,\n                        108,\n                        -60,\n                        2,\n                        66,\n                        -67,\n                        -102,\n                        -117,\n                        66,\n                        -113,\n                        14,\n                        112,\n                        66,\n                        -76,\n                        94,\n                        -118,\n                        66,\n                        125,\n                        22,\n                        74,\n                        66,\n                        96,\n                        -28,\n                        70,\n                        66,\n                        -72,\n                        0,\n                        -113,\n                        66,\n                        94,\n                        95,\n                        31,\n                        66,\n                        -63,\n                        107,\n                        -9,\n                        66,\n                        79,\n                        -45,\n                        -52,\n                        66,\n                        73,\n                        4,\n                        121,\n                        66,\n                        -103,\n                        84,\n                        -107,\n                        66,\n                        -74,\n                        -46,\n                        39,\n                        66,\n                        75,\n                        48,\n                        27,\n                        66,\n                        73,\n                      
  -48,\n                        19,\n                        66,\n                        -124,\n                        -82,\n                        70,\n                        66,\n                        102,\n                        81,\n                        -41,\n                        66,\n                        104,\n                        93,\n                        39,\n                        66,\n                        -116,\n                        6,\n                        -18,\n                        66,\n                        -117,\n                        52,\n                        -9,\n                        66,\n                        -89,\n                        -81,\n                        36,\n                        66,\n                        95,\n                        43,\n                        -49,\n                        66,\n                        -75,\n                        124,\n                        6,\n                        66,\n                        107,\n                        -62,\n                        84,\n                        66,\n                        -115,\n                        -109,\n                        1,\n                        66,\n                        91,\n                        16,\n                        97,\n                        66,\n                        -124,\n                        82,\n                        -26,\n                        66,\n                        -121,\n                        -15,\n                        -122,\n                        66,\n                        -71,\n                        47,\n                        89,\n                        66,\n                        -107,\n                        61,\n                        -127,\n                        66,\n                        -121,\n                        -81,\n                        17,\n                        66,\n                        107,\n             
           59,\n                        -105,\n                        66,\n                        -77,\n                        -62,\n                        -112,\n                        66,\n                        -65,\n                        -40,\n                        14,\n                        66,\n                        86,\n                        83,\n                        -46,\n                        66,\n                        80,\n                        -35,\n                        40,\n                        66,\n                        37,\n                        -87,\n                        61,\n                        66,\n                        -77,\n                        -121,\n                        31,\n                        66,\n                        -98,\n                        -7,\n                        15,\n                        66,\n                        -128,\n                        47,\n                        -44,\n                        66,\n                        78,\n                        9,\n                        32,\n                        66,\n                        -98,\n                        -121,\n                        -37,\n                        66,\n                        -89,\n                        -121,\n                        -75,\n                        66,\n                        -99,\n                        -52,\n                        121,\n                        66,\n                        -62,\n                        10,\n                        124,\n                        66,\n                        -128,\n                        -64,\n                        -24,\n                        66,\n                        -108,\n                        12,\n                        101,\n                        66,\n                        -72,\n                        -92,\n                        54,\n                        66,\n                        
-116,\n                        18,\n                        -12,\n                        66,\n                        86,\n                        23,\n                        3,\n                        66,\n                        -71,\n                        67,\n                        -92,\n                        66,\n                        -75,\n                        124,\n                        -82,\n                        66,\n                        -64,\n                        -110,\n                        -69,\n                        66,\n                        -64,\n                        -66,\n                        -83,\n                        66,\n                        91,\n                        44,\n                        24,\n                        66,\n                        -92,\n                        105,\n                        -114,\n                        66,\n                        125,\n                        -127,\n                        -55,\n                        66,\n                        91,\n                        24,\n                        -62,\n                        66,\n                        -73,\n                        -24,\n                        -125,\n                        66,\n                        -100,\n                        -55,\n                        41,\n                        66,\n                        -71,\n                        73,\n                        107,\n                        66,\n                        -61,\n                        77,\n                        -24,\n                        66,\n                        86,\n                        63,\n                        -67,\n                        66,\n                        -103,\n                        37,\n                        16,\n                        66,\n                        -118,\n                        -109,\n                        44,\n                        66,\n           
             -105,\n                        -9,\n                        -33,\n                        66,\n                        -79,\n                        48,\n                        -19,\n                        66,\n                        -74,\n                        97,\n                        9,\n                        66,\n                        -96,\n                        73,\n                        -69,\n                        66,\n                        -59,\n                        65,\n                        120,\n                        66,\n                        -88,\n                        103,\n                        -36,\n                        66,\n                        88,\n                        -30,\n                        -115,\n                        66,\n                        82,\n                        -114,\n                        29,\n                        66,\n                        104,\n                        -37,\n                        -89,\n                        66,\n                        -99,\n                        -41,\n                        1,\n                        66,\n                        -77,\n                        -16,\n                        -48,\n                        66,\n                        -99,\n                        24,\n                        -117,\n                        66,\n                        -59,\n                        -32,\n                        -9,\n                        66,\n                        -117,\n                        62,\n                        83,\n                        66,\n                        -99,\n                        43,\n                        -54,\n                        66,\n                        -111,\n                        115,\n                        -37,\n                        66,\n                        -117,\n                        -9,\n                        110,\n                        
66,\n                        -65,\n                        108,\n                        89,\n                        66,\n                        -78,\n                        -6,\n                        -42,\n                        66,\n                        -98,\n                        127,\n                        -31,\n                        66,\n                        -72,\n                        -72,\n                        33,\n                        66,\n                        -117,\n                        -76,\n                        114,\n                        66,\n                        -116,\n                        46,\n                        -18,\n                        66,\n                        103,\n                        17,\n                        -113,\n                        66,\n                        -83,\n                        -97,\n                        -106,\n                        66,\n                        -62,\n                        35,\n                        48,\n                        66,\n                        -99,\n                        20,\n                        27,\n                        66,\n                        -61,\n                        -45,\n                        -81,\n                        66,\n                        -117,\n                        -15,\n                        -119,\n                        66,\n                        -105,\n                        15,\n                        124,\n                        66,\n                        108,\n                        -67,\n                        -88,\n                        66,\n                        89,\n                        -125,\n                        -4,\n                        66,\n                        -107,\n                        -25,\n                        4,\n                        66,\n                        -64,\n                        -38,\n                        -115,\n     
                   66,\n                        -100,\n                        -20,\n                        -48,\n                        66,\n                        88,\n                        6,\n                        77,\n                        66,\n                        86,\n                        -22,\n                        0,\n                        66,\n                        -96,\n                        -108,\n                        -89,\n                        66,\n                        94,\n                        100,\n                        -55,\n                        66,\n                        -66,\n                        14,\n                        28,\n                        66,\n                        -89,\n                        60,\n                        37,\n                        66,\n                        -118,\n                        -84,\n                        34,\n                        66,\n                        96,\n                        73,\n                        76,\n                        66,\n                        80,\n                        64,\n                        -114,\n                        66,\n                        -115,\n                        -104,\n                        19,\n                        66,\n                        75,\n                        30,\n                        98,\n                        66,\n                        -105,\n                        91,\n                        -106,\n                        66,\n                        74,\n                        31,\n                        -11,\n                        66,\n                        -118,\n                        -3,\n                        -11,\n                        66,\n                        -82,\n                        -72,\n                        -72,\n                        66,\n                        -113,\n                        35,\n                        
-83,\n                        66,\n                        -116,\n                        -113,\n                        -91,\n                        66,\n                        -73,\n                        84,\n                        5,\n                        66,\n                        -109,\n                        -94,\n                        -104,\n                        66,\n                        119,\n                        -19,\n                        39,\n                        66,\n                        -117,\n                        17,\n                        108,\n                        66,\n                        -98,\n                        117,\n                        -86,\n                        66,\n                        -102,\n                        56,\n                        -113,\n                        66,\n                        96,\n                        90,\n                        98,\n                        66,\n                        -111,\n                        53,\n                        -126,\n                        66,\n                        -104,\n                        -64,\n                        57,\n                        66,\n                        86,\n                        77,\n                        -121,\n                        66,\n                        -65,\n                        -98,\n                        51,\n                        66,\n                        -113,\n                        -58,\n                        -58,\n                        66,\n                        77,\n                        -104,\n                        29,\n                        66,\n                        72,\n                        50,\n                        -46,\n                        66,\n                        -125,\n                        -36,\n                        15,\n                        66,\n                        91,\n                        -31,\n       
                 126,\n                        66,\n                        -98,\n                        106,\n                        81,\n                        66,\n                        -83,\n                        -115,\n                        86,\n                        66,\n                        -116,\n                        -24,\n                        22,\n                        66,\n                        94,\n                        110,\n                        41,\n                        66,\n                        -70,\n                        65,\n                        -104,\n                        66,\n                        94,\n                        113,\n                        -94,\n                        66,\n                        -107,\n                        -51,\n                        68,\n                        66,\n                        -100,\n                        122,\n                        -57,\n                        66,\n                        -71,\n                        -16,\n                        105,\n                        66,\n                        -119,\n                        -100,\n                        -19,\n                        66,\n                        106,\n                        -14,\n                        55,\n                        66,\n                        -113,\n                        -97,\n                        89,\n                        66,\n                        -117,\n                        19,\n                        -2,\n                        66,\n                        -122,\n                        -27,\n                        107,\n                        66,\n                        114,\n                        -11,\n                        -3,\n                        66,\n                        -112,\n                        10,\n                        -126,\n                        66,\n                        -76,\n                
        -26,\n                        80,\n                        66,\n                        90,\n                        -77,\n                        -89,\n                        66,\n                        87,\n                        10,\n                        57,\n                        66,\n                        -115,\n                        45,\n                        62,\n                        66,\n                        -108,\n                        -120,\n                        -57,\n                        66,\n                        -110,\n                        -27,\n                        -117,\n                        66,\n                        110,\n                        -96,\n                        44,\n                        66,\n                        -125,\n                        3,\n                        101,\n                        66,\n                        87,\n                        -89,\n                        -119,\n                        66,\n                        -112,\n                        20,\n                        102,\n                        66,\n                        -128,\n                        -5,\n                        103,\n                        66,\n                        -61,\n                        6,\n                        -60,\n                        66,\n                        -72,\n                        -111,\n                        -22,\n                        66,\n                        -98,\n                        -92,\n                        -3,\n                        66,\n                        73,\n                        -98,\n                        -6,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                  
      0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n  
                      0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 228,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1160410184,\n                        1017177992,\n                        583521974,\n                        969262118,\n                        716729444,\n                        600505403,\n                        1100016922,\n                        753852334,\n                        643567427,\n                        1160578652,\n                        588239222,\n                        597076159,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1155824878,\n                        630820141,\n                        581396444,\n                        581216390,\n                        1025975534,\n                        1013272006,\n                        726472544,\n                        626323669,\n                     
   768460445,\n                        711865301,\n                        582921926,\n                        587687386,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 26,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 26,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 2636386493963874353,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        788174302,\n                        798717110,\n                        366505670,\n                        119613373,\n                        66512334,\n                        853456866,\n                        502119545,\n                        842074535,\n                        914171442,\n                        870410549,\n                        714861873,\n                        489239669,\n                        991668698,\n                        578117463,\n                        74496707,\n                        237819874,\n                        232737587,\n                      
  1067103550,\n                        599611629,\n                        250213437,\n                        1021368235,\n                        228956670,\n                        595533297,\n                        263410797,\n                        594373949,\n                        402575103,\n                        858203485,\n                        45956269,\n                        194420585,\n                        1010017503,\n                        221618018,\n                        619120463,\n                        706196786,\n                        111868117,\n                        191735609,\n                        903846469,\n                        380274103,\n                        18166,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        112,\n                        -66,\n                        86,\n                        69,\n                        92,\n                        39,\n                        28,\n                        66,\n                        79,\n                        -121,\n                        62,\n                        66,\n                        -104,\n                        40,\n                        -80,\n                        66,\n                        78,\n                        91,\n                        -59,\n                        66,\n                        90,\n                        -80,\n                        91,\n                        66,\n                        -91,\n                        -67,\n                        83,\n                        66,\n                        -118,\n                        32,\n                        118,\n                        66,\n                        -96,\n                        -65,\n                        -83,\n   
                     66,\n                        -95,\n                        71,\n                        -113,\n                        65,\n                        -63,\n                        -16,\n                        19,\n                        66,\n                        68,\n                        117,\n                        14,\n                        66,\n                        -78,\n                        -93,\n                        -10,\n                        66,\n                        -74,\n                        -66,\n                        -10,\n                        66,\n                        127,\n                        -8,\n                        -88,\n                        65,\n                        85,\n                        -90,\n                        7,\n                        66,\n                        24,\n                        -54,\n                        -111,\n                        66,\n                        -61,\n                        -27,\n                        -40,\n                        66,\n                        -125,\n                        86,\n                        60,\n                        66,\n                        -112,\n                        117,\n                        34,\n                        66,\n                        -109,\n                        62,\n                        -126,\n                        66,\n                        -76,\n                        -26,\n                        98,\n                        66,\n                        -75,\n                        4,\n                        118,\n                        64,\n                        -51,\n                        72,\n                        -93,\n                        66,\n                        -82,\n                        -56,\n                        61,\n                        66,\n                        -83,\n                        106,\n                      
  96,\n                        66,\n                        -110,\n                        120,\n                        -39,\n                        66,\n                        -117,\n                        14,\n                        -128,\n                        66,\n                        89,\n                        -25,\n                        -42,\n                        66,\n                        -122,\n                        67,\n                        5,\n                        66,\n                        -84,\n                        3,\n                        -1,\n                        66,\n                        111,\n                        12,\n                        -109,\n                        66,\n                        -98,\n                        -50,\n                        113,\n                        66,\n                        -102,\n                        90,\n                        7,\n                        66,\n                        -125,\n                        57,\n                        21,\n                        66,\n                        123,\n                        -32,\n                        39,\n                        66,\n                        -110,\n                        5,\n                        73,\n                        66,\n                        117,\n                        96,\n                        3,\n                        65,\n                        -108,\n                        59,\n                        -116,\n                        66,\n                        87,\n                        -77,\n                        31,\n                        66,\n                        -82,\n                        31,\n                        115,\n                        66,\n                        -59,\n                        -9,\n                        -47,\n                        66,\n                        123,\n                        -21,\n                
        105,\n                        66,\n                        -119,\n                        1,\n                        -92,\n                        66,\n                        -69,\n                        -31,\n                        -89,\n                        66,\n                        -85,\n                        -49,\n                        -113,\n                        66,\n                        82,\n                        -50,\n                        -90,\n                        66,\n                        -98,\n                        -102,\n                        -119,\n                        66,\n                        -68,\n                        -79,\n                        17,\n                        66,\n                        -97,\n                        60,\n                        -79,\n                        66,\n                        -88,\n                        -113,\n                        42,\n                        66,\n                        -90,\n                        -10,\n                        -63,\n                        66,\n                        121,\n                        122,\n                        -90,\n                        66,\n                        73,\n                        34,\n                        45,\n                        66,\n                        -110,\n                        27,\n                        7,\n                        66,\n                        -107,\n                        -103,\n                        -16,\n                        66,\n                        -78,\n                        41,\n                        43,\n                        66,\n                        -71,\n                        -93,\n                        -120,\n                        66,\n                        -63,\n                        -45,\n                        -108,\n                        66,\n                        -99,\n                        
-50,\n                        40,\n                        66,\n                        -102,\n                        116,\n                        19,\n                        66,\n                        -118,\n                        42,\n                        15,\n                        66,\n                        15,\n                        26,\n                        -98,\n                        66,\n                        83,\n                        -13,\n                        16,\n                        66,\n                        -99,\n                        127,\n                        -43,\n                        66,\n                        -104,\n                        24,\n                        45,\n                        66,\n                        -103,\n                        -18,\n                        -54,\n                        66,\n                        86,\n                        48,\n                        -103,\n                        66,\n                        -111,\n                        -55,\n                        -72,\n                        66,\n                        -70,\n                        -50,\n                        -49,\n                        66,\n                        -63,\n                        -68,\n                        6,\n                        66,\n                        -61,\n                        120,\n                        81,\n                        66,\n                        -79,\n                        65,\n                        95,\n                        66,\n                        -60,\n                        65,\n                        57,\n                        66,\n                        -61,\n                        -63,\n                        -39,\n                        66,\n                        21,\n                        -123,\n                        -121,\n                        66,\n                        -113,\n            
            45,\n                        54,\n                        66,\n                        -121,\n                        -121,\n                        118,\n                        66,\n                        108,\n                        -49,\n                        -84,\n                        66,\n                        -71,\n                        98,\n                        -25,\n                        66,\n                        84,\n                        -69,\n                        -71,\n                        66,\n                        -98,\n                        13,\n                        -97,\n                        66,\n                        71,\n                        9,\n                        58,\n                        66,\n                        -115,\n                        -62,\n                        -38,\n                        66,\n                        69,\n                        67,\n                        27,\n                        66,\n                        -74,\n                        24,\n                        56,\n                        66,\n                        -66,\n                        100,\n                        111,\n                        66,\n                        -97,\n                        -95,\n                        -79,\n                        66,\n                        92,\n                        -125,\n                        -63,\n                        66,\n                        -66,\n                        -109,\n                        109,\n                        66,\n                        -63,\n                        -14,\n                        113,\n                        66,\n                        100,\n                        40,\n                        44,\n                        66,\n                        -122,\n                        -8,\n                        -35,\n                        66,\n                        -97,\n 
                       -8,\n                        98,\n                        66,\n                        -67,\n                        -88,\n                        76,\n                        66,\n                        -127,\n                        86,\n                        -27,\n                        66,\n                        84,\n                        18,\n                        125,\n                        66,\n                        -104,\n                        29,\n                        92,\n                        66,\n                        -62,\n                        -128,\n                        -82,\n                        66,\n                        -84,\n                        36,\n                        -25,\n                        66,\n                        -113,\n                        2,\n                        35,\n                        66,\n                        -63,\n                        -8,\n                        28,\n                        66,\n                        -72,\n                        -82,\n                        50,\n                        66,\n                        -127,\n                        14,\n                        -87,\n                        66,\n                        -110,\n                        101,\n                        -14,\n                        66,\n                        -102,\n                        15,\n                        67,\n                        66,\n                        -99,\n                        -74,\n                        -63,\n                        66,\n                        23,\n                        -38,\n                        -128,\n                        66,\n                        -113,\n                        120,\n                        -72,\n                        66,\n                        98,\n                        39,\n                        -27,\n                        66,\n                    
    -79,\n                        51,\n                        36,\n                        66,\n                        -69,\n                        0,\n                        34,\n                        66,\n                        87,\n                        -41,\n                        106,\n                        66,\n                        -98,\n                        -59,\n                        -97,\n                        66,\n                        -101,\n                        -107,\n                        -9,\n                        66,\n                        -108,\n                        -107,\n                        -64,\n                        66,\n                        -103,\n                        61,\n                        -11,\n                        66,\n                        98,\n                        -36,\n                        -120,\n                        66,\n                        -73,\n                        120,\n                        -54,\n                        66,\n                        89,\n                        -22,\n                        -66,\n                        66,\n                        69,\n                        -38,\n                        97,\n                        66,\n                        -120,\n                        -40,\n                        -103,\n                        66,\n                        -90,\n                        -34,\n                        70,\n                        66,\n                        -96,\n                        107,\n                        -47,\n                        66,\n                        -67,\n                        -70,\n                        95,\n                        66,\n                        -89,\n                        -86,\n                        -69,\n                        66,\n                        -78,\n                        17,\n                        -7,\n                        66,\n     
                   84,\n                        -87,\n                        35,\n                        66,\n                        -80,\n                        -40,\n                        113,\n                        66,\n                        118,\n                        -36,\n                        110,\n                        66,\n                        -94,\n                        -7,\n                        71,\n                        66,\n                        -68,\n                        91,\n                        -125,\n                        66,\n                        -116,\n                        -92,\n                        -84,\n                        66,\n                        124,\n                        -15,\n                        -98,\n                        66,\n                        69,\n                        57,\n                        -39,\n                        66,\n                        -74,\n                        51,\n                        -87,\n                        66,\n                        69,\n                        92,\n                        -110,\n                        66,\n                        -113,\n                        16,\n                        100,\n                        66,\n                        -116,\n                        64,\n                        -112,\n                        66,\n                        81,\n                        14,\n                        49,\n                        66,\n                        -95,\n                        -83,\n                        18,\n                        66,\n                        -77,\n                        -25,\n                        102,\n                        66,\n                        87,\n                        122,\n                        114,\n                        66,\n                        81,\n                        94,\n                        0,\n                        
66,\n                        -123,\n                        -8,\n                        -58,\n                        66,\n                        -124,\n                        26,\n                        -12,\n                        66,\n                        87,\n                        -66,\n                        40,\n                        66,\n                        -59,\n                        -119,\n                        45,\n                        66,\n                        -68,\n                        54,\n                        -66,\n                        66,\n                        -126,\n                        -17,\n                        102,\n                        66,\n                        72,\n                        86,\n                        94,\n                        66,\n                        114,\n                        -102,\n                        -11,\n                        66,\n                        80,\n                        -10,\n                        -112,\n                        66,\n                        -96,\n                        13,\n                        -104,\n                        66,\n                        86,\n                        -58,\n                        27,\n                        66,\n                        79,\n                        21,\n                        57,\n                        66,\n                        -113,\n                        -3,\n                        -124,\n                        66,\n                        -80,\n                        105,\n                        3,\n                        66,\n                        -117,\n                        120,\n                        13,\n                        66,\n                        -77,\n                        -64,\n                        -11,\n                        66,\n                        -65,\n                        -48,\n                        99,\n            
            66,\n                        -102,\n                        75,\n                        -1,\n                        66,\n                        -102,\n                        83,\n                        91,\n                        66,\n                        -99,\n                        37,\n                        -94,\n                        66,\n                        98,\n                        90,\n                        -87,\n                        66,\n                        -83,\n                        64,\n                        126,\n                        66,\n                        77,\n                        -57,\n                        -109,\n                        66,\n                        -102,\n                        -99,\n                        23,\n                        66,\n                        -106,\n                        112,\n                        -96,\n                        66,\n                        122,\n                        86,\n                        97,\n                        66,\n                        103,\n                        14,\n                        53,\n                        66,\n                        -106,\n                        37,\n                        72,\n                        66,\n                        -107,\n                        97,\n                        -32,\n                        66,\n                        -111,\n                        80,\n                        41,\n                        66,\n                        91,\n                        84,\n                        123,\n                        66,\n                        -79,\n                        -84,\n                        -35,\n                        66,\n                        -112,\n                        -17,\n                        -96,\n                        66,\n                        86,\n                        27,\n                        -63,\n   
                     66,\n                        74,\n                        -24,\n                        82,\n                        66,\n                        -72,\n                        -7,\n                        -6,\n                        66,\n                        -63,\n                        -23,\n                        57,\n                        66,\n                        82,\n                        -20,\n                        74,\n                        66,\n                        -117,\n                        10,\n                        -74,\n                        66,\n                        88,\n                        -103,\n                        3,\n                        66,\n                        97,\n                        15,\n                        -55,\n                        66,\n                        -68,\n                        -86,\n                        92,\n                        66,\n                        93,\n                        94,\n                        -105,\n                        66,\n                        -64,\n                        -63,\n                        -7,\n                        66,\n                        -124,\n                        -20,\n                        104,\n                        66,\n                        -69,\n                        -119,\n                        -81,\n                        66,\n                        -65,\n                        19,\n                        -20,\n                        66,\n                        -68,\n                        44,\n                        -47,\n                        66,\n                        -61,\n                        65,\n                        -2,\n                        66,\n                        -101,\n                        25,\n                        59,\n                        66,\n                        -107,\n                        -76,\n                        
100,\n                        66,\n                        106,\n                        -5,\n                        53,\n                        66,\n                        -120,\n                        51,\n                        11,\n                        66,\n                        -99,\n                        105,\n                        -68,\n                        66,\n                        -107,\n                        93,\n                        -23,\n                        66,\n                        -65,\n                        110,\n                        -18,\n                        66,\n                        -60,\n                        110,\n                        -53,\n                        66,\n                        -128,\n                        -87,\n                        59,\n                        66,\n                        -95,\n                        58,\n                        72,\n                        66,\n                        94,\n                        -115,\n                        -30,\n                        66,\n                        -110,\n                        -77,\n                        -62,\n                        66,\n                        -90,\n                        110,\n                        -89,\n                        66,\n                        -111,\n                        15,\n                        -127,\n                        66,\n                        80,\n                        28,\n                        -31,\n                        66,\n                        -74,\n                        32,\n                        118,\n                        66,\n                        -93,\n                        -44,\n                        76,\n                        66,\n                        -112,\n                        118,\n                        15,\n                        66,\n                        -98,\n                        55,\n         
               -75,\n                        66,\n                        -59,\n                        -89,\n                        42,\n                        66,\n                        84,\n                        -22,\n                        -89,\n                        66,\n                        -112,\n                        -70,\n                        64,\n                        66,\n                        -60,\n                        -52,\n                        39,\n                        66,\n                        86,\n                        77,\n                        30,\n                        66,\n                        -118,\n                        -113,\n                        17,\n                        66,\n                        -111,\n                        -87,\n                        -118,\n                        66,\n                        -117,\n                        -117,\n                        115,\n                        66,\n                        -79,\n                        5,\n                        -109,\n                        66,\n                        85,\n                        -35,\n                        103,\n                        66,\n                        -64,\n                        -74,\n                        33,\n                        66,\n                        69,\n                        57,\n                        47,\n                        66,\n                        -119,\n                        17,\n                        14,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                    
    0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n    
                    0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 225,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1112601230,\n                        1098481256,\n                        715612390,\n                        755116096,\n                        774811544,\n                        755091314,\n                        1099883041,\n                        1104134558,\n                        710979488,\n                        639137542,\n                        970204702,\n                        21543125,\n                        0,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1100082838,\n                        1026743281,\n                        974929489,\n         
               1157397494,\n                        1162075454,\n                        770030701,\n                        1147892029,\n                        1143048553,\n                        712423184,\n                        1155684874,\n                        974042806,\n                        26391631,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 29,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 29,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 1481498699720728495,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        395504574,\n                        585025198,\n                        900579529,\n                        660706858,\n                        129354621,\n                        584653689,\n                        227894714,\n                        496541255,\n                        500241443,\n                        884430901,\n                        196410546,\n                        367394527,\n    
                    311609830,\n                        737270875,\n                        454334127,\n                        510170670,\n                        178633154,\n                        179615919,\n                        312450938,\n                        93929382,\n                        259980462,\n                        535254690,\n                        572598979,\n                        718329150,\n                        887737550,\n                        371910753,\n                        1033713388,\n                        710371046,\n                        34957989,\n                        909313710,\n                        621919469,\n                        590414885,\n                        859822039,\n                        440044767,\n                        595507497,\n                        102226114,\n                        343784573,\n                        878258135,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        68,\n                        -101,\n                        -40,\n                        -100,\n                        66,\n                        -125,\n                        79,\n                        10,\n                        66,\n                        -76,\n                        -29,\n                        32,\n                        67,\n                        -47,\n                        -38,\n                        35,\n                        66,\n                        86,\n                        105,\n                        -128,\n                        66,\n                        84,\n                        58,\n                        -22,\n                        67,\n                        1,\n                        -59,\n                        123,\n                        66,\n        
                -114,\n                        -96,\n                        55,\n                        66,\n                        35,\n                        -14,\n                        -91,\n                        66,\n                        113,\n                        95,\n                        -115,\n                        65,\n                        -47,\n                        -126,\n                        69,\n                        66,\n                        -122,\n                        -60,\n                        121,\n                        66,\n                        -119,\n                        -120,\n                        -18,\n                        66,\n                        -82,\n                        106,\n                        -94,\n                        65,\n                        12,\n                        27,\n                        -61,\n                        66,\n                        77,\n                        104,\n                        -38,\n                        66,\n                        -72,\n                        94,\n                        -13,\n                        69,\n                        100,\n                        -74,\n                        -54,\n                        66,\n                        -73,\n                        117,\n                        -43,\n                        66,\n                        -110,\n                        -8,\n                        43,\n                        66,\n                        -112,\n                        -109,\n                        -66,\n                        66,\n                        102,\n                        122,\n                        -124,\n                        66,\n                        -65,\n                        44,\n                        -100,\n                        66,\n                        78,\n                        96,\n                        114,\n                
        66,\n                        -86,\n                        126,\n                        115,\n                        66,\n                        109,\n                        82,\n                        90,\n                        66,\n                        -77,\n                        33,\n                        75,\n                        65,\n                        17,\n                        115,\n                        -86,\n                        63,\n                        -35,\n                        -47,\n                        2,\n                        66,\n                        114,\n                        126,\n                        36,\n                        66,\n                        -105,\n                        -6,\n                        12,\n                        65,\n                        37,\n                        34,\n                        -15,\n                        66,\n                        -80,\n                        87,\n                        -18,\n                        66,\n                        -73,\n                        118,\n                        -59,\n                        66,\n                        -109,\n                        -3,\n                        72,\n                        66,\n                        -115,\n                        -53,\n                        -32,\n                        66,\n                        -60,\n                        -67,\n                        -94,\n                        66,\n                        -110,\n                        -4,\n                        -39,\n                        66,\n                        -93,\n                        -8,\n                        -29,\n                        66,\n                        -89,\n                        23,\n                        -40,\n                        66,\n                        -101,\n                        30,\n                        47,\n        
                66,\n                        -94,\n                        -20,\n                        -80,\n                        66,\n                        -126,\n                        88,\n                        -88,\n                        66,\n                        -76,\n                        57,\n                        58,\n                        63,\n                        -35,\n                        -21,\n                        122,\n                        66,\n                        -100,\n                        -123,\n                        -115,\n                        66,\n                        -79,\n                        84,\n                        66,\n                        66,\n                        -82,\n                        3,\n                        58,\n                        65,\n                        -33,\n                        65,\n                        92,\n                        66,\n                        -96,\n                        81,\n                        82,\n                        66,\n                        -104,\n                        -32,\n                        -96,\n                        66,\n                        -84,\n                        -24,\n                        57,\n                        66,\n                        -102,\n                        -102,\n                        8,\n                        66,\n                        -83,\n                        19,\n                        25,\n                        66,\n                        -95,\n                        15,\n                        -124,\n                        66,\n                        -108,\n                        3,\n                        -38,\n                        66,\n                        -78,\n                        -12,\n                        -6,\n                        66,\n                        -66,\n                        115,\n                        
79,\n                        66,\n                        94,\n                        -107,\n                        -50,\n                        66,\n                        -74,\n                        -74,\n                        48,\n                        66,\n                        -64,\n                        -115,\n                        -111,\n                        66,\n                        -97,\n                        -46,\n                        115,\n                        66,\n                        83,\n                        17,\n                        64,\n                        66,\n                        -120,\n                        -51,\n                        -76,\n                        66,\n                        82,\n                        111,\n                        -41,\n                        66,\n                        -99,\n                        -24,\n                        -56,\n                        66,\n                        82,\n                        2,\n                        -23,\n                        66,\n                        -64,\n                        -39,\n                        -78,\n                        66,\n                        85,\n                        -10,\n                        -48,\n                        66,\n                        109,\n                        13,\n                        -112,\n                        66,\n                        -75,\n                        29,\n                        -106,\n                        66,\n                        -76,\n                        31,\n                        -103,\n                        66,\n                        -68,\n                        121,\n                        104,\n                        66,\n                        75,\n                        68,\n                        -34,\n                        66,\n                        -66,\n                        45,\n           
             111,\n                        66,\n                        -118,\n                        77,\n                        -64,\n                        66,\n                        -102,\n                        -101,\n                        -95,\n                        66,\n                        -108,\n                        96,\n                        -35,\n                        66,\n                        90,\n                        -6,\n                        108,\n                        66,\n                        -91,\n                        73,\n                        0,\n                        66,\n                        101,\n                        12,\n                        -79,\n                        66,\n                        76,\n                        30,\n                        36,\n                        66,\n                        92,\n                        6,\n                        -27,\n                        66,\n                        -99,\n                        -33,\n                        100,\n                        66,\n                        -128,\n                        23,\n                        86,\n                        66,\n                        -111,\n                        126,\n                        115,\n                        66,\n                        -127,\n                        70,\n                        -89,\n                        66,\n                        62,\n                        109,\n                        -26,\n                        66,\n                        -113,\n                        53,\n                        109,\n                        66,\n                        -125,\n                        -72,\n                        -12,\n                        66,\n                        -71,\n                        61,\n                        -93,\n                        66,\n                        -106,\n                        
35,\n                        -13,\n                        66,\n                        -117,\n                        -5,\n                        -16,\n                        66,\n                        -97,\n                        -43,\n                        -121,\n                        66,\n                        -88,\n                        62,\n                        -91,\n                        66,\n                        99,\n                        6,\n                        53,\n                        66,\n                        -71,\n                        -99,\n                        98,\n                        66,\n                        -70,\n                        -60,\n                        41,\n                        66,\n                        -67,\n                        81,\n                        104,\n                        66,\n                        90,\n                        -43,\n                        105,\n                        66,\n                        -63,\n                        118,\n                        -76,\n                        66,\n                        -95,\n                        126,\n                        33,\n                        66,\n                        89,\n                        -122,\n                        109,\n                        66,\n                        -96,\n                        -29,\n                        117,\n                        66,\n                        -63,\n                        3,\n                        -49,\n                        66,\n                        -100,\n                        -88,\n                        79,\n                        66,\n                        74,\n                        108,\n                        50,\n                        66,\n                        -110,\n                        -46,\n                        19,\n                        66,\n                        -90,\n               
         -42,\n                        -74,\n                        66,\n                        114,\n                        36,\n                        6,\n                        66,\n                        91,\n                        31,\n                        43,\n                        66,\n                        85,\n                        31,\n                        121,\n                        66,\n                        -105,\n                        -44,\n                        34,\n                        66,\n                        -107,\n                        82,\n                        23,\n                        66,\n                        -64,\n                        -58,\n                        81,\n                        66,\n                        -121,\n                        -10,\n                        38,\n                        66,\n                        99,\n                        -105,\n                        31,\n                        66,\n                        -83,\n                        0,\n                        -128,\n                        66,\n                        -104,\n                        -78,\n                        -86,\n                        66,\n                        -70,\n                        73,\n                        47,\n                        66,\n                        -68,\n                        2,\n                        -68,\n                        66,\n                        -113,\n                        -121,\n                        -79,\n                        66,\n                        89,\n                        85,\n                        108,\n                        66,\n                        -119,\n                        84,\n                        -82,\n                        66,\n                        79,\n                        -44,\n                        -38,\n                        66,\n                        79,\n         
               -20,\n                        114,\n                        66,\n                        -72,\n                        -50,\n                        116,\n                        66,\n                        -125,\n                        96,\n                        -25,\n                        66,\n                        -126,\n                        -21,\n                        -6,\n                        66,\n                        -59,\n                        -128,\n                        -90,\n                        66,\n                        -77,\n                        118,\n                        3,\n                        66,\n                        82,\n                        75,\n                        123,\n                        66,\n                        97,\n                        -61,\n                        -86,\n                        66,\n                        -79,\n                        93,\n                        -87,\n                        66,\n                        -64,\n                        11,\n                        30,\n                        66,\n                        -66,\n                        -94,\n                        3,\n                        66,\n                        -60,\n                        -113,\n                        -40,\n                        66,\n                        -107,\n                        -100,\n                        37,\n                        66,\n                        -76,\n                        -45,\n                        82,\n                        66,\n                        -107,\n                        9,\n                        73,\n                        66,\n                        -94,\n                        -123,\n                        125,\n                        66,\n                        -112,\n                        101,\n                        28,\n                        66,\n                        
-106,\n                        -119,\n                        121,\n                        66,\n                        -85,\n                        -112,\n                        8,\n                        66,\n                        -60,\n                        67,\n                        16,\n                        66,\n                        -74,\n                        -100,\n                        51,\n                        66,\n                        69,\n                        110,\n                        -88,\n                        66,\n                        95,\n                        9,\n                        -59,\n                        66,\n                        -60,\n                        -56,\n                        -31,\n                        66,\n                        -82,\n                        -15,\n                        49,\n                        66,\n                        -104,\n                        -94,\n                        -50,\n                        66,\n                        83,\n                        52,\n                        -102,\n                        66,\n                        -66,\n                        -24,\n                        10,\n                        66,\n                        -117,\n                        -79,\n                        -91,\n                        66,\n                        -67,\n                        -59,\n                        119,\n                        66,\n                        109,\n                        -67,\n                        110,\n                        63,\n                        -127,\n                        53,\n                        -107,\n                        66,\n                        72,\n                        -18,\n                        51,\n                        66,\n                        -120,\n                        -61,\n                        51,\n                        66,\n        
                -69,\n                        -74,\n                        47,\n                        66,\n                        -118,\n                        35,\n                        -77,\n                        66,\n                        -67,\n                        22,\n                        30,\n                        66,\n                        -67,\n                        86,\n                        1,\n                        66,\n                        76,\n                        -110,\n                        -54,\n                        66,\n                        -113,\n                        -98,\n                        -65,\n                        66,\n                        -69,\n                        -57,\n                        -116,\n                        66,\n                        -113,\n                        31,\n                        60,\n                        66,\n                        -105,\n                        29,\n                        38,\n                        66,\n                        127,\n                        101,\n                        125,\n                        66,\n                        -103,\n                        -121,\n                        -41,\n                        66,\n                        -92,\n                        -124,\n                        60,\n                        66,\n                        -78,\n                        86,\n                        120,\n                        66,\n                        -116,\n                        -114,\n                        -56,\n                        66,\n                        -106,\n                        53,\n                        -53,\n                        66,\n                        -62,\n                        65,\n                        69,\n                        66,\n                        -121,\n                        -58,\n                        109,\n                  
      66,\n                        -125,\n                        58,\n                        -55,\n                        66,\n                        -66,\n                        -70,\n                        -58,\n                        66,\n                        70,\n                        32,\n                        53,\n                        66,\n                        84,\n                        -109,\n                        79,\n                        66,\n                        -117,\n                        2,\n                        115,\n                        66,\n                        117,\n                        -85,\n                        -128,\n                        66,\n                        80,\n                        55,\n                        122,\n                        66,\n                        103,\n                        73,\n                        -127,\n                        66,\n                        -115,\n                        -98,\n                        38,\n                        66,\n                        -63,\n                        -88,\n                        -65,\n                        66,\n                        -102,\n                        16,\n                        123,\n                        66,\n                        -117,\n                        17,\n                        -42,\n                        66,\n                        -110,\n                        -46,\n                        21,\n                        66,\n                        -70,\n                        -40,\n                        94,\n                        66,\n                        103,\n                        60,\n                        -38,\n                        66,\n                        -117,\n                        -79,\n                        62,\n                        66,\n                        93,\n                        -119,\n                        -43,\n   
                     66,\n                        -66,\n                        -45,\n                        110,\n                        66,\n                        -107,\n                        -66,\n                        -69,\n                        66,\n                        84,\n                        81,\n                        106,\n                        66,\n                        115,\n                        63,\n                        63,\n                        66,\n                        -99,\n                        15,\n                        -97,\n                        66,\n                        101,\n                        -105,\n                        12,\n                        66,\n                        -79,\n                        68,\n                        117,\n                        66,\n                        83,\n                        -121,\n                        24,\n                        66,\n                        -106,\n                        -103,\n                        -7,\n                        66,\n                        104,\n                        77,\n                        -76,\n                        66,\n                        -112,\n                        -98,\n                        -70,\n                        66,\n                        -117,\n                        -8,\n                        42,\n                        66,\n                        -104,\n                        6,\n                        120,\n                        66,\n                        -70,\n                        -97,\n                        38,\n                        66,\n                        -99,\n                        63,\n                        28,\n                        66,\n                        -127,\n                        -21,\n                        -30,\n                        66,\n                        -110,\n                        102,\n                  
      21,\n                        66,\n                        -85,\n                        110,\n                        56,\n                        66,\n                        -62,\n                        123,\n                        -3,\n                        66,\n                        -59,\n                        111,\n                        -119,\n                        66,\n                        108,\n                        -99,\n                        115,\n                        66,\n                        -104,\n                        -106,\n                        -117,\n                        66,\n                        99,\n                        104,\n                        101,\n                        66,\n                        -107,\n                        -109,\n                        -39,\n                        66,\n                        69,\n                        -7,\n                        -47,\n                        66,\n                        118,\n                        -96,\n                        26,\n                        66,\n                        100,\n                        -53,\n                        -68,\n                        66,\n                        84,\n                        100,\n                        -107,\n                        66,\n                        -64,\n                        92,\n                        -87,\n                        66,\n                        104,\n                        -40,\n                        62,\n                        66,\n                        -64,\n                        -101,\n                        16,\n                        66,\n                        -99,\n                        -23,\n                        -91,\n                        66,\n                        -60,\n                        -61,\n                        -57,\n                        66,\n                        -100,\n                        
-80,\n                        67,\n                        66,\n                        -76,\n                        -94,\n                        92,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        
0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 228,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        729935225,\n                        774110515,\n                        644020442,\n                        602647442,\n                        724628632,\n                        1160470031,\n                        1102654741,\n                        975699148,\n                        711885020,\n                        597139601,\n                        624895870,\n                        597251228,\n                        0,\n                        0\n          
          ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        643928587,\n                        774602257,\n                        774832121,\n                        1032937514,\n                        1155686359,\n                        984562334,\n                        730133567,\n                        1013920862,\n                        1141526167,\n                        581151478,\n                        624197408,\n                        625949042,\n                        0,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 26,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 26,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": -326098280807737610,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            },\n            {\n                \"version\": \"2.0\",\n                \"root\": 0,\n                \"maxSize\": 256,\n                \"outputAfter\": 32,\n                \"storeSequenceIndexesEnabled\": false,\n                \"centerOfMassEnabled\": false,\n                \"nodeStoreState\": {\n                    \"version\": \"2.0\",\n                    \"capacity\": 255,\n                    \"compressed\": true,\n                    \"cutDimension\": [\n                        0,\n                        31,\n                        255,\n                        53270838,\n                        647031374,\n                        119730107,\n                        769242474,\n                        81208803,\n 
                       74292330,\n                        215862725,\n                        487953482,\n                        727395159,\n                        863477410,\n                        347707217,\n                        999504309,\n                        794159870,\n                        345620406,\n                        117119418,\n                        536214317,\n                        240760993,\n                        619092426,\n                        204531182,\n                        743941457,\n                        207144049,\n                        716884917,\n                        358578133,\n                        995748417,\n                        498445609,\n                        792270938,\n                        994048331,\n                        1041956798,\n                        572573245,\n                        764632115,\n                        769913909,\n                        1065293034,\n                        799648081,\n                        913108409,\n                        380348583,\n                        330397483,\n                        664271558,\n                        399767081,\n                        21817,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"cutValueData\": [\n                        69,\n                        76,\n                        38,\n                        14,\n                        66,\n                        51,\n                        -35,\n                        -57,\n                        66,\n                        -83,\n                        36,\n                        -80,\n                        67,\n                        -66,\n                        102,\n                        -98,\n                        68,\n                        -80,\n                        -128,\n                        -68,\n             
           66,\n                        -120,\n                        121,\n                        76,\n                        66,\n                        -86,\n                        17,\n                        97,\n                        66,\n                        -82,\n                        19,\n                        82,\n                        66,\n                        -58,\n                        -82,\n                        121,\n                        66,\n                        -117,\n                        -8,\n                        -73,\n                        66,\n                        -92,\n                        -97,\n                        -115,\n                        66,\n                        101,\n                        -74,\n                        1,\n                        66,\n                        94,\n                        96,\n                        112,\n                        66,\n                        -118,\n                        77,\n                        65,\n                        66,\n                        31,\n                        83,\n                        127,\n                        66,\n                        -117,\n                        48,\n                        73,\n                        66,\n                        -63,\n                        126,\n                        -93,\n                        65,\n                        90,\n                        114,\n                        75,\n                        66,\n                        -78,\n                        -41,\n                        113,\n                        66,\n                        114,\n                        -72,\n                        -73,\n                        66,\n                        -92,\n                        -103,\n                        -122,\n                        66,\n                        110,\n                        -10,\n                        30,\n  
                      66,\n                        -101,\n                        40,\n                        47,\n                        66,\n                        -78,\n                        -108,\n                        115,\n                        66,\n                        82,\n                        49,\n                        -33,\n                        66,\n                        99,\n                        -41,\n                        14,\n                        66,\n                        -96,\n                        -68,\n                        -111,\n                        66,\n                        -59,\n                        -39,\n                        116,\n                        66,\n                        117,\n                        83,\n                        96,\n                        66,\n                        -98,\n                        -12,\n                        -57,\n                        66,\n                        -99,\n                        26,\n                        6,\n                        66,\n                        100,\n                        78,\n                        40,\n                        66,\n                        81,\n                        91,\n                        -42,\n                        66,\n                        86,\n                        26,\n                        110,\n                        66,\n                        -79,\n                        29,\n                        90,\n                        66,\n                        -80,\n                        37,\n                        -74,\n                        66,\n                        -99,\n                        -112,\n                        -68,\n                        66,\n                        -88,\n                        87,\n                        19,\n                        66,\n                        13,\n                        16,\n                        
19,\n                        66,\n                        68,\n                        6,\n                        91,\n                        66,\n                        -111,\n                        -115,\n                        -98,\n                        66,\n                        -59,\n                        -39,\n                        10,\n                        66,\n                        -80,\n                        -96,\n                        -95,\n                        66,\n                        -60,\n                        101,\n                        77,\n                        66,\n                        -108,\n                        35,\n                        98,\n                        66,\n                        119,\n                        23,\n                        -16,\n                        66,\n                        119,\n                        -16,\n                        97,\n                        66,\n                        -62,\n                        -118,\n                        86,\n                        66,\n                        23,\n                        -110,\n                        74,\n                        66,\n                        -66,\n                        43,\n                        -44,\n                        66,\n                        -88,\n                        99,\n                        34,\n                        66,\n                        -67,\n                        122,\n                        18,\n                        66,\n                        -105,\n                        87,\n                        -55,\n                        66,\n                        -117,\n                        -59,\n                        -113,\n                        66,\n                        -71,\n                        94,\n                        -54,\n                        66,\n                        -119,\n                        -9,\n             
           -67,\n                        66,\n                        102,\n                        91,\n                        14,\n                        66,\n                        46,\n                        83,\n                        -125,\n                        66,\n                        105,\n                        -77,\n                        92,\n                        66,\n                        -108,\n                        -65,\n                        -101,\n                        66,\n                        -88,\n                        28,\n                        71,\n                        66,\n                        -68,\n                        114,\n                        56,\n                        66,\n                        -117,\n                        -73,\n                        -119,\n                        66,\n                        72,\n                        -85,\n                        9,\n                        66,\n                        108,\n                        10,\n                        -10,\n                        66,\n                        -79,\n                        -110,\n                        68,\n                        66,\n                        -115,\n                        -119,\n                        -74,\n                        66,\n                        -117,\n                        -90,\n                        6,\n                        66,\n                        -66,\n                        -48,\n                        -29,\n                        66,\n                        -84,\n                        67,\n                        -51,\n                        66,\n                        -122,\n                        91,\n                        -46,\n                        66,\n                        -91,\n                        34,\n                        119,\n                        66,\n                        -77,\n                        
87,\n                        -54,\n                        66,\n                        69,\n                        -47,\n                        5,\n                        66,\n                        -71,\n                        -52,\n                        -48,\n                        66,\n                        95,\n                        -71,\n                        67,\n                        66,\n                        -99,\n                        94,\n                        -51,\n                        66,\n                        98,\n                        37,\n                        -74,\n                        66,\n                        -70,\n                        35,\n                        90,\n                        66,\n                        -115,\n                        -109,\n                        -31,\n                        66,\n                        105,\n                        -92,\n                        31,\n                        66,\n                        -128,\n                        7,\n                        -117,\n                        66,\n                        -121,\n                        -37,\n                        93,\n                        66,\n                        -67,\n                        64,\n                        -126,\n                        66,\n                        -72,\n                        -122,\n                        124,\n                        66,\n                        -121,\n                        105,\n                        19,\n                        66,\n                        -60,\n                        24,\n                        -55,\n                        66,\n                        -81,\n                        20,\n                        42,\n                        66,\n                        81,\n                        90,\n                        -111,\n                        66,\n                        91,\n               
         -54,\n                        8,\n                        66,\n                        -112,\n                        -87,\n                        -38,\n                        66,\n                        -93,\n                        -46,\n                        -82,\n                        66,\n                        -62,\n                        16,\n                        -34,\n                        66,\n                        110,\n                        -55,\n                        75,\n                        66,\n                        83,\n                        89,\n                        -110,\n                        66,\n                        90,\n                        -101,\n                        36,\n                        66,\n                        -105,\n                        54,\n                        -83,\n                        66,\n                        -86,\n                        -85,\n                        -56,\n                        66,\n                        -80,\n                        -70,\n                        -99,\n                        66,\n                        97,\n                        77,\n                        -75,\n                        66,\n                        -99,\n                        25,\n                        42,\n                        66,\n                        81,\n                        91,\n                        34,\n                        66,\n                        -78,\n                        71,\n                        -116,\n                        66,\n                        -70,\n                        -58,\n                        -3,\n                        66,\n                        -59,\n                        -21,\n                        -111,\n                        66,\n                        -101,\n                        121,\n                        11,\n                        66,\n                        -63,\n    
                    101,\n                        -114,\n                        66,\n                        -63,\n                        77,\n                        87,\n                        66,\n                        -75,\n                        -1,\n                        124,\n                        66,\n                        69,\n                        -122,\n                        77,\n                        66,\n                        -112,\n                        20,\n                        -5,\n                        66,\n                        -106,\n                        17,\n                        -36,\n                        66,\n                        108,\n                        -115,\n                        -124,\n                        66,\n                        -72,\n                        -24,\n                        -86,\n                        66,\n                        -101,\n                        116,\n                        -35,\n                        66,\n                        -63,\n                        44,\n                        14,\n                        66,\n                        -113,\n                        36,\n                        -117,\n                        66,\n                        104,\n                        52,\n                        -44,\n                        66,\n                        -106,\n                        -118,\n                        -121,\n                        66,\n                        -67,\n                        -45,\n                        -24,\n                        66,\n                        -110,\n                        27,\n                        60,\n                        66,\n                        98,\n                        91,\n                        -81,\n                        66,\n                        -116,\n                        36,\n                        10,\n                        66,\n               
         -114,\n                        -111,\n                        34,\n                        66,\n                        -104,\n                        -75,\n                        29,\n                        66,\n                        -60,\n                        44,\n                        -44,\n                        66,\n                        125,\n                        -49,\n                        -80,\n                        66,\n                        -122,\n                        -120,\n                        -80,\n                        66,\n                        -60,\n                        46,\n                        0,\n                        66,\n                        -127,\n                        88,\n                        -74,\n                        66,\n                        84,\n                        1,\n                        -119,\n                        66,\n                        -114,\n                        -97,\n                        6,\n                        66,\n                        -107,\n                        17,\n                        27,\n                        66,\n                        -68,\n                        79,\n                        43,\n                        66,\n                        -112,\n                        -112,\n                        -78,\n                        66,\n                        -64,\n                        27,\n                        -47,\n                        66,\n                        -127,\n                        -128,\n                        7,\n                        66,\n                        -82,\n                        -81,\n                        49,\n                        66,\n                        -103,\n                        -52,\n                        -35,\n                        66,\n                        -70,\n                        -95,\n                        115,\n                        
66,\n                        -67,\n                        -102,\n                        -44,\n                        66,\n                        80,\n                        56,\n                        -87,\n                        66,\n                        -123,\n                        -76,\n                        -14,\n                        66,\n                        -109,\n                        17,\n                        -83,\n                        66,\n                        -103,\n                        -22,\n                        -49,\n                        66,\n                        -107,\n                        -81,\n                        76,\n                        66,\n                        91,\n                        -15,\n                        -109,\n                        66,\n                        93,\n                        -50,\n                        106,\n                        66,\n                        71,\n                        87,\n                        -83,\n                        66,\n                        -65,\n                        -115,\n                        -104,\n                        66,\n                        -80,\n                        97,\n                        61,\n                        66,\n                        -60,\n                        102,\n                        38,\n                        66,\n                        -69,\n                        -81,\n                        10,\n                        66,\n                        -58,\n                        -51,\n                        79,\n                        66,\n                        70,\n                        40,\n                        -22,\n                        66,\n                        96,\n                        -88,\n                        -72,\n                        66,\n                        95,\n                        -52,\n                        105,\n          
              66,\n                        -63,\n                        109,\n                        15,\n                        66,\n                        -71,\n                        37,\n                        -27,\n                        66,\n                        82,\n                        47,\n                        61,\n                        66,\n                        84,\n                        58,\n                        44,\n                        66,\n                        -110,\n                        64,\n                        101,\n                        66,\n                        -71,\n                        -42,\n                        5,\n                        66,\n                        -110,\n                        -5,\n                        40,\n                        66,\n                        115,\n                        -89,\n                        90,\n                        66,\n                        -109,\n                        -113,\n                        56,\n                        66,\n                        -103,\n                        43,\n                        94,\n                        66,\n                        94,\n                        -115,\n                        45,\n                        66,\n                        -119,\n                        -41,\n                        88,\n                        66,\n                        -104,\n                        -19,\n                        82,\n                        66,\n                        -98,\n                        -50,\n                        -62,\n                        66,\n                        -101,\n                        34,\n                        -28,\n                        66,\n                        -63,\n                        -114,\n                        113,\n                        66,\n                        -117,\n                        66,\n                        2,\n 
                       66,\n                        75,\n                        18,\n                        68,\n                        66,\n                        -114,\n                        -104,\n                        -9,\n                        66,\n                        90,\n                        -97,\n                        -90,\n                        66,\n                        -59,\n                        25,\n                        48,\n                        66,\n                        -110,\n                        12,\n                        -48,\n                        66,\n                        -63,\n                        93,\n                        -8,\n                        66,\n                        -103,\n                        -124,\n                        105,\n                        66,\n                        -102,\n                        -107,\n                        -87,\n                        66,\n                        -104,\n                        -104,\n                        -32,\n                        66,\n                        112,\n                        -53,\n                        -120,\n                        66,\n                        -73,\n                        -8,\n                        -127,\n                        66,\n                        -87,\n                        114,\n                        21,\n                        66,\n                        -61,\n                        4,\n                        -78,\n                        66,\n                        -128,\n                        78,\n                        -95,\n                        66,\n                        -102,\n                        91,\n                        127,\n                        66,\n                        -63,\n                        105,\n                        -15,\n                        66,\n                        73,\n                        113,\n            
            124,\n                        66,\n                        84,\n                        55,\n                        -107,\n                        66,\n                        -112,\n                        101,\n                        -48,\n                        66,\n                        -85,\n                        23,\n                        -73,\n                        66,\n                        -61,\n                        108,\n                        15,\n                        66,\n                        94,\n                        93,\n                        -107,\n                        66,\n                        -74,\n                        19,\n                        -116,\n                        66,\n                        -124,\n                        -111,\n                        -61,\n                        66,\n                        -96,\n                        92,\n                        -103,\n                        66,\n                        -95,\n                        76,\n                        21,\n                        66,\n                        104,\n                        37,\n                        18,\n                        66,\n                        -109,\n                        -7,\n                        -43,\n                        66,\n                        -63,\n                        48,\n                        43,\n                        66,\n                        85,\n                        104,\n                        -23,\n                        66,\n                        68,\n                        -38,\n                        104,\n                        66,\n                        -97,\n                        -118,\n                        125,\n                        66,\n                        -74,\n                        82,\n                        82,\n                        66,\n                        85,\n                        -1,\n 
                       111,\n                        66,\n                        -66,\n                        -91,\n                        -98,\n                        66,\n                        79,\n                        -30,\n                        -42,\n                        66,\n                        81,\n                        100,\n                        59,\n                        66,\n                        -113,\n                        25,\n                        -47,\n                        66,\n                        -108,\n                        -70,\n                        33,\n                        66,\n                        -70,\n                        27,\n                        -58,\n                        66,\n                        68,\n                        -68,\n                        5,\n                        66,\n                        -103,\n                        -31,\n                        59,\n                        66,\n                        -74,\n                        -48,\n                        -27,\n                        66,\n                        -87,\n                        -6,\n                        12,\n                        66,\n                        -64,\n                        1,\n                        -54,\n                        66,\n                        72,\n                        86,\n                        -5,\n                        66,\n                        -110,\n                        115,\n                        57,\n                        66,\n                        95,\n                        -9,\n                        87,\n                        66,\n                        -106,\n                        -58,\n                        -30,\n                        66,\n                        -114,\n                        -43,\n                        -9,\n                        66,\n                        -124,\n                       
 -117,\n                        -61,\n                        66,\n                        80,\n                        -121,\n                        46,\n                        66,\n                        -105,\n                        -104,\n                        -9,\n                        66,\n                        69,\n                        -48,\n                        -109,\n                        66,\n                        -116,\n                        -62,\n                        -126,\n                        66,\n                        -101,\n                        15,\n                        -70,\n                        66,\n                        -100,\n                        2,\n                        8,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                 
       0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0,\n                        0\n                    ],\n                    \"precision\": \"FLOAT_32\",\n                    \"root\": 0,\n                    \"canonicalAndNotALeaf\": true,\n                    \"size\": 231,\n                    \"leftIndex\": [\n                        -1,\n                        1,\n                        255,\n                        626568200,\n                        629499437,\n                        753339841,\n                        1030788898,\n                        726831224,\n                        596099411,\n                       
 586113809,\n                        975114418,\n                        987685622,\n                        1142571545,\n                        587744221,\n                        711000005,\n                        13,\n                        0\n                    ],\n                    \"rightIndex\": [\n                        -1,\n                        1,\n                        255,\n                        1161723346,\n                        987939094,\n                        768285566,\n                        769871048,\n                        588302266,\n                        983461084,\n                        597634816,\n                        985094540,\n                        640298897,\n                        1118597027,\n                        1030729828,\n                        1098223177,\n                        13,\n                        0\n                    ],\n                    \"nodeFreeIndexes\": [],\n                    \"nodeFreeIndexPointer\": 23,\n                    \"leafFreeIndexes\": [],\n                    \"leafFreeIndexPointer\": 23,\n                    \"partialTreeStateEnabled\": true\n                },\n                \"boundingBoxCacheFraction\": 0.0,\n                \"partialTreeState\": true,\n                \"seed\": 1885563308252954837,\n                \"id\": 0,\n                \"dimensions\": 32,\n                \"staticSeed\": 0,\n                \"weight\": 0.0,\n                \"hasAuxiliaryData\": false\n            }\n        ],\n        \"executionContext\": {\n            \"parallelExecutionEnabled\": false,\n            \"threadPoolSize\": 0\n        },\n        \"saveTreeStateEnabled\": true,\n        \"saveSamplerStateEnabled\": true,\n        \"saveCoordinatorStateEnabled\": true\n    },\n    \"thresholderState\": {\n        \"randomseed\": 0,\n        \"inAnomaly\": false,\n        \"elasticity\": 0.01,\n        \"attributionEnabled\": false,\n        \"count\": 473,\n        
\"minimumScores\": 10,\n        \"primaryDeviationState\": {\n            \"discount\": 0.0050000000000000044,\n            \"weight\": 174.80182844592096,\n            \"sumSquared\": 36.305151251956836,\n            \"sum\": 76.92421447463836,\n            \"count\": 473\n        },\n        \"secondaryDeviationState\": {\n            \"discount\": 0.0050000000000000044,\n            \"weight\": 174.80182844592096,\n            \"sumSquared\": 36.305151251956836,\n            \"sum\": 76.92421447463836,\n            \"count\": 473\n        },\n        \"thresholdDeviationState\": {\n            \"discount\": 0.0025000000000000022,\n            \"weight\": 234.23102014508095,\n            \"sumSquared\": 0.3718330270802468,\n            \"sum\": 0.3718330270802468,\n            \"count\": 473\n        },\n        \"upperThreshold\": 2.0,\n        \"lowerThreshold\": 1.0,\n        \"absoluteThreshold\": 1.0,\n        \"autoThreshold\": false,\n        \"initialThreshold\": 1.5,\n        \"zFactor\": 2.5,\n        \"upperZfactor\": 5.0,\n        \"absoluteScoreFraction\": 0.5,\n        \"horizon\": 0.5\n    },\n    \"preprocessorStates\": [\n        {\n            \"version\": \"2.1\",\n            \"useImputedFraction\": 0.5,\n            \"imputationMethod\": \"PREVIOUS\",\n            \"forestMode\": \"STANDARD\",\n            \"transformMethod\": \"NONE\",\n            \"weights\": [\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                
1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0,\n                1.0\n            ],\n            \"lastShingledPoint\": [\n                0.0,\n                81.1,\n                98.0,\n                54.0,\n                0.0,\n                72.875,\n                92.0,\n                53.0,\n                0.0,\n                64.3,\n                84.0,\n                50.0,\n                0.0,\n                84.9,\n                97.0,\n                55.0,\n                0.0,\n                72.11111111111111,\n                92.0,\n                51.0,\n                0.0,\n                70.36363636363636,\n                84.0,\n                51.0,\n                0.0,\n                85.66666666666667,\n                96.0,\n                65.0,\n                0.0,\n                75.875,\n                97.0,\n                53.0\n            ],\n            \"lastShingledInput\": [\n                0.0,\n                81.1,\n                98.0,\n                54.0,\n                0.0,\n                72.875,\n                92.0,\n                53.0,\n                0.0,\n                64.3,\n                84.0,\n                50.0,\n                0.0,\n                84.9,\n                97.0,\n                55.0,\n                0.0,\n                72.11111111111111,\n                92.0,\n                51.0,\n                0.0,\n                70.36363636363636,\n                84.0,\n                51.0,\n                0.0,\n                85.66666666666667,\n                96.0,\n                65.0,\n                0.0,\n                75.875,\n                97.0,\n                53.0\n            ],\n            \"timeDecay\": 0.0,\n            \"startNormalization\": 10,\n            \"stopNormalization\": 2147483647,\n            \"shingleSize\": 8,\n            \"dimensions\": 32,\n           
 \"inputLength\": 32,\n            \"clipFactor\": 10.0,\n            \"normalizeTime\": false,\n            \"previousTimeStamps\": [\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0,\n                0\n            ],\n            \"valuesSeen\": 505,\n            \"internalTimeStamp\": 505,\n            \"dataQualityState\": {\n                \"discount\": 1.0E-4,\n                \"weight\": 253.49802371541492,\n                \"sumSquared\": 253.49802371541492,\n                \"sum\": 253.49802371541492,\n                \"count\": 505\n            },\n            \"timeStampDeviationState\": {\n                \"discount\": 1.0E-4,\n                \"weight\": 253.49802371541492,\n                \"sumSquared\": 0.0,\n                \"sum\": 0.0,\n                \"count\": 505\n            }\n        }\n    ],\n    \"ignoreSimilarFactor\": 0.3,\n    \"triggerFactor\": 3.5,\n    \"lastAnomalyTimeStamp\": 47,\n    \"lastAnomalyScore\": 5.335765316974856,\n    \"lastAnomalyAttribution\": {\n        \"high\": [\n            0.0,\n            1.4117562111336642E-10,\n            1.418729485350228E-15,\n            1.0083921478610583E-8,\n            0.0,\n            4.917565799521917E-10,\n            0.0,\n            1.1332577277142232E-9,\n            0.0,\n            1.546364093339942E-15,\n            1.339953959003808E-14,\n            8.314645805260075E-9,\n            0.0,\n            1.2662953969342049E-11,\n            2.6668325044282365E-10,\n            1.2347552503991264E-13,\n            0.0,\n            4.1285628236595896E-7,\n            2.47339143039254E-9,\n            6.004034811065243E-10,\n            0.0,\n            1.007864651346114E-15,\n            1.4166121775317541E-8,\n            0.0,\n            0.0,\n            8.499118339727005E-9,\n            8.797576634943594E-10,\n            0.0,\n            0.0,\n            
0.4803347475667489,\n            4.854252775821479,\n            0.0\n        ],\n        \"low\": [\n            0.0,\n            3.818998412901733E-9,\n            7.882017408917721E-9,\n            1.0616258029073952E-11,\n            0.0,\n            9.378450942272161E-9,\n            0.0011529511744925037,\n            8.055723799453522E-9,\n            0.0,\n            1.0999779464398417E-7,\n            1.8676992635241835E-6,\n            4.329378325769762E-9,\n            0.0,\n            2.4004210236996577E-6,\n            1.359497861942361E-9,\n            1.898837754447581E-9,\n            0.0,\n            1.2579229088962973E-9,\n            2.1450535608803446E-9,\n            2.574875299812547E-7,\n            0.0,\n            1.2736401329305464E-5,\n            8.056112720413422E-10,\n            2.1851422542224092E-8,\n            0.0,\n            4.0497722314210486E-10,\n            3.371667769598239E-6,\n            1.6874340609136803E-6,\n            0.0,\n            0.0,\n            0.0,\n            1.895641896778137E-6\n        ]\n    },\n    \"lastScore\": 0.0,\n    \"lastAnomalyPoint\": [\n        0.0,\n        73.0,\n        92.0,\n        58.0,\n        0.0,\n        70.22222222222223,\n        86.0,\n        54.0,\n        0.0,\n        69.88888888888889,\n        87.0,\n        55.0,\n        0.0,\n        71.55555555555556,\n        96.0,\n        50.0,\n        0.0,\n        80.2,\n        98.0,\n        53.0,\n        0.0,\n        67.11111111111111,\n        97.0,\n        49.0,\n        0.0,\n        76.27272727272727,\n        93.0,\n        49.0,\n        0.0,\n        503.8,\n        4343.0,\n        49.0\n    ],\n    \"previousIsPotentialAnomaly\": false,\n    \"inHighScoreRegion\": false,\n    \"ignoreSimilar\": false,\n    \"numberOfAttributors\": 5,\n    \"randomSeed\": 0,\n    \"forestMode\": \"STANDARD\",\n    \"transformMethod\": \"NONE\",\n    \"lastRelativeIndex\": 0,\n    \"lastReset\": 0\n}"
  },
  {
    "path": "Java/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0                       http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>software.amazon.randomcutforest</groupId>\n    <artifactId>randomcutforest-parent</artifactId>\n    <version>4.4.0</version>\n    <packaging>pom</packaging>\n\n    <name>software.amazon.randomcutforest:randomcutforest</name>\n    <description>OpenSearch Random Cut Forest</description>\n    <url>https://github.com/aws/random-cut-forest-by-aws</url>\n\n    <licenses>\n        <license>\n            <name>The Apache License, Version 2.0</name>\n            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>\n        </license>\n    </licenses>\n\n    <developers>\n        <developer>\n            <id>amazonwebservices</id>\n            <organization>Amazon Web Services</organization>\n            <organizationUrl>https://aws.amazon.com</organizationUrl>\n            <roles>\n                <role>developer</role>\n            </roles>\n        </developer>\n    </developers>\n\n    <properties>\n        <maven.compiler.source>1.8</maven.compiler.source>\n        <maven.compiler.target>1.8</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <modules>\n        <module>core</module>\n        <module>parkservices</module>\n        <module>benchmark</module>\n        <module>examples</module>\n        <module>serialization</module>\n        <module>testutils</module>\n    </modules>\n\n    <dependencyManagement>\n        <dependencies>\n            <dependency>\n                <groupId>org.junit.jupiter</groupId>\n                <artifactId>junit-jupiter-engine</artifactId>\n                <version>5.10.1</version>\n            </dependency>\n       
     <dependency>\n                <groupId>org.junit.jupiter</groupId>\n                <artifactId>junit-jupiter-params</artifactId>\n                <version>5.10.1</version>\n            </dependency>\n            <dependency>\n                <groupId>org.hamcrest</groupId>\n                <artifactId>hamcrest</artifactId>\n                <version>2.2</version>\n            </dependency>\n            <dependency>\n                <groupId>org.mockito</groupId>\n                <artifactId>mockito-core</artifactId>\n                <version>5.7.0</version>\n            </dependency>\n            <dependency>\n                <groupId>org.mockito</groupId>\n                <artifactId>mockito-junit-jupiter</artifactId>\n                <version>5.7.0</version>\n            </dependency>\n            <dependency>\n                <groupId>org.powermock</groupId>\n                <artifactId>powermock-api-easymock</artifactId>\n                <version>2.0.7</version>\n            </dependency>\n        </dependencies>\n    </dependencyManagement>\n\n    <build>\n        <plugins>\n            <plugin>\n                <artifactId>maven-surefire-plugin</artifactId>\n                <version>2.22.2</version>\n            </plugin>\n            <plugin>\n                <artifactId>maven-failsafe-plugin</artifactId>\n                <version>2.22.2</version>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-site-plugin</artifactId>\n                <version>3.7.1</version>\n            </plugin>\n            <plugin>\n                <groupId>org.jacoco</groupId>\n                <artifactId>jacoco-maven-plugin</artifactId>\n                <version>0.8.8</version>\n                <executions>\n                    <execution>\n                        <goals>\n                            <goal>prepare-agent</goal>\n                        </goals>\n                    
</execution>\n                    <!-- attached to Maven test phase -->\n                    <execution>\n                        <id>report</id>\n                        <phase>test</phase>\n                        <goals>\n                            <goal>report</goal>\n                        </goals>\n                    </execution>\n                </executions>\n            </plugin>\n            <plugin>\n                <groupId>com.diffplug.spotless</groupId>\n                <artifactId>spotless-maven-plugin</artifactId>\n                <version>1.31.0</version>\n                <configuration>\n                    <java>\n                        <licenseHeader>\n                            <file>${maven.multiModuleProjectDirectory}/license-header</file>\n                        </licenseHeader>\n                        <eclipse>\n                            <file>${maven.multiModuleProjectDirectory}/spotless-eclipse.xml</file>\n                        </eclipse>\n                        <removeUnusedImports/>\n                        <importOrder>\n                            <order>java,javax,org,com</order>\n                        </importOrder>\n                    </java>\n                </configuration>\n                <executions>\n                    <execution>\n                        <phase>compile</phase>\n                        <goals>\n                            <goal>apply</goal>\n                        </goals>\n                    </execution>\n                </executions>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-deploy-plugin</artifactId>\n                <version>3.0.0-M1</version>\n                <executions>\n                    <execution>\n                        <id>default-deploy</id>\n                        <phase>deploy</phase>\n                        <goals>\n                            <goal>deploy</goal>\n          
              </goals>\n                    </execution>\n                </executions>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-source-plugin</artifactId>\n                <version>2.2.1</version>\n                <executions>\n                    <execution>\n                        <id>attach-sources</id>\n                        <goals>\n                            <goal>jar-no-fork</goal>\n                        </goals>\n                    </execution>\n                </executions>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-javadoc-plugin</artifactId>\n                <version>3.2.0</version>\n                <executions>\n                    <execution>\n                        <id>attach-javadocs</id>\n                        <goals>\n                            <goal>jar</goal>\n                        </goals>\n                    </execution>\n                </executions>\n            </plugin>\n            <plugin>\n                <groupId>org.sonatype.plugins</groupId>\n                <artifactId>nexus-staging-maven-plugin</artifactId>\n                <version>1.6.13</version>\n                <extensions>true</extensions>\n                <configuration>\n                    <serverId>ossrh</serverId>\n                    <nexusUrl>https://aws.oss.sonatype.org/</nexusUrl>\n                    <autoReleaseAfterClose>false</autoReleaseAfterClose>\n                </configuration>\n            </plugin>\n         </plugins>\n    </build>\n\n    <reporting>\n        <plugins>\n            <plugin>\n                <groupId>org.codehaus.mojo</groupId>\n                <artifactId>findbugs-maven-plugin</artifactId>\n                <version>3.0.5</version>\n                <configuration>\n                    
<excludeFilterFile>findbugs-filters.xml</excludeFilterFile>\n                </configuration>\n            </plugin>\n        </plugins>\n    </reporting>\n\n    <distributionManagement>\n        <snapshotRepository>\n            <id>ossrh-snapshot</id>\n            <url>https://aws.oss.sonatype.org/content/repositories/snapshots</url>\n        </snapshotRepository>\n        <repository>\n            <id>ossrh</id>\n            <url>https://oss.sonatype.org/service/local/staging/deploy/maven2/</url>\n        </repository>\n    </distributionManagement>\n\n    <scm>\n        <connection>scm:git:git://github.com/aws/random-cut-forest-by-aws.git</connection>\n        <developerConnection>scm:git:ssh://github.com/aws/random-cut-forest-by-aws.git</developerConnection>\n        <url>https://github.com/aws/random-cut-forest-by-aws/tree/main</url>\n    </scm>\n    <profiles>\n    <profile>\n      <id>gpg-sign</id>\n      <build>\n        <plugins>\n          <plugin>\n            <groupId>org.apache.maven.plugins</groupId>\n            <artifactId>maven-gpg-plugin</artifactId>\n            <version>3.1.0</version>\n            <executions>\n              <execution>\n                <id>sign-artifacts</id>\n                <phase>verify</phase>\n                <goals>\n                  <goal>sign</goal>\n                </goals>\n                <configuration>\n                  <gpgArguments>\n                    <arg>--pinentry-mode</arg>\n                    <arg>loopback</arg>\n                  </gpgArguments>\n                </configuration>\n              </execution>\n            </executions>\n          </plugin>\n        </plugins>\n      </build>\n    </profile>\n  </profiles>\n</project>\n"
  },
  {
    "path": "Java/serialization/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n\n    <parent>\n        <groupId>software.amazon.randomcutforest</groupId>\n        <artifactId>randomcutforest-parent</artifactId>\n        <version>4.4.0</version>\n    </parent>\n\n    <artifactId>randomcutforest-serialization</artifactId>\n    <packaging>jar</packaging>\n\n    <dependencies>\n        <dependency>\n            <groupId>software.amazon.randomcutforest</groupId>\n            <artifactId>randomcutforest-core</artifactId>\n            <version>${project.version}</version>\n        </dependency>\n        <dependency>\n            <groupId>org.projectlombok</groupId>\n            <artifactId>lombok</artifactId>\n            <version>1.18.30</version>\n            <scope>provided</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.core</groupId>\n            <artifactId>jackson-core</artifactId>\n            <version>2.16.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.fasterxml.jackson.core</groupId>\n            <artifactId>jackson-databind</artifactId>\n            <version>2.16.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.junit.jupiter</groupId>\n            <artifactId>junit-jupiter-engine</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.junit.jupiter</groupId>\n            <artifactId>junit-jupiter-params</artifactId>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json/v1/V1JsonToV3StateConverter.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.serialize.json.v1;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\n\nimport java.io.IOException;\nimport java.io.Reader;\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Optional;\n\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.state.ExecutionContext;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.state.sampler.CompactSamplerState;\nimport com.amazon.randomcutforest.state.store.PointStoreMapper;\nimport com.amazon.randomcutforest.state.store.PointStoreState;\nimport com.amazon.randomcutforest.store.IPointStore;\nimport com.amazon.randomcutforest.store.PointStore;\nimport com.amazon.randomcutforest.tree.ITree;\nimport com.amazon.randomcutforest.tree.RandomCutTree;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\npublic class V1JsonToV3StateConverter {\n\n    private final ObjectMapper mapper = new ObjectMapper();\n\n    public RandomCutForestState convert(String json, Precision precision) throws IOException {\n        checkArgument(precision == Precision.FLOAT_32, \"float 64 is deprecated in v3\");\n        
V1SerializedRandomCutForest forest = mapper.readValue(json, V1SerializedRandomCutForest.class);\n        return convert(forest, precision);\n    }\n\n    public Optional<RandomCutForestState> convert(ArrayList<String> jsons, int numberOfTrees, Precision precision)\n            throws IOException {\n        ArrayList<V1SerializedRandomCutForest> forests = new ArrayList<>(jsons.size());\n        int sum = 0;\n\n        for (int i = 0; i < jsons.size(); i++) {\n            V1SerializedRandomCutForest forest = mapper.readValue(jsons.get(i), V1SerializedRandomCutForest.class);\n            forests.add(forest);\n            sum += forest.getNumberOfTrees();\n        }\n        if (sum < numberOfTrees) {\n            return Optional.empty();\n        }\n        return Optional.ofNullable(convert(forests, numberOfTrees, precision));\n    }\n\n    public RandomCutForestState convert(Reader reader, Precision precision) throws IOException {\n        checkArgument(precision == Precision.FLOAT_32, \"float 64 is deprecated in v3\");\n        V1SerializedRandomCutForest forest = mapper.readValue(reader, V1SerializedRandomCutForest.class);\n        return convert(forest, precision);\n    }\n\n    public RandomCutForestState convert(URL url, Precision precision) throws IOException {\n        checkArgument(precision == Precision.FLOAT_32, \"float 64 is deprecated in v3\");\n        V1SerializedRandomCutForest forest = mapper.readValue(url, V1SerializedRandomCutForest.class);\n        return convert(forest, precision);\n    }\n\n    public RandomCutForestState convert(V1SerializedRandomCutForest serializedForest, Precision precision) {\n        return convert(Collections.singletonList(serializedForest), serializedForest.getNumberOfTrees(), precision);\n    }\n\n    static class SamplerConverter {\n        private final IPointStore<Integer, float[]> pointStore;\n        private final List<CompactSamplerState> compactSamplerStates;\n        private final Precision precision;\n        
private final ITree<Integer, float[]> globalTree;\n        private final int maxNumberOfTrees;\n\n        public SamplerConverter(int dimensions, int capacity, Precision precision, int maxNumberOfTrees) {\n            pointStore = PointStore.builder().dimensions(dimensions).capacity(capacity).shingleSize(1)\n                    .initialSize(capacity).build();\n            globalTree = new RandomCutTree.Builder().pointStoreView(pointStore).capacity(pointStore.getCapacity() + 1)\n                    .storeSequenceIndexesEnabled(false).centerOfMassEnabled(false).boundingBoxCacheFraction(1.0)\n                    .build();\n\n            compactSamplerStates = new ArrayList<>();\n            this.maxNumberOfTrees = maxNumberOfTrees;\n            this.precision = precision;\n        }\n\n        public PointStoreState getPointStoreState(Precision precision) {\n            return new PointStoreMapper().toState((PointStore) pointStore);\n\n        }\n\n        public void addSampler(V1SerializedRandomCutForest.Sampler sampler) {\n            if (compactSamplerStates.size() < maxNumberOfTrees) {\n                V1SerializedRandomCutForest.WeightedSamples[] samples = sampler.getWeightedSamples();\n                int[] pointIndex = new int[samples.length];\n                float[] weight = new float[samples.length];\n                long[] sequenceIndex = new long[samples.length];\n\n                for (int i = 0; i < samples.length; i++) {\n                    V1SerializedRandomCutForest.WeightedSamples sample = samples[i];\n                    float[] point = toFloatArray(sample.getPoint());\n                    Integer index = pointStore.add(point, sample.getSequenceIndex());\n                    pointIndex[i] = globalTree.addPoint(index, 0L);\n                    if (pointIndex[i] != index) {\n                        pointStore.incrementRefCount(pointIndex[i]);\n                        pointStore.decrementRefCount(index);\n                    }\n                    
weight[i] = (float) sample.getWeight();\n                    sequenceIndex[i] = sample.getSequenceIndex();\n                }\n\n                CompactSamplerState samplerState = new CompactSamplerState();\n                samplerState.setSize(samples.length);\n                samplerState.setCapacity(sampler.getSampleSize());\n                samplerState.setTimeDecay(sampler.getLambda());\n                samplerState.setPointIndex(pointIndex);\n                samplerState.setWeight(weight);\n                samplerState.setSequenceIndex(sequenceIndex);\n                samplerState.setSequenceIndexOfMostRecentTimeDecayUpdate(0L);\n                samplerState.setMaxSequenceIndex(sampler.getEntriesSeen());\n                samplerState.setInitialAcceptFraction(1.0);\n\n                compactSamplerStates.add(samplerState);\n            }\n        }\n    }\n\n    /**\n     * the function merges a collection of RCF-1.0 models with same model parameters\n     * and fixes the number of trees in the new model (which has to be less or equal\n     * than the sum of the old models) The conversion uses the execution context of\n     * the first forest and can be adjusted subsequently by setters\n     * \n     * @param serializedForests A non-empty list of forests (together having more\n     *                          trees than numberOfTrees)\n     * @param numberOfTrees     the new number of trees\n     * @param precision         the precision of the new forest\n     * @return a merged RCF with the first numberOfTrees trees\n     */\n    public RandomCutForestState convert(List<V1SerializedRandomCutForest> serializedForests, int numberOfTrees,\n            Precision precision) {\n        checkArgument(serializedForests.size() > 0, \"incorrect usage of convert\");\n        checkArgument(numberOfTrees > 0, \"incorrect parameter\");\n        int sum = 0;\n        for (int i = 0; i < serializedForests.size(); i++) {\n            sum += 
serializedForests.get(i).getNumberOfTrees();\n        }\n        checkArgument(sum >= numberOfTrees, \"incorrect parameters\");\n\n        RandomCutForestState state = new RandomCutForestState();\n        state.setNumberOfTrees(numberOfTrees);\n        state.setDimensions(serializedForests.get(0).getDimensions());\n        state.setTimeDecay(serializedForests.get(0).getLambda());\n        state.setSampleSize(serializedForests.get(0).getSampleSize());\n        state.setShingleSize(1);\n        state.setCenterOfMassEnabled(serializedForests.get(0).isCenterOfMassEnabled());\n        state.setOutputAfter(serializedForests.get(0).getOutputAfter());\n        state.setStoreSequenceIndexesEnabled(serializedForests.get(0).isStoreSequenceIndexesEnabled());\n        state.setTotalUpdates(serializedForests.get(0).getExecutor().getExecutor().getTotalUpdates());\n        state.setCompact(true);\n        state.setInternalShinglingEnabled(false);\n        state.setBoundingBoxCacheFraction(1.0);\n        state.setSaveSamplerStateEnabled(true);\n        state.setSaveTreeStateEnabled(false);\n        state.setSaveCoordinatorStateEnabled(true);\n        state.setPrecision(precision.name());\n        state.setCompressed(false);\n        state.setPartialTreeState(false);\n\n        ExecutionContext executionContext = new ExecutionContext();\n        executionContext.setParallelExecutionEnabled(serializedForests.get(0).isParallelExecutionEnabled());\n        executionContext.setThreadPoolSize(serializedForests.get(0).getThreadPoolSize());\n        state.setExecutionContext(executionContext);\n\n        SamplerConverter samplerConverter = new SamplerConverter(state.getDimensions(),\n                state.getNumberOfTrees() * state.getSampleSize() + 1, precision, numberOfTrees);\n\n        serializedForests.stream().flatMap(f -> Arrays.stream(f.getExecutor().getExecutor().getTreeUpdaters()))\n                .limit(numberOfTrees).map(V1SerializedRandomCutForest.TreeUpdater::getSampler)\n   
             .forEach(samplerConverter::addSampler);\n\n        state.setPointStoreState(samplerConverter.getPointStoreState(precision));\n        state.setCompactSamplerStates(samplerConverter.compactSamplerStates);\n\n        return state;\n    }\n}\n"
  },
  {
    "path": "Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json/v1/V1SerializedRandomCutForest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.serialize.json.v1;\n\n/**\n * Serialized RCF for internal use only.\n */\npublic class V1SerializedRandomCutForest {\n\n    public Random getRng() {\n        return rng;\n    }\n\n    public void setRng(Random rng) {\n        this.rng = rng;\n    }\n\n    public int getDimensions() {\n        return dimensions;\n    }\n\n    public void setDimensions(int dimensions) {\n        this.dimensions = dimensions;\n    }\n\n    public int getSampleSize() {\n        return sampleSize;\n    }\n\n    public void setSampleSize(int sampleSize) {\n        this.sampleSize = sampleSize;\n    }\n\n    public int getOutputAfter() {\n        return outputAfter;\n    }\n\n    public void setOutputAfter(int outputAfter) {\n        this.outputAfter = outputAfter;\n    }\n\n    public int getNumberOfTrees() {\n        return numberOfTrees;\n    }\n\n    public void setNumberOfTrees(int numberOfTrees) {\n        this.numberOfTrees = numberOfTrees;\n    }\n\n    public double getLambda() {\n        return lambda;\n    }\n\n    public void setLambda(double lambda) {\n        this.lambda = lambda;\n    }\n\n    public boolean isStoreSequenceIndexesEnabled() {\n        return storeSequenceIndexesEnabled;\n    }\n\n    public void setStoreSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n        
this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n    }\n\n    public boolean isCenterOfMassEnabled() {\n        return centerOfMassEnabled;\n    }\n\n    public void setCenterOfMassEnabled(boolean centerOfMassEnabled) {\n        this.centerOfMassEnabled = centerOfMassEnabled;\n    }\n\n    public boolean isParallelExecutionEnabled() {\n        return parallelExecutionEnabled;\n    }\n\n    public void setParallelExecutionEnabled(boolean parallelExecutionEnabled) {\n        this.parallelExecutionEnabled = parallelExecutionEnabled;\n    }\n\n    public int getThreadPoolSize() {\n        return threadPoolSize;\n    }\n\n    public void setThreadPoolSize(int threadPoolSize) {\n        this.threadPoolSize = threadPoolSize;\n    }\n\n    public Executor getExecutor() {\n        return executor;\n    }\n\n    public void setExecutor(Executor executor) {\n        this.executor = executor;\n    }\n\n    private static class Random {\n    }\n\n    private static class Tree {\n        private boolean storeSequenceIndexesEnabled;\n        private boolean centerOfMassEnabled;\n        private Random random;\n\n        public boolean isStoreSequenceIndexesEnabled() {\n            return storeSequenceIndexesEnabled;\n        }\n\n        public void setStoreSequenceIndexesEnabled(boolean storeSequenceIndexesEnabled) {\n            this.storeSequenceIndexesEnabled = storeSequenceIndexesEnabled;\n        }\n\n        public boolean isCenterOfMassEnabled() {\n            return centerOfMassEnabled;\n        }\n\n        public void setCenterOfMassEnabled(boolean centerOfMassEnabled) {\n            this.centerOfMassEnabled = centerOfMassEnabled;\n        }\n\n        public Random getRandom() {\n            return random;\n        }\n\n        public void setRandom(Random random) {\n            this.random = random;\n        }\n    }\n\n    static class WeightedSamples {\n        private double[] point;\n        private double weight;\n        private long 
sequenceIndex;\n\n        public double[] getPoint() {\n            return point;\n        }\n\n        public void setPoint(double[] point) {\n            this.point = point;\n        }\n\n        public double getWeight() {\n            return weight;\n        }\n\n        public void setWeight(double weight) {\n            this.weight = weight;\n        }\n\n        public long getSequenceIndex() {\n            return sequenceIndex;\n        }\n\n        public void setSequenceIndex(long sequenceIndex) {\n            this.sequenceIndex = sequenceIndex;\n        }\n    }\n\n    static class Sampler {\n        private WeightedSamples[] weightedSamples;\n        private int sampleSize;\n        private double lambda;\n        private Random random;\n        private long entriesSeen;\n\n        public WeightedSamples[] getWeightedSamples() {\n            return weightedSamples;\n        }\n\n        public void setWeightedSamples(WeightedSamples[] weightedSamples) {\n            this.weightedSamples = weightedSamples;\n        }\n\n        public int getSampleSize() {\n            return sampleSize;\n        }\n\n        public void setSampleSize(int sampleSize) {\n            this.sampleSize = sampleSize;\n        }\n\n        public double getLambda() {\n            return lambda;\n        }\n\n        public void setLambda(double lambda) {\n            this.lambda = lambda;\n        }\n\n        public Random getRandom() {\n            return random;\n        }\n\n        public void setRandom(Random random) {\n            this.random = random;\n        }\n\n        public long getEntriesSeen() {\n            return entriesSeen;\n        }\n\n        public void setEntriesSeen(long entriesSeen) {\n            this.entriesSeen = entriesSeen;\n        }\n    }\n\n    static class TreeUpdater {\n        public Sampler getSampler() {\n            return sampler;\n        }\n\n        public void setSampler(Sampler sampler) {\n            this.sampler = sampler;\n     
   }\n\n        public Tree getTree() {\n            return tree;\n        }\n\n        public void setTree(Tree tree) {\n            this.tree = tree;\n        }\n\n        private Sampler sampler;\n        private Tree tree;\n    }\n\n    static class Exec {\n        private TreeUpdater[] treeUpdaters;\n        private long totalUpdates;\n        private int threadPoolSize;\n\n        public TreeUpdater[] getTreeUpdaters() {\n            return treeUpdaters;\n        }\n\n        public void setTreeUpdaters(TreeUpdater[] treeUpdaters) {\n            this.treeUpdaters = treeUpdaters;\n        }\n\n        public long getTotalUpdates() {\n            return totalUpdates;\n        }\n\n        public void setTotalUpdates(long totalUpdates) {\n            this.totalUpdates = totalUpdates;\n        }\n\n        public int getThreadPoolSize() {\n            return threadPoolSize;\n        }\n\n        public void setThreadPoolSize(int threadPoolSize) {\n            this.threadPoolSize = threadPoolSize;\n        }\n    }\n\n    static class Executor {\n        private String executor_type;\n        private Exec executor;\n\n        public String getExecutor_type() {\n            return executor_type;\n        }\n\n        public void setExecutor_type(String executor_type) {\n            this.executor_type = executor_type;\n        }\n\n        public Exec getExecutor() {\n            return executor;\n        }\n\n        public void setExecutor(Exec executor) {\n            this.executor = executor;\n        }\n    }\n\n    private Random rng;\n    private int dimensions;\n    private int sampleSize;\n    private int outputAfter;\n    private int numberOfTrees;\n    private double lambda;\n    private boolean storeSequenceIndexesEnabled;\n    private boolean centerOfMassEnabled;\n    private boolean parallelExecutionEnabled;\n    private int threadPoolSize;\n    private Executor executor;\n}\n"
  },
  {
    "path": "Java/serialization/src/main/java/com/amazon/randomcutforest/serialize/json/v2/V2StateToV3ForestConverter.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.serialize.json.v2;\n\nimport static com.amazon.randomcutforest.CommonUtils.checkArgument;\nimport static com.amazon.randomcutforest.CommonUtils.checkNotNull;\nimport static com.amazon.randomcutforest.CommonUtils.toFloatArray;\nimport static com.amazon.randomcutforest.state.Version.V2_0;\nimport static com.amazon.randomcutforest.state.Version.V2_1;\n\nimport java.util.List;\nimport java.util.Random;\n\nimport com.amazon.randomcutforest.ComponentList;\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.executor.PointStoreCoordinator;\nimport com.amazon.randomcutforest.executor.SamplerPlusTree;\nimport com.amazon.randomcutforest.sampler.CompactSampler;\nimport com.amazon.randomcutforest.sampler.Weighted;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.amazon.randomcutforest.state.Version;\nimport com.amazon.randomcutforest.state.sampler.CompactSamplerMapper;\nimport com.amazon.randomcutforest.state.sampler.CompactSamplerState;\nimport com.amazon.randomcutforest.state.store.PointStoreState;\nimport com.amazon.randomcutforest.state.tree.CompactRandomCutTreeContext;\nimport com.amazon.randomcutforest.store.PointStore;\nimport 
com.amazon.randomcutforest.tree.RandomCutTree;\nimport com.amazon.randomcutforest.util.ArrayPacking;\n\npublic class V2StateToV3ForestConverter {\n\n    public RandomCutForest convert(RandomCutForestState v2State) {\n        String version = v2State.getVersion();\n        checkArgument(version.equals(V2_0) || version.equals(V2_1), \"incorrect convertor\");\n        if (Precision.valueOf(v2State.getPrecision()) == Precision.FLOAT_32) {\n            RandomCutForestMapper mapper = new RandomCutForestMapper();\n            mapper.setCompressionEnabled(v2State.isCompressed());\n            return mapper.toModel(v2State);\n        } else {\n            return convertFrom64(v2State);\n        }\n    }\n\n    public PointStore convertFromDouble(PointStoreState state) {\n        checkNotNull(state.getRefCount(), \"refCount must not be null\");\n        checkNotNull(state.getPointData(), \"pointData must not be null\");\n        checkArgument(Precision.valueOf(state.getPrecision()) == Precision.FLOAT_64,\n                \"precision must be \" + Precision.FLOAT_64);\n        int indexCapacity = state.getIndexCapacity();\n        int dimensions = state.getDimensions();\n        float[] store = toFloatArray(\n                ArrayPacking.unpackDoubles(state.getPointData(), state.getCurrentStoreCapacity() * dimensions));\n        int startOfFreeSegment = state.getStartOfFreeSegment();\n        int[] refCount = ArrayPacking.unpackInts(state.getRefCount(), indexCapacity, state.isCompressed());\n        int[] locationList = new int[indexCapacity];\n        int[] tempList = ArrayPacking.unpackInts(state.getLocationList(), state.isCompressed());\n        System.arraycopy(tempList, 0, locationList, 0, tempList.length);\n        if (!state.getVersion().equals(Version.V3_0)) {\n            transformArray(locationList, dimensions / state.getShingleSize());\n        }\n\n        return PointStore.builder().internalRotationEnabled(state.isRotationEnabled())\n                
.internalShinglingEnabled(state.isInternalShinglingEnabled())\n                .dynamicResizingEnabled(state.isDynamicResizingEnabled())\n                .directLocationEnabled(state.isDirectLocationMap()).indexCapacity(indexCapacity)\n                .currentStoreCapacity(state.getCurrentStoreCapacity()).capacity(state.getCapacity())\n                .shingleSize(state.getShingleSize()).dimensions(state.getDimensions()).locationList(locationList)\n                .nextTimeStamp(state.getLastTimeStamp()).startOfFreeSegment(startOfFreeSegment).refCount(refCount)\n                .knownShingle(state.getInternalShingle()).store(store).build();\n    }\n\n    void transformArray(int[] location, int baseDimension) {\n        checkArgument(baseDimension > 0, \"incorrect invocation\");\n        for (int i = 0; i < location.length; i++) {\n            if (location[i] > 0) {\n                location[i] = location[i] / baseDimension;\n            }\n        }\n    }\n\n    RandomCutForest convertFrom64(RandomCutForestState state) {\n        boolean parallel = false;\n        int threadPoolSize = 1;\n\n        if (state.getExecutionContext() != null) {\n            parallel = state.getExecutionContext().isParallelExecutionEnabled();\n            threadPoolSize = state.getExecutionContext().getThreadPoolSize();\n        }\n        RandomCutForest.Builder<?> builder = RandomCutForest.builder().numberOfTrees(state.getNumberOfTrees())\n                .dimensions(state.getDimensions()).timeDecay(state.getTimeDecay()).sampleSize(state.getSampleSize())\n                .centerOfMassEnabled(state.isCenterOfMassEnabled()).outputAfter(state.getOutputAfter())\n                .parallelExecutionEnabled(parallel).threadPoolSize(threadPoolSize)\n                .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).shingleSize(state.getShingleSize())\n                .boundingBoxCacheFraction(state.getBoundingBoxCacheFraction()).compact(state.isCompact())\n                
.internalShinglingEnabled(state.isInternalShinglingEnabled());\n        Random random = builder.getRandom();\n        PointStore pointStore = convertFromDouble(state.getPointStoreState());\n        ComponentList<Integer, float[]> components = new ComponentList<>();\n\n        PointStoreCoordinator<float[]> coordinator = new PointStoreCoordinator<>(pointStore);\n        coordinator.setTotalUpdates(state.getTotalUpdates());\n        CompactRandomCutTreeContext context = new CompactRandomCutTreeContext();\n        context.setPointStore(pointStore);\n        context.setMaxSize(state.getSampleSize());\n        checkArgument(state.isSaveSamplerStateEnabled(), \" conversion cannot proceed without samplers\");\n        List<CompactSamplerState> samplerStates = state.getCompactSamplerStates();\n        CompactSamplerMapper samplerMapper = new CompactSamplerMapper();\n\n        for (int i = 0; i < state.getNumberOfTrees(); i++) {\n            CompactSampler compactData = samplerMapper.toModel(samplerStates.get(i));\n            RandomCutTree tree = RandomCutTree.builder().capacity(state.getSampleSize()).pointStoreView(pointStore)\n                    .storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled())\n                    .outputAfter(state.getOutputAfter()).centerOfMassEnabled(state.isCenterOfMassEnabled())\n                    .randomSeed(random.nextLong()).build();\n            CompactSampler sampler = CompactSampler.builder().capacity(state.getSampleSize())\n                    .timeDecay(state.getTimeDecay()).randomSeed(random.nextLong()).build();\n            sampler.setMaxSequenceIndex(compactData.getMaxSequenceIndex());\n            sampler.setMostRecentTimeDecayUpdate(compactData.getMostRecentTimeDecayUpdate());\n\n            for (Weighted<Integer> sample : compactData.getWeightedSample()) {\n                Integer reference = sample.getValue();\n                Integer newReference = tree.addPoint(reference, sample.getSequenceIndex());\n           
     if (newReference.intValue() != reference.intValue()) {\n                    pointStore.incrementRefCount(newReference);\n                    pointStore.decrementRefCount(reference);\n                }\n                sampler.addPoint(newReference, sample.getWeight(), sample.getSequenceIndex());\n            }\n            components.add(new SamplerPlusTree<>(sampler, tree));\n        }\n\n        RandomCutForest forest = new RandomCutForest(builder, coordinator, components, random);\n        if (!state.isCurrentlySampling()) {\n            forest.pauseSampling();\n        }\n        return forest;\n    }\n}\n"
  },
  {
    "path": "Java/serialization/src/test/java/com/amazon/randomcutforest/serialize/json/v1/V1JsonResource.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.serialize.json.v1;\n\nimport lombok.Getter;\n\n@Getter\npublic enum V1JsonResource {\n\n    FOREST_1(\"forest_1.json\", 1, 25, 128), FOREST_2(\"forest_2.json\", 4, 40, 256);\n\n    private final String resource;\n    private final int dimensions;\n    private final int numberOfTrees;\n    private final int sampleSize;\n\n    V1JsonResource(String resource, int dimensions, int numberOfTrees, int sampleSize) {\n        this.resource = resource;\n        this.dimensions = dimensions;\n        this.numberOfTrees = numberOfTrees;\n        this.sampleSize = sampleSize;\n    }\n}\n"
  },
  {
    "path": "Java/serialization/src/test/java/com/amazon/randomcutforest/serialize/json/v1/V1JsonToV3StateConverterTest.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.serialize.json.v1;\n\nimport static org.junit.jupiter.api.Assertions.assertEquals;\nimport static org.junit.jupiter.api.Assertions.assertTrue;\nimport static org.junit.jupiter.api.Assertions.fail;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.nio.charset.StandardCharsets;\nimport java.util.ArrayList;\nimport java.util.Random;\nimport java.util.stream.Stream;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.params.ParameterizedTest;\nimport org.junit.jupiter.params.provider.Arguments;\nimport org.junit.jupiter.params.provider.MethodSource;\n\nimport com.amazon.randomcutforest.RandomCutForest;\nimport com.amazon.randomcutforest.config.Precision;\nimport com.amazon.randomcutforest.state.RandomCutForestMapper;\nimport com.amazon.randomcutforest.state.RandomCutForestState;\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\npublic class V1JsonToV3StateConverterTest {\n\n    private V1JsonToV3StateConverter converter;\n\n    @BeforeEach\n    public void setUp() {\n        converter = new V1JsonToV3StateConverter();\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"args\")\n    public void testConvert(V1JsonResource jsonResource, Precision precision) {\n        String resource = 
jsonResource.getResource();\n        try (InputStream is = V1JsonToV3StateConverterTest.class.getResourceAsStream(jsonResource.getResource());\n                BufferedReader rr = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));) {\n\n            StringBuilder b = new StringBuilder();\n            String line;\n            while ((line = rr.readLine()) != null) {\n                b.append(line);\n            }\n\n            String json = b.toString();\n            RandomCutForestState state = converter.convert(json, precision);\n\n            assertEquals(jsonResource.getDimensions(), state.getDimensions());\n            assertEquals(jsonResource.getNumberOfTrees(), state.getNumberOfTrees());\n            assertEquals(jsonResource.getSampleSize(), state.getSampleSize());\n            RandomCutForest forest = new RandomCutForestMapper().toModel(state, 0);\n\n            assertEquals(jsonResource.getDimensions(), forest.getDimensions());\n            assertEquals(jsonResource.getNumberOfTrees(), forest.getNumberOfTrees());\n            assertEquals(jsonResource.getSampleSize(), forest.getSampleSize());\n\n            // perform a simple validation of the deserialized forest by update and scoring\n            // with a few points\n\n            Random random = new Random(0);\n            for (int i = 0; i < 100; i++) {\n                double[] point = getPoint(jsonResource.getDimensions(), random);\n                double score = forest.getAnomalyScore(point);\n                assertTrue(score > 0);\n                forest.update(point);\n            }\n            String newString = new ObjectMapper().writeValueAsString(new RandomCutForestMapper().toState(forest));\n            System.out.println(\" Old size \" + json.length() + \", new Size \" + newString.length()\n                    + \", improvement factor \" + json.length() / newString.length());\n        } catch (IOException e) {\n            fail(\"Unable to load JSON resource\");\n     
   }\n    }\n\n    @ParameterizedTest\n    @MethodSource(\"args\")\n    public void testMerge(V1JsonResource jsonResource, Precision precision) {\n        String resource = jsonResource.getResource();\n        try (InputStream is = V1JsonToV3StateConverterTest.class.getResourceAsStream(jsonResource.getResource());\n                BufferedReader rr = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));) {\n\n            StringBuilder b = new StringBuilder();\n            String line;\n            while ((line = rr.readLine()) != null) {\n                b.append(line);\n            }\n\n            String json = b.toString();\n            int number = new Random().nextInt(5) + 1;\n            int testNumberOfTrees = Math.min(100,\n                    1 + new Random().nextInt(number * jsonResource.getNumberOfTrees() - 1));\n            ArrayList<String> models = new ArrayList<>();\n\n            for (int i = 0; i < number; i++) {\n                models.add(json);\n            }\n\n            RandomCutForestState state = converter.convert(models, testNumberOfTrees, precision).get();\n\n            assertEquals(jsonResource.getDimensions(), state.getDimensions());\n            assertEquals(testNumberOfTrees, state.getNumberOfTrees());\n            assertEquals(jsonResource.getSampleSize(), state.getSampleSize());\n            RandomCutForest forest = new RandomCutForestMapper().toModel(state, 0);\n\n            assertEquals(jsonResource.getDimensions(), forest.getDimensions());\n            assertEquals(testNumberOfTrees, forest.getNumberOfTrees());\n            assertEquals(jsonResource.getSampleSize(), forest.getSampleSize());\n\n            // perform a simple validation of the deserialized forest by update and scoring\n            // with a few points\n\n            Random random = new Random(0);\n            for (int i = 0; i < 100; i++) {\n                double[] point = getPoint(jsonResource.getDimensions(), random);\n                double 
score = forest.getAnomalyScore(point);\n                assertTrue(score > 0);\n                forest.update(point);\n            }\n            int expectedSize = (int) Math\n                    .floor(1.0 * testNumberOfTrees * json.length() / (number * jsonResource.getNumberOfTrees()));\n            String newString = new ObjectMapper().writeValueAsString(new RandomCutForestMapper().toState(forest));\n            System.out.println(\" Copied \" + number + \" times, old number of trees \" + jsonResource.getNumberOfTrees()\n                    + \", new trees \" + testNumberOfTrees + \", Expected Old size \" + expectedSize + \", new Size \"\n                    + newString.length());\n        } catch (IOException e) {\n            fail(\"Unable to load JSON resource\");\n        }\n    }\n\n    private double[] getPoint(int dimensions, Random random) {\n        double[] point = new double[dimensions];\n        for (int i = 0; i < point.length; i++) {\n            point[i] = random.nextDouble();\n        }\n        return point;\n    }\n\n    static Stream<Arguments> args() {\n        return jsonParams().flatMap(\n                classParameter -> precision().map(testParameter -> Arguments.of(classParameter, testParameter)));\n    }\n\n    static Stream<Precision> precision() {\n        return Stream.of(Precision.FLOAT_32);\n    }\n\n    static Stream<V1JsonResource> jsonParams() {\n        return Stream.of(V1JsonResource.FOREST_1, V1JsonResource.FOREST_2);\n    }\n}\n"
  },
  {
    "path": "Java/serialization/src/test/resources/com/amazon/randomcutforest/serialize/json/v1/forest_1.json",
    "content": "{\"rng\":{},\"dimensions\":1,\"sampleSize\":128,\"outputAfter\":32,\"numberOfTrees\":25,\"lambda\":7.8125E-4,\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"parallelExecutionEnabled\":false,\"threadPoolSize\":0,\"executor\":{\"executor_type\":\"SequentialForestTraversalExecutor\",\"executor\":{\"treeUpdaters\":[{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.6688810574953065,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-0.6694446327136416,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-0.7099435869862155,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-0.6773930450002961,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-0.7370178483016269,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-0.7238772617635025,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-0.8175803370004122,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-0.7758355111900491,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-0.844745847625517,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-0.7969586787740108,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-0.8391000844693494,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-1.0895485530999558,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.8314768328093453,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-0.838073853403588,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-0.8551831713845987,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-0.8058383270521662,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.1311393708193649,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.051359534840302,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-1.3602646883333933,\"sequenceIndex\":191},{\"point
\":[0.730967787376657],\"weight\":-0.8120967944743611,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-0.8412152503792993,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.1937884573691986,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-0.9576425135569512,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.1745351394411094,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-1.2719590656044537,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-0.9663214704708991,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-1.0767487950245458,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-0.8862884033039824,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.9402075771773278,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.1171274952744488,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-0.9760712216922116,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-1.209212861519442,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-0.9894347876931673,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-1.2286851681923492,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-1.8929615161994924,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-1.463831142380754,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-1.2704648701797001,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-1.4393749275345746,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-1.4060006577653574,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-0.8148845029074638,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.360125646960061,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.0423688595531402,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-2.553495220744535,\"
sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-1.4910857979989445,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.3317600115556942,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-0.959177855457047,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-2.7665000110087017,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.2862562017089456,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-1.4123571398870165,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.6402049971323254,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.3664624302145258,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.015903796762246,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.054501582696978,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.1562451840794954,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.0944423940504946,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-0.9242013521567816,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-1.4809513774064462,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-1.0448237468808645,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-1.1281627461045587,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.2371952431519164,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-1.335000218061075,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-1.0033032793915928,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-1.4320823771973759,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-1.2684611705814648,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-1.442832305582126,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-3.265475832121588,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\"
:-1.69980080710007,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-1.8327848104692763,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-1.4585940595367028,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-2.4744002140273547,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-1.949166867023715,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-2.0296441767906503,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.867753988264947,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-2.6221320839127484,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-1.4363740719383313,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-1.5903510227982356,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-1.8760016255517504,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.857390233550908,\"sequenceIndex\":212},{\"point\":[0.730967787376657],\"weight\":-2.4774735223520823,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-4.585151612810165,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-1.6774559748537292,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-1.5417237717712085,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-2.1625413968401896,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-2.4643965193289397,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-2.0104729576220635,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-7.162157053775397,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-3.29441413539427,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-2.7176687256007863,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-1.5224061214648075,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-2.7629376079269425,\"sequenceIndex\":90},{\"point\":[0.73096778
7376657],\"weight\":-2.268919552260495,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-1.0809978342615045,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-1.638796168124627,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-5.359628619182979,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-4.089934341675898,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.3542128410657468,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-1.2992143969963252,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-2.2723652738369524,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.617107747050164,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-3.8103984532377875,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.6904584135996799,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-3.667653827928123,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-4.236443260134257,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-1.3895151777205472,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.7617310145560734,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-8.878206083060684,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-1.7803986028498844,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.5771117256819902,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-2.6827966301130877,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-1.483437571521761,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-1.441508147436031,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-0.9955171996152521,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-1.584238767343765,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-2.554670097909892,\"sequenceIndex\":114},{\"p
oint\":[0.730967787376657],\"weight\":-2.742779405966055,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.386770714515293,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.9209347694994323,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-1.4889444303609287,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.2515132464104808,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-1.659274647098906,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-1.453380181643202,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-4.451051624349917,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-5.336198647833298,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-1.551768518633028,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-2.775494119010644,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-2.4583243143275446,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-1.620742108354329,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-1.4785060894456088,\"sequenceIndex\":253}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.4598864382520127,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-0.4746032720716633,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-0.5568547624146211,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-0.48560546738000987,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-0.4979307010904822,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.571389126958656,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-0.5982928453295104,\"sequenceIndex\":172},{\"point\":[0.730967787376657
],\"weight\":-0.5037392277089343,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.4916107245299145,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-0.6025756908956976,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-0.8688532821468505,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.5982668644912492,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-0.6021041505718532,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-0.7274571266664908,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-0.6654429106276508,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.5144903797659188,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-1.093625882714599,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-0.7239899086841673,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-0.6170644758508531,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-0.6744938724043,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-0.7239772226477125,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-1.0584791515996916,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-0.9006528634835586,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-0.814429688490947,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-0.953352184022313,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.8808235064808103,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-0.8523667128680026,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-0.792749361777971,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-1.1274846114403103,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-0.6670483594005042,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.7740001739265925,\"sequenceIndex\":125},{\"p
oint\":[0.730967787376657],\"weight\":-0.7114960804632913,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-0.6601594370552187,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-1.2914616029551815,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-1.4653700887304537,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.2142005254090709,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-0.8351680104085134,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-1.0483239813002518,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.2882353912604798,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-1.0759929280317087,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-1.1465263840906157,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-1.278209444828219,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-0.7303815806755529,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-1.6552212049721997,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-1.2668374429513252,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.2620229695828002,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-1.542923035506623,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.2465968112684012,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.6052008692640545,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-1.085060587670115,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.135171891489322,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-0.9505362519768855,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-0.9568516710936271,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.3600525447711713,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.2338868096793152,\"sequen
ceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-1.1172934897253752,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-1.1068152932635398,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.1658852696544753,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-1.2286731265834137,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-1.626867953631204,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-1.1089353888069637,\"sequenceIndex\":254},{\"point\":[0.730967787376657],\"weight\":-0.7954485984612603,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-1.1450423587614187,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-0.7642190744334827,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-1.1790546685763579,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-1.4154730224575665,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-2.3198558360766177,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weight\":-1.7002952368913662,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-2.03422769978397,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-3.5109412885021487,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-2.3499528115804007,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-2.701728455054479,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-2.0533128197558863,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-1.4318488215284482,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.3566248768796831,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-2.3376175044670946,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-1.0887648161840557,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.3806606118790359,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-
1.3384414363851929,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-1.66358687965076,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-1.6498622383501975,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-6.112443209875558,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-1.2493020347490515,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-2.5702200826612907,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-3.4318407938500037,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-1.2198167281526948,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-3.127545398871097,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-2.0962101547732455,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-2.526849458527857,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-3.316003078229306,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.401006170880422,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-3.365956527402797,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-1.280074096406072,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-3.2062141689217705,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.6844984194742731,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-1.9994953694982531,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-1.7754375214295273,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-2.4297442724783673,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-2.2324726399801267,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-1.7987657263500112,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-2.3790035619962784,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.2441072953716394,\"sequenceIndex\":209},{\"point\":[0.73096778
7376657],\"weight\":-4.861967892186293,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-1.142098169150573,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.9986063006489297,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-3.33003565897416,\"sequenceIndex\":159},{\"point\":[0.730967787376657],\"weight\":-2.782357199798995,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.6763419918437137,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-1.9226755516731338,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-3.2753040522574786,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-2.132090423734058,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-1.503527931557662,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-2.1226760979531423,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-1.605485597130487,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-1.1318387705853517,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-2.0423216433553315,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-1.415514231413752,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-1.5690274895599907,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.5668008666488293,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-1.7361418360041414,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-2.253770974860661,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-1.5680683447181383,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-1.4438566715805006,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-0.9148500224695411,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-5.3950381164925885,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-2.524935451607215,\"sequenceIndex\":190},{\"p
oint\":[0.730967787376657],\"weight\":-2.39216803050598,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-1.4255377424738875,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.6084608564073564,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-0.6179686183086871,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-0.6412342794373282,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-0.635888511053123,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-0.646954029837056,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-0.6416011662222623,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-0.6697495230795004,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-0.6855986388859618,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-0.6733299678230226,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-0.8456116832752445,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-0.6595573455051265,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.7818963687299432,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.7242726094971086,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-0.7676564302803074,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-0.7196063435599683,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.7023381441964808,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-0.6990463810153638,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-0.9706658888231493,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-0.7854523327189776,\"sequenceIndex\":185},{\"point\":[0.7309677873
76657],\"weight\":-1.5379117443444936,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-0.9743642328162164,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-1.1601247027043582,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-0.694358444060882,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-0.7865991569435075,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-0.7866625900573727,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-0.9458278984561896,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-0.7574742830477769,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-0.8105420344688304,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.817381975133741,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.7572429564939562,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-0.7647576679538656,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.7891532248024621,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.1038836285849694,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-0.9326280818754813,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-1.2039088515760283,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-1.0668944246316745,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-1.140992643871671,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-1.3319042184082273,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.1557869954803173,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-1.5601516471849939,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-2.5176857322565125,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.7588384800849302,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-1.2457054171595865,\"sequenceIndex\":218},
{\"point\":[0.730967787376657],\"weight\":-1.8694609769692652,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.4024438896156235,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-2.4211286312094744,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-1.4926669550949925,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-0.9183594711474482,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-0.8092435156432933,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-0.9931770822810341,\"sequenceIndex\":159},{\"point\":[0.730967787376657],\"weight\":-1.6539763765965285,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.378508991917559,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-1.129119434008446,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.4271272502438137,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-1.2323981656525358,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-2.702389463535556,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-0.8795665575812794,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.0688479544579668,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-1.4430782407610507,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-0.9119605992033363,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-1.3175207369465065,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-0.800880316424639,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-1.5095687157884452,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-1.1803576801785824,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-0.9780629061340059,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-1.6784573941614913,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-1.3256525461806
938,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-1.4628360648939904,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-2.1190202578998063,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-1.4280977787832962,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-1.5428524532235766,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.0832389308864134,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-2.376231466267528,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-1.8864929966469879,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-3.5046449211056574,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-5.895974654000236,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-2.3692190830657784,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-1.1952344559561106,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.7174184183310466,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.7261063391497298,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-2.056947789122293,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-3.2322832484212647,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-5.9196991187933214,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-2.5343098709618053,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-2.1536939993176802,\"sequenceIndex\":212},{\"point\":[0.730967787376657],\"weight\":-1.4648540252681124,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-2.1121301265390477,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-2.4348211400195194,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-3.043173548463852,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-3.757930176685792,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"wei
ght\":-4.660672824063548,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-2.9863390963181904,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-2.4942747747696696,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-1.7766578361808776,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-1.8388571356055712,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-2.024296425696412,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-3.405456865268623,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-2.356732126757701,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.939247795667226,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.0643617463864807,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-2.5255353988162614,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-3.2995612688154523,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-1.9278419776710214,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-2.1650765215580074,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-2.2002362606268107,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.8034377732195606,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-2.8111155100070078,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-2.7909265348446497,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-3.8185769626082946,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-2.128898640956419,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-2.8075041544933805,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-4.016094814570343,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-3.5401417393213697,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-1.3187550923256954,\"sequenceIndex\":114},{\"point\"
:[0.730967787376657],\"weight\":-3.060741528289331,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-2.7060399492993503,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-3.3454514931582486,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-1.8253012356546707,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-2.269223257968656,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-3.320729280665398,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-2.155007855756757,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.6029215465375557,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-2.3240446044198246,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-1.596989580267768,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-1.5610183134862492,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-2.369977703447609,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-2.3334217379134725,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.3458564027345714,\"sequenceIndex\":254}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.4918387193284984,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-0.49850744587000817,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-0.5154944626176663,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-0.49873352412810024,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-0.53799056414208,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-0.5495557111649064,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-0.5634921840243842,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"
weight\":-0.5054095662365382,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-0.5902884831826791,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-0.5809033703495999,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-0.5831180216438226,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-0.6138948387469644,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-0.5640759855606138,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.8160691992091744,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-0.5908022073665468,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.5938396236616338,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-0.6146011583642937,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-0.6401658501607816,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-0.66962407039031,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-0.6478147688363068,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.6407755755079727,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-0.7132000384695297,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-0.6039685053846763,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-0.6723401772961782,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-0.6139356405224405,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.7826294529478222,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-0.5916551143926003,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.8745061449519493,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-0.9868601746266324,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-0.698853218218544,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-0.6982399850797093,\"sequenceIndex\":124},{\"point
\":[0.730967787376657],\"weight\":-0.8268240997228921,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.5796616544917688,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weight\":-1.4228690566171962,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.1087400045869216,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-1.9767938814497483,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-1.1193063440217477,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-0.9897982409063251,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-0.958447158263374,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-0.73859717224796,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-0.718687793327568,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.742240750871137,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-1.2357994186688368,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-1.9351889517328797,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.2217801299397852,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-1.5224376471600216,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-0.6280606249060455,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-0.7536476875086657,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-1.0926854258877494,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-0.8005880962274846,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-0.9348661530025668,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-1.8327145404416971,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-0.8879228616596568,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-0.7115016313802975,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-1.315008994998176,\"seq
uenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.0687142081802856,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-2.5947198358624446,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-1.4823989972195553,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-1.479267574102055,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-0.8611259174729374,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-1.772199802352788,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-1.4907428324504157,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-1.2616856134082106,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-2.393785932159696,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-2.556750207006987,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-1.5909605635193538,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-2.3552623877056025,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-5.595833755752905,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-2.6644763323398806,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-2.5345437499108585,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.434447023380137,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-4.894453553442508,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-2.754746721553871,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-1.2036069256692608,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-1.1656849998316028,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-2.565523521924805,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-1.6808968333399366,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.3794031958965502,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-4.5
30107001642157,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-2.5756982124452668,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-0.9459409024531418,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-1.955933643545851,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-1.5831007597354272,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-2.30241030492723,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-1.9601341837524529,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-2.266709045821773,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-1.9815998129211139,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-3.182156809235952,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-3.7486824411011614,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-2.0135310600568976,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-2.35554023277745,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-1.604772288549963,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-3.7400619943798,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-3.909693848884693,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.2917655913023134,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-1.252965876301647,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-3.4590619300691205,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-1.8943557759120602,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.3354821041698166,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.0023730112591953,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.6669905817529977,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-3.8654549690155227,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"w
eight\":-0.9528853441363206,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-8.395085903344876,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-2.241579517541763,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.6951147751150895,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.9991442770182315,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-2.9014560671213854,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-0.913151746578685,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.9393068470934942,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-2.6990890489206807,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-4.383309354129223,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.9405281504834977,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-7.711357221889984,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-3.214089016171303,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-2.159653859710726,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-1.6344256408241522,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-2.933132517382972,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.8566719139966223,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-1.807848110330068,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.517376977078577,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-1.9333113242257403,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-2.1257369900901955,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-4.1085461653375335,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-4.3349239459342614,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.7007341769299478,\"sequenceIndex\":231},{\"point\
":[0.730967787376657],\"weight\":-3.1191140632825016,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-3.5663470711835585,\"sequenceIndex\":254}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.3942427248938064,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.4042725160332574,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-0.4031332489384645,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.4248673737471514,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.49724345653606794,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-0.4117999317655414,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-0.5515701402617614,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-0.4458177817342104,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-0.5823579383232954,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.5389107500411541,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-0.5234519983749363,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-0.4151448053525711,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-0.5180006085049377,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-0.693230875418893,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-0.7728328013408399,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-0.49165456784642136,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-0.4864262087994201,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-0.6146121289590856,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-1.023549216901151,\"sequenceIndex\":77},{\"point\":[0.730967787376657],
\"weight\":-0.9452051543542427,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-0.5562081896891584,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-0.7585134594089914,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-0.8723029890414317,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-0.45717589131167574,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-0.4306874275137363,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-0.6448811390127112,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-0.8614681491332961,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-0.7262033433729348,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-1.046289713440814,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-1.0315060444934334,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-1.241184501886098,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-0.6147017733342368,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-0.9335729949908398,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-1.4929513073022094,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.8504298479694588,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-0.6409484564986658,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-0.8683435171945882,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.3252962110315236,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-1.1060749431786654,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-1.1909012954178932,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.2335539644463676,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-0.8232400445706083,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-0.8257457947621415,\"sequenceIndex\":86}
,{\"point\":[0.730967787376657],\"weight\":-0.9745964511171992,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.1027673672654497,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.3367209756020972,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-1.174392224476621,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.656078267803931,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.01856811964963,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.0468995189392591,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-0.9051109527322432,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-0.8978783104900538,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-0.9596981022017028,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.133590472669345,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.763553441398202,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-0.8592812086592356,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-1.184731946675002,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-1.4699083793895873,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-1.1794116209750456,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-1.0322396989139246,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-1.059174573112066,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.286550489542196,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-1.4091090826463857,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-0.787229759982829,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-2.184311055324552,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-2.78360259751831,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-1.4447277286364808,\"seq
uenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-3.0803113082381857,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-7.4858338480328985,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-1.1924639194540714,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-3.5312811638752457,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-4.61851614247996,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.5490751026019975,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-1.8710750969672574,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-3.7651146174928902,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-2.869141583177973,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-1.8398028957671668,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.7107495591686424,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-2.009435924951885,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-1.3206774832842743,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-1.23625749360816,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-8.221135044513039,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-2.3824955053212986,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.2618462216439241,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-3.0208050942190816,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-2.8302012159619,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-1.2380307074299786,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-1.262383014229145,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-2.5160268265881522,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-1.1461067387936843,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-2.37178
4224359063,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-1.7720465554855762,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-4.235063989170059,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-3.452916642684486,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.8658547344858387,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-1.6862344813661352,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-1.609419878622262,\"sequenceIndex\":212},{\"point\":[0.730967787376657],\"weight\":-2.025849253186489,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-1.8861538644343543,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-3.6822400539999793,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-2.5736759452572406,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.559464439092824,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.2305979076148819,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.5809875507005888,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.422821607786243,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-2.1598997646648823,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-3.7874958678888277,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-2.932436387446123,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.2078530247588337,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.916378889825776,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-2.16860727925941,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-1.5327423121650618,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-1.5883825185981868,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-1.7174053170964834,\"sequenceIndex\":206},{\"point\":[0.730967787376657]
,\"weight\":-1.5645415863275716,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.9324304225902686,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-3.093401044899174,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-2.732499474212359,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-3.2895553950687093,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.4819404632848447,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-2.4061570181549423,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-1.4156953884466694,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-1.900019174582312,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-3.1764660858300706,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-2.051904936803954,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-3.0294094644491,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-2.7586771610933547,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-0.8196283889377628,\"sequenceIndex\":253}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.3745367531639623,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-0.37877978048338945,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-0.3866575265633535,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.38074600699032013,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-0.3895194537143277,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-0.410013443232726,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-0.42562278036230344,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-0.48882920075
452674,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-0.6004133546638947,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-0.4085594000826217,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-0.5199605786938322,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-0.43502887228350084,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-0.8203405997387597,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-0.4314203402514819,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-0.4638653741264149,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-0.5465998651769503,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-1.0472446394027142,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-0.6470191534135813,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-1.2673135743122983,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.6726014098392259,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.4335958898935595,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-0.6735191209332574,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-0.5336055367778775,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-0.7237510415496837,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-0.4943594731410316,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-0.8651535807853445,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-0.9062460455059667,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-0.9474510905870214,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-0.4699322890232057,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-0.6424683303072316,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-0.8910027646512361,\"sequenceIndex\":31},{\"point\":[0.7309677
87376657],\"weight\":-0.8781700438023343,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-2.8692977804274182,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-1.2136261992782689,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-2.1267850730439237,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.1868469116367208,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-1.1530702998878244,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-1.8801301069372478,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-1.2919932138493835,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.2412227640854396,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-2.5345474304374633,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-0.501822027671344,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-0.9487670281929158,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-0.7680157053767822,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.8498176978085695,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-0.6260014036404236,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-1.0367944283556563,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-1.454500440368106,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-1.0061750906199864,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-0.6452497103602544,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-0.73012721888952,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.3263358405590195,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-1.400695527403614,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-0.9138764966253813,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.722458255359197,\"sequenceIndex\":
148},{\"point\":[0.730967787376657],\"weight\":-1.361150941093798,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.0660561860120379,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-0.7220116811799626,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-1.428248134564843,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-2.055165014235044,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-1.0320686126003955,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-1.3100290860868042,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-1.1887029824643782,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-1.2263810708800624,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-1.9625081481376203,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-5.334687569624574,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-3.2699779285523998,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-2.830816837621441,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-1.323305273171099,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-5.661370528075809,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-5.111014563752955,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.979195585599677,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-3.236502864208798,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-3.7074041277911505,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-2.6342869152543087,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-5.772923028687533,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-2.0532623897277236,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-1.9429028472685381,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-2.160977549784784,\"seq
uenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-2.41317423440498,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-1.5380935359527954,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-2.5932486493498828,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-2.651336940861381,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-2.087569891662539,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-6.83310242504874,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.8992992050471766,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-1.7376934942541404,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.7967052404358608,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.872316811115721,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-1.4768499302144764,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.7644095289774624,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.2665872657297679,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-1.9350074408696945,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.8527898647382832,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.411576500866859,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-1.7018314340724687,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-1.538824911782397,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-2.413471762211262,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-3.215480858041949,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-1.0082653509063784,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-1.037641324613221,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-1.5010261890900773,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-1.551
6026242389274,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-2.976061467939254,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-2.945755418570011,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-1.440502505926924,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-1.919890073556987,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-2.0594164628269587,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.349990348099767,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-2.8020733996952494,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-2.9971363182863247,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-2.2218181439455975,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-2.9411722734755057,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.5820831542489464,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-1.4376031523923962,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-2.0697523473026997,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-0.8060669121521558,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-3.7008991108145652,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-3.3615471392694625,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-3.2199804034812924,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-2.063867918830157,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-3.026136287885627,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.076649947944664,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-2.4408831774205084,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-2.2488804459499314,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-2.428435705379277,\"sequenceIndex\":185},{\"point\":[0.730967787376
657],\"weight\":-1.9007481380406148,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-1.6914540893997076,\"sequenceIndex\":253}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.41838550379824685,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-0.4225232620532363,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.5458623673305161,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.440290019709832,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-0.4462554509114506,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-0.5575096128224734,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-0.589353352142135,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-0.4644901179168558,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-0.5358813573532822,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-0.6822493095503841,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-0.6456115541618354,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-0.6797706048633014,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-0.5713944950198999,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-0.6066453472743127,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-0.6153336133132159,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-0.4742214380701076,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-0.5485109446972981,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-0.6822214206327576,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-0.8258739991579688,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-
1.0849538120813116,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-1.2933332938752884,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-0.7691352751090798,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-0.7961299301584364,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-0.7448673905112919,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-0.7523717769030436,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.8056383290753033,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-0.7724538107220646,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-0.7364803138593382,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-0.8566346235362479,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.7615456530180789,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-0.7473313060228517,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-0.5111926579397474,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-1.0980227007281955,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weight\":-0.7808676618557331,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-0.8741755328977605,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-0.9015939099493882,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-0.8711946236770283,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-0.9164816478096371,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-1.1894903956480227,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-1.1188615479311899,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-1.3776166054386196,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-2.437189822979597,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.5095437350519867,\"sequenceIndex\":222},{\"point\":[0
.730967787376657],\"weight\":-0.9859140329343965,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-1.7216809161890398,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-0.9199489753062121,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-0.9799334669119212,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-1.4774969123746513,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-1.4176731533464741,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-0.7690440085045831,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-0.8661113313587943,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-1.4126156756883166,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-1.6977487109362226,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-0.7923269153389717,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-0.8109493583420584,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-1.8875672778228212,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.8206980215368391,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-1.1713228551585004,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-0.909746276632225,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-0.762364924343923,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.8996920612968247,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-1.3774224772822559,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-0.9761422751775989,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-0.9018400800821313,\"sequenceIndex\":256},{\"point\":[0.730967787376657],\"weight\":-2.0804881505691815,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-1.1263528067939719,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-4.751211145233858,\"sequenceI
ndex\":190},{\"point\":[0.730967787376657],\"weight\":-1.6504039687820673,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-2.7115755487110413,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.6163036794499503,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-1.039650743568114,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.323212101353041,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-3.4488018562640885,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-1.250333720351577,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-1.0915392706309925,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-2.6642871842345945,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-3.457727730100165,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-2.540460221084745,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-2.0914951671752235,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.128008697752501,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-4.6277304684209035,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-3.6925348359430905,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-2.5453653501986655,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-3.0921252637212833,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-3.119365481312482,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-1.5312566771856495,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.748947023581692,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-1.567081790181768,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-1.2935113522053503,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-2.2852729745244216,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-2.29523165
66320627,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-1.10977352280912,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-1.126356411622563,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-4.386698711374045,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.4592965294341962,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-2.625818019837613,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-1.7614236294532615,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-3.8385350974891326,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-2.0754303658889754,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-2.6399716150219596,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-5.205584774989032,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-1.3557736024558686,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.9972368006278618,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-1.5529374171545067,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-2.45452974178186,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-2.551090937076561,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-2.226810831502148,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-0.8035396406463245,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-1.4049810345189315,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-1.3961530439972936,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-4.19398002120294,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-2.970030755361144,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-1.972174506262721,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-1.1140159354403636,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\
"weight\":-2.2543674663474516,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-2.139112676550789,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-3.360206480167664,\"sequenceIndex\":254},{\"point\":[0.730967787376657],\"weight\":-2.294739589415878,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.8122597064115298,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-2.453545247725982,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-1.6839120204394413,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-1.9686542979337376,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-2.1007321854747265,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-3.899233852145424,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-1.553860120416265,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-4.082598151203524,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-3.9153261401703294,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-0.9988351944691639,\"sequenceIndex\":252}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5656921098533466,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-0.5736699983755305,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-0.5970976317951886,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-0.6010938496959839,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-0.6003176245756465,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-0.5977588993197622,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.6117451860416946,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-0.61407462847
0778,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-0.6776383832139771,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-0.7741493267767349,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-0.717082382980193,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-0.7442769417015882,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-0.6131957600806823,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-0.6168054605165058,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-0.683662465632388,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-0.6268568658866474,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-0.7050786130782036,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.0156829898970225,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-0.9899039765665348,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-1.1718958274261029,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.9954565850603413,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-0.8108783216932964,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-0.747320920643402,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-0.9740092420594945,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.7884782678004076,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-0.6662114687420471,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-0.648795252227527,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-0.7053812017730513,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-0.9606626553083641,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-0.6965978924925147,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-0.7653520712504159,\"sequenceIndex\":199},{\"point\":[0.730967787376657],
\"weight\":-0.6676278568725179,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-1.0538063914547726,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-1.2752424317162336,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-0.9681127313608838,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.1007444286198425,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-1.311171673937453,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-1.7128361123385918,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-0.9944290728440663,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-1.6704869950278378,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-1.4142881148540378,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-1.0677593340615175,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.0516177413909489,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-1.4586809190319239,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.9692829469039261,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-0.9152665347281144,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-0.7541353290169365,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-1.2758284324803708,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-1.261252560651491,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-1.369012586977198,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-0.9732950576062757,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-1.407921660789241,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-1.111398745686547,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-0.868095734459801,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-0.762498054762816,\"sequenceIndex\":217},{\"point\":[0
.730967787376657],\"weight\":-1.743391016284396,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.8723312680071155,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-1.5453949410634549,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-1.0960827762337249,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-0.7932313689653547,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-2.728198692810048,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-0.944600870835263,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.2675144136438241,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-0.8166852891892733,\"sequenceIndex\":256},{\"point\":[0.730967787376657],\"weight\":-2.1057046246564664,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.2366165771793083,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-2.159905045029235,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-3.4582820898218376,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-2.7066297040799774,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-2.510198905444525,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-2.085492644615235,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.3278226051344535,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-3.431186109495991,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-2.294806384368148,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-1.4108034492867334,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-1.738410320657588,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-3.6235510086228957,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-3.9479206115030645,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-2.838746697049354,\"sequenceIndex\
":166},{\"point\":[0.730967787376657],\"weight\":-3.5923656097297587,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-1.7255886124586144,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-2.2237588809154496,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-1.6100069173512694,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-3.335961409697231,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-3.8093345812025374,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-2.015589152207231,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-1.785834643373611,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.7438229724127239,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-1.8997131536718688,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-1.9844982030856861,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-1.015368032184167,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.9182195211664574,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-1.1118783411071498,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.8612409267913705,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-3.3866127129616466,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-2.3337278109293855,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-2.1435544229354435,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-1.8958543700298525,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-2.3112096359465477,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-2.218899018691928,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-2.460314815261322,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-3.728663638871481,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-3.28465012288
47433,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-1.6735591633291222,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-2.772359937312368,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-1.4038215846287816,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-1.4447132720433036,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-1.541051405595043,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.7021425475987217,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-3.547958594316238,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.3179385483114288,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-1.9429931647354697,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-1.9092781057791215,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-3.258028768235679,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-1.5615861125262778,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-1.9042528580634817,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-1.7106004424867165,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.5632290914349152,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.3777318053235523,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-6.436342737377894,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-1.8151493634535198,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-5.766785847524656,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-2.9801206798449047,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-2.853870235886857,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-1.3288896711915825,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-1.3614859829238684,\"sequenceIndex\":187},{\"point\":[0.730967787376657],
\"weight\":-2.330823242851595,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-1.1981023495301846,\"sequenceIndex\":244}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.3979004303302931,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-0.42110051135299487,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-0.432072079229562,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-0.4772164450459,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-0.4595473506779807,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-0.4713679959507915,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-0.4713136366225582,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-0.5965745082303615,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-0.4949040894807297,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-0.5974382735660391,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-0.5158330156858522,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-0.5117370261595496,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.7745822131216803,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-0.5818824508971533,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-0.574881270932269,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-0.6855161098007567,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-0.7708399594578343,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-0.6741984388592762,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-0.49977630350812285,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-0.9944713377
835025,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-0.6394381220170593,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.6252239722903458,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-0.8192053210808831,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-0.603450072468302,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-0.582192735673142,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-0.8848868048688218,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-0.8724754940558181,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.7556672665090586,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-0.6186955426832307,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.8320592669628364,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.6965719294292367,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.6962189510277936,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-0.9834786253975808,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-1.6001190046293554,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.1047104308695939,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-1.5629740843772701,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-1.0969345429412543,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-0.8699316812072716,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.0468348580067022,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-1.6876562501551347,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-1.2225577666136955,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-0.6677404918930775,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-0.6668369734219141,\"sequenceIndex\":21},{\"point\":[0.730967787376657
],\"weight\":-0.7228738332927922,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-1.2621866429683644,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-0.945407495495765,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-0.8742499832712327,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-0.875491920925412,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-0.8571952746689382,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-1.901318326155882,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-0.8346868823077689,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-1.5909370961093492,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-1.12703483149762,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-1.3067702705580897,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.5803521959340934,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-0.7945548374840099,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.9317565110787419,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-1.3280577459054195,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-1.4279609992564333,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-0.8806428595342358,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-1.0924166526542147,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.8960950764792546,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-1.0592409991463505,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-0.955385455505203,\"sequenceIndex\":256},{\"point\":[0.730967787376657],\"weight\":-1.0214027808939117,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-1.752924806129832,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-2.148841432237284,\"sequenceIndex\":67},{\"poi
nt\":[0.730967787376657],\"weight\":-2.6963990758963883,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-1.9938547482905526,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-3.41996607176652,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-2.5270762480304048,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-2.131937095145985,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-3.1942866048970857,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-2.038118041787221,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-7.20640254023289,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-1.5004195923811972,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-1.346346421326581,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.5111524277996766,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-1.1807758983271366,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-1.7035039976169173,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-4.4901806567231475,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-2.3714913752299016,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-1.5980468929981966,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-1.2625829423643455,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-1.2471481889209146,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-0.9956615910088513,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-2.0427841781516687,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-1.5192732806406097,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-1.9478589400262896,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.4950692518225535,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-3.9015500953619533,\"sequen
ceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-2.204410178942197,\"sequenceIndex\":159},{\"point\":[0.730967787376657],\"weight\":-1.3667761664770188,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-1.1539944354930676,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-2.344925896492984,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-3.353689292244069,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-1.5330797058353156,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-2.1926726030998114,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-3.235261214462398,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-2.28485646956944,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-2.1682204568954644,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-2.3211573580307863,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-1.4678808837165913,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-2.5907853542156936,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-1.67734819039643,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-4.332775314453334,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.346035557848181,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-3.1330415236820612,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-2.351574272577085,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-2.4017821789778995,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-2.1995326924149143,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.2284392575473988,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-0.8638438881575636,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-1.174632855487931,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-0.9607
519762285063,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-4.210752760550772,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-2.297502856688658,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-3.5355369933813043,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-2.4249899848120102,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-1.4693052061947043,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-7.750364832214417,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-2.362055845931037,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-2.17212465746521,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-3.251556028976327,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-1.424300615989522,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-2.2811916623370725,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-1.3073672886632108,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-2.2741418026509828,\"sequenceIndex\":255}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.43146758893534326,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-0.43557429034355205,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-0.43579843585184486,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-0.4397582914803788,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-0.512571718354482,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-0.5217539025912886,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-0.44523391094616505,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-0.5475509931081446,\"seque
nceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-0.7961112973521325,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-0.7444592709842324,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-0.5237357720926554,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.5741730326827027,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.6364227265567481,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.5404549061879133,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-0.47532960799358615,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-0.5499986033768038,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-0.8314886938408989,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.2478552870924284,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-0.855680831113282,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-0.8636390195993987,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-0.8645845849528438,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-0.8755046967400183,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-0.947282983146368,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-0.5782380592445479,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-0.9288495488801758,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-0.6917525221726755,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.6997420111772198,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-0.671007898438596,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-0.9817788988428946,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-1.0198931917076965,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.5018714588808663,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":
-0.6486044348700184,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-0.9354812269112043,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-0.8485252594296395,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-1.2398658703632062,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-1.7878331906696463,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-1.5382383364259857,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-0.944280203506797,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.5191248425465418,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.4421695323365806,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-1.4978270934938764,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.012587985529494,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.1268380048062832,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-1.0766926281514735,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-1.3080859203651838,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.4113282894742023,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-1.368809815986762,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-1.3806948286359886,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-0.8844224275389612,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-0.9555308173333571,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-1.0527643708738217,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-1.2254077557246803,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-1.3090878194581146,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-0.7783149920332701,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-1.0895953687585689,\"sequenceIndex\":27},{\"point\":[0.730
967787376657],\"weight\":-1.0813357918849698,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-1.264783616250924,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.0462801854491435,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.6904792095689085,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-1.6606390875191452,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-1.0621109288071795,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-0.9602193646067033,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.705122692538227,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-0.6746348334348757,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-1.9921750528106763,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-1.2390318998211738,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-3.5815095072529872,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-2.7995423422137122,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-0.8962782752247748,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-1.2794323578529414,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.3064982202725528,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-2.7415669456500433,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-2.395933237667176,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-2.4073667967351864,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-1.9978248489719568,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-1.7047302706360608,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-1.2707714518412112,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-2.774930319435379,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-3.135649437553182,\"sequenceIndex\":2
51},{\"point\":[0.730967787376657],\"weight\":-2.665378982978812,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-2.576264404095832,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-1.5135554563582352,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-2.798876677555753,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-2.1427337043639723,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.4113321062357487,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-1.9229764427385319,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-2.820169521671969,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-2.2732157066142276,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-3.5423014119262692,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-1.8198578826736882,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-1.5386420154928093,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-1.6568536824043778,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-1.7375816414740926,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.9070412017997278,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-3.1443981487224697,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-2.2396013830819417,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-3.0594646600299638,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-5.223976682762646,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-2.9494145469931117,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-2.046062473540764,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-4.541520926120505,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-2.1283835609183672,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-2.57605209136879
85,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-1.6134115942883618,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.4638440613441213,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-2.0369841642012974,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-3.331931561948335,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-3.4892510197978717,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-1.9867373843726694,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-2.1490986376227066,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-3.3006728428173857,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-2.26935484480584,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.1981617691568691,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-2.151157274248079,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-1.290834136451234,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-1.267548261015881,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-5.397343591307884,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-3.3943944189931767,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-2.1478382893980976,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-2.140847002833952,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-2.0474000822824414,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.3185312276333938,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-2.360756467964179,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-2.0257346459123227,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-1.124625706275347,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-2.483810465104045,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weigh
t\":-2.9020802121262603,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-2.587806609563866,\"sequenceIndex\":253}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5985264608618716,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-0.615465415186054,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-0.625204706664254,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-0.6607211807536036,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-0.733160628441427,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.7480832721878428,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-0.6325616318481434,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.7193981196282556,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-0.6885276533420687,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-0.8056386432673962,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.7362779855939124,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-0.7815833847203907,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-0.7769513245185988,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-0.8131585148052826,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-0.6435557257986289,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-0.8668465474750551,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-0.7658044113572764,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-1.1130536934540707,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.3264814615748264,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.0225278522462662,\
"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-0.9380905114169368,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-0.8464625795580908,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-0.7810523086260113,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-0.8321548830326747,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.9972875472498358,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-1.2028658013867521,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-0.8593616213501847,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-0.9311374707500794,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.8245151615771091,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-1.0701641033028275,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-0.8153430225018521,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-0.9397506564791762,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-1.1523355310972445,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-2.316954632148596,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-1.8314474044923728,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-1.326711152224425,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-1.2134507405359918,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-1.626959895492517,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.5162848218519063,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-1.2772461870862388,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.1080125965525818,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-1.2998345738957846,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-0.9812996271662214,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weig
ht\":-2.2278572984856244,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-1.1219028418402184,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-1.6718262150903234,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.8092366561749608,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-1.7042290920001961,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.1284334029091072,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.7484122763210357,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.3886637751988407,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-1.4217981910445874,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-1.5373913934401326,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.3989444547248966,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.8074422437180981,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-1.679420992783001,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.9511251892246928,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-1.2371073862759325,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-1.2344317469294006,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-1.1589815376608552,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-1.2631471501920901,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.8304423409650232,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-1.1564083342852114,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-1.036703450288186,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-1.322122992816131,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-3.145799497288541,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-1.691756943600974,\"sequenceIndex\":247},{\"point\"
:[0.730967787376657],\"weight\":-3.211854154948815,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-3.248461775753026,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-2.719806894941148,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-5.312553852349928,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-2.268553294124917,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-2.1120999390493895,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.3561565828717814,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.3604758053002233,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-1.8024687103424943,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.7300277061884552,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-4.1307798278778805,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-2.078669677058207,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-2.588885942101912,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-1.7364668927308655,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-1.6507344879517971,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-1.5287782808620034,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-2.2114927391136256,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-1.3599906458023305,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.0342693440838253,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-4.79201572030283,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-4.842644249601919,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-2.676810673238077,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.564581587789824,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.6567079533794447,\"sequenceIndex
\":131},{\"point\":[0.730967787376657],\"weight\":-1.7954119545786797,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-2.264780282394174,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-0.966199857303043,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-1.5679483913216696,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-1.910152645490697,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-2.2927364970699338,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-2.120325437765966,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.3703646844968707,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-2.3943631962318888,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-2.053515729627891,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-3.759263291488838,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-1.506369481053324,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-3.030206356761602,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.9131448631241217,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-2.196162489799756,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-3.052886608795893,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-1.7886503777424005,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-2.248530297075843,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.9429986841137883,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-3.248926202319391,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-2.6390378660572513,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-3.166156732978181,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-2.4512067116069063,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-2.45095192584284
5,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-1.3142009826350793,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-11.100537658413634,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-1.809087501964695,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-1.359180667725009,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-3.583521083746113,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-2.575616959010276,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-2.2581780572127714,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-1.852358522657793,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-3.1584149942851614,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-2.5294921040588036,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.6855953759777917,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-1.7149246688464164,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-2.148210641758875,\"sequenceIndex\":253}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5283790698673042,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.5832034273251664,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-0.5474141953972866,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-0.5922845535196044,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-0.6150736460452414,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-0.5531826654629085,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.5866327815696623,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-0.6108585289498049,\"sequenceIndex\":134
},{\"point\":[0.730967787376657],\"weight\":-0.6453437745690954,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-0.8362568824620145,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-0.710532486879776,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-0.6155600216231422,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-0.7510778596262119,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-0.630079658220941,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-0.588122740722473,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-0.6215114902853656,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-1.4202925291514323,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-1.1377901907912291,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-0.8121381866448434,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-0.8491147178382328,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.2464890600428453,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-0.7846918712032219,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-0.7789084079157159,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.1579854162006977,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-0.8115611112138372,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-0.7690804551726946,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-0.7885371679085424,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.6855211333101923,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-0.6995709990840997,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-0.7235407934970841,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-0.7087118675377284,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.6608240853
459527,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-1.3220963866287594,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-1.670563651323393,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-1.5628511063673614,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.4321612217549893,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.1440136571683723,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-0.965831115718165,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-0.9509275092884122,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.2868413359303235,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.1078335397778052,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-1.9603082104295482,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-1.4989088299614424,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-2.8795992951804763,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.8005241626249846,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-0.9110028293271484,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-1.5500177128238206,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-2.0821876575951728,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-1.6710969228000347,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-1.1464749795360702,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.6756244789012817,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-1.2624159202767733,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-1.0850305976783523,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-1.7245529533088575,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.3951111831950997,\"sequenceIndex\":239},{\"point\":[0.730967787376657]
,\"weight\":-1.40470422374607,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.1160360650816148,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-1.9253243289840702,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-0.7378272130969655,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-0.7346795097456673,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-0.7764133410508264,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-3.8429264216929373,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.0385050701051306,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-0.6969834762685099,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-6.35699953839354,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.4634656200358822,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-2.4045250577802393,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-1.7517765107202783,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-3.3897140111859247,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-1.8441584328862697,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-1.6854919260764845,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-4.961745468835444,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.5910279832906564,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-3.7924850897350972,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-1.3570287941044237,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-5.607516119649087,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-1.9895798635165223,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.3683304133117626,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-1.4336565162979449,\"sequenceIndex\":79},{\"poin
t\":[0.730967787376657],\"weight\":-2.8068722546750946,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-2.2797193301013707,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-1.2387948278325365,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-1.1833709647635144,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-2.0485392361363637,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-4.071480076750943,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.875743005891659,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-2.6318139228367685,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-3.9948897800585685,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-3.2064866362247155,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-3.5634880439089582,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-2.522650888350291,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-1.8379536503428568,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-4.6798758905911875,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-1.6128456536202669,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-2.071941594450168,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-2.303329756547118,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-2.215541768606544,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-4.8357711554975715,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-5.468685178714277,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-3.277169964918438,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-1.2704088520641788,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-2.1309822209848033,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-2.7617031898399165,\"sequence
Index\":8},{\"point\":[0.730967787376657],\"weight\":-1.4610545792647693,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-4.060998916292974,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-1.3869224350790204,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-2.3938442920911585,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-2.723964835607138,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-2.7869538457534277,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-3.360745478355683,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-1.6834853610196907,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-4.836005092709841,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.9652209758734123,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-2.7790992880483327,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-1.8639301357713143,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-2.288098088367965,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-2.932267203238342,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-1.883441085035591,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-4.067094925776925,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-1.6940279892104295,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.8517648161034913,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-1.2987935805571151,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.7511471803591436,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-4.634113632224792,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-6.332423883698831,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-1.3315021861810112,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-2.02
91078395835487,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-2.1277967895810583,\"sequenceIndex\":255}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.4564533112186506,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-0.46076263231395503,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-0.4638642318769172,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-0.4771009023087702,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-0.47794244150779686,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-0.5691869931949881,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-0.5284267436149688,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.48247089897299916,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-0.5981169578320444,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-0.48995281834204857,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-0.7888379259557037,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-0.6539124522733537,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.9229577091178605,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-0.6560768309092694,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.6965767748338334,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.6683590377082833,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-0.7190922802740541,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-0.8365191910529775,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-0.9440887130697458,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-0.570909063279135
2,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.5230506554459252,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-1.003569663850387,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.9909075003322744,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.8687512241484613,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.6838907307203738,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-1.000771572910703,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-1.1725959951853997,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-0.9998943682854018,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-0.6718063831670658,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-0.7470397446967049,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-0.7060770599636623,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-0.6965812564686975,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-1.3379180910309416,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-0.9999315273551973,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-0.835304219597804,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-1.010748853150331,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-0.9954971666772627,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-1.0753536667749999,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-1.4279460944197442,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-0.758448406610987,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.0231410374115144,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-0.8206826560081675,\"sequenceIndex\":159},{\"point\":[0.730967787376657],\"weight\":-0.5670268460862107,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weig
ht\":-1.1531211935539862,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.2523429784552362,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-1.0672006868947967,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-1.0342451591852915,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-1.0003836575468563,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-1.1212611072552192,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-0.8362438309608461,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.5331948861981242,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.0015736989294703,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-1.257712164038836,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-1.4707661135109953,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.329963082561126,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-1.9192588514465831,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-2.047826702649652,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-0.6927443623430289,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-1.0571057310106935,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-1.4488410506610538,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.307820826668597,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-0.7466049227419639,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-0.8905802766076513,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-1.3540558918728627,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-2.6627978525894083,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-2.2138853015899898,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-2.298380471841719,\"sequenceIndex\":149},{\"point\":[
0.730967787376657],\"weight\":-1.778870119993746,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-4.567756325652653,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-3.2026137158619292,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-3.496362043851107,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.4313134861413737,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-1.5986853152754252,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-2.156429128403296,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-3.0263784188635987,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-1.3434756092039075,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-2.7272800135435435,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.9058758858766665,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-2.2716461651090327,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-3.378197777795787,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-3.0994294339709754,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-6.98561347392969,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-1.5685021213353956,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-2.963071690967249,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-4.474169277217052,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.3244400617001664,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-1.8565117523050025,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-4.8482732751269655,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-4.513852599658036,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-4.080305328414731,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-4.380216405083996,\"sequenceIndex\
":91},{\"point\":[0.730967787376657],\"weight\":-2.1493544636885256,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-2.3228992772203063,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-1.1258082712982536,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-2.7471961804219927,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-3.0821017390571432,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-1.6010361781647608,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-2.488826856392909,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-2.3367415898110298,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.5919171700470434,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-1.1759636015385062,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-3.0635118465528874,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-3.8979262408845794,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-1.4184718250282926,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-2.571767385957578,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-3.4902256424972267,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-1.3856735738634032,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-5.5186087251029265,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-2.7563269982628293,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-1.6736753331100376,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-1.8607495732673476,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-3.1886520307463404,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-2.1157494441437277,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-3.3210939758322247,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-2.87265382
77159386,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-1.762760011335502,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-1.6067843373500255,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-1.2261073809782934,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-3.1583072685325506,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-2.367905526257128,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-2.7174447286017602,\"sequenceIndex\":254},{\"point\":[0.730967787376657],\"weight\":-2.0887604099699466,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-3.5224944463821783,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-2.1093448345704444,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-2.146547906123566,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-2.4757461272612784,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-1.57077133974567,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-1.5208190122833167,\"sequenceIndex\":255}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5318187098873884,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-0.5654904614818087,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.533270456609582,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-0.6103578414243609,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-0.5658440270155903,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-0.6001247551108103,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.5523562300949072,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-0.6165369745575265,\"sequenceIn
dex\":204},{\"point\":[0.730967787376657],\"weight\":-0.7447277497372076,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-0.6166447328044925,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-0.6486197556595551,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.6290561685252707,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-0.6471219635422167,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.5945380249207219,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-0.8448245718399776,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-0.6166307073318523,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-0.6738327393282543,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-0.7789628741625582,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-0.7749481640986031,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-0.6246686799685517,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-0.8688437842905221,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-0.8413489463829863,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-0.694841508708282,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-0.8036711210127871,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-0.633471401476135,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-0.8855730309301386,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-0.7061334644443285,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-0.9182983491062298,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-0.6816110104554199,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-1.0659067991610571,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-0.8815000564995018,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.7
575890221165628,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-1.5546827256702263,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-1.0280447544235234,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-2.039539637646523,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-1.3187913917852476,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-1.3415373141140017,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-0.9735587648188142,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-0.792519866380562,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-1.2478676467439191,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-0.8919827427300476,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.8922528329822433,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-0.9497752916919143,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-1.3706554959479065,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-1.1307496855716188,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.8546679880879613,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-0.8378194405718111,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-1.644839363732581,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-0.9964633257296456,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.8883569201364916,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-0.687548544220937,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weight\":-1.6049262437777745,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-1.7492133810614596,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-0.795179344301267,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-1.3272673411277478,\"sequenceIndex\":110},{\"point\":[0.73096
7787376657],\"weight\":-1.196686281716323,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-1.4018390531398686,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-0.7764731370838394,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-1.4238734755903029,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-1.1037862898550883,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-1.114646830645598,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-1.2848157669210358,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-1.2129485931720998,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-1.035913781126804,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-2.5418062428343386,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-2.8732071231532967,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-1.8266369956472803,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-2.2150010826434716,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-3.094628062141695,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-2.562062971558999,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-3.0487327596259233,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-2.325190736336864,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-5.229484784928418,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-2.1374409445864595,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-2.374484796755332,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-1.69808309369887,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-1.464948153470899,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-2.4258450238192797,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-1.1484421444778685,\"sequenceIndex\":199},{\"
point\":[0.730967787376657],\"weight\":-1.686396918823977,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-3.1482116215087026,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-2.504142570294506,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-1.272799498023884,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-4.468258052541071,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-2.890995241950525,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-1.3237333777706186,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-2.4418159743793493,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-3.3702626213576017,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-2.39382243476584,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-1.7126654585719776,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-2.155226822779673,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-1.8610169376804342,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-2.9730406586984572,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.6360892046088826,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-2.5596126561977077,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-2.2733809133032943,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-1.6975984125206027,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-2.440600214527288,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.967656617712802,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-2.084122543511944,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-0.9376219294034867,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-3.9207645699585427,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-1.6713603378875577,\"sequenc
eIndex\":128},{\"point\":[0.730967787376657],\"weight\":-1.6936701840197852,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-2.612725396403457,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-2.1100697253074143,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-1.886622399333592,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-1.3031636990970743,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-1.128643302146683,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.569916092183773,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-3.0320690142737985,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-2.3845692170746533,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-3.763823050368068,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-1.6161988773486655,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-2.7911731425990416,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-1.3686698087977756,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-0.9275238833774407,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-2.015708988256981,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-1.4976500116594296,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-2.09859462162153,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-2.9267501629240917,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-2.716211936806261,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-1.5411255083283577,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-1.85545803753371,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-1.9405490108651162,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.736027051201294,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-1.43789
33858459464,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.1995487230563042,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.3438148465361525,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-0.40256283262134296,\"sequenceIndex\":256},{\"point\":[0.730967787376657],\"weight\":-0.3448981417461847,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-0.4676022121999738,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-0.5579349154193868,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.3879423258599878,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-0.35239037498664555,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-0.473846629865774,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-0.597570569571043,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-0.7326715596699891,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-0.8096260909588213,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-0.5092072477673558,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.4778477930575075,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-0.382368760757008,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-0.5366555338732443,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.5575919135325039,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-0.9477344182198841,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-0.8558885658357751,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-0.7930792976884375,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-0.9237541246817846,\"seq
uenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-0.8607786803974451,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-0.8133282474589802,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-1.2031497698758342,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-0.6480489618745867,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-1.0323245143212925,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-0.5294206686322864,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-1.1834353980757137,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-0.6737984970195576,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-0.7623414563824161,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-0.6371784036969197,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.6304918152038705,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.6807530101585866,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-1.0366189504259227,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.1967142394461303,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-1.5111367158954252,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.1232297311371753,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.2556348998043156,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-0.8341757444767626,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-1.2961935698116283,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-1.154503288081514,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-0.963725749466751,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-0.8797938314075873,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-1.1748981203257358,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight
\":-0.8386252196693483,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-1.2668316406345985,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-2.042371558438259,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-1.8698895136103657,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-1.3517448278297186,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-0.7701745073185721,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-1.1511600702206355,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.2334088961519125,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-1.1608043028973976,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-0.803838323813835,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.2670795251900904,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.872344982470206,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-0.7801907886711229,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.8705286645973387,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-0.889031907749118,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-0.9162734031523211,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.0852716552114023,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-1.0035958260595395,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.036787672581167,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-1.5994442119491956,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-0.6994415620744231,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-2.509168714923339,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-1.3051564634417543,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-4.569617658135123,\"sequenceIndex\":189},{\"point\":[0.7
30967787376657],\"weight\":-1.4057061283402454,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-1.298339772596517,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.689284124523941,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-1.6367216484219196,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.2397914568545862,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-2.8129772472154912,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-4.154682816367456,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-5.774168363749988,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.4043035369463437,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-1.4022653879706128,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-3.0160314741430176,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-2.7370000607028,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-1.181222090081106,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-2.825509146528255,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-3.1427356349219586,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-2.057188844198273,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.5979508976294063,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-2.384911123193851,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-2.4934656488368314,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-2.0794105823147846,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-0.8430570947558428,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-1.583062071631961,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-1.900950525735522,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.6603233291087203,\"sequenceIndex\":91},{\"p
oint\":[0.730967787376657],\"weight\":-4.910682080495397,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-2.2331721883940827,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-3.313339468991698,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-3.4956878699882674,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-1.8769343382557342,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-1.7681824933718822,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-2.4188717619339917,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.9105741696657654,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.3740658128830137,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.4948541685776218,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.5651442150174164,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-1.5718873531022508,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-1.6798837022342403,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-1.7207430232805285,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-1.5608348328237314,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.1004703232411095,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-1.9525684074654577,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.438510931338322,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.9941504573983242,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-3.1562169664528077,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-1.9476434861602756,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-1.0172693645748159,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.470304568864497,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-0.923187343694007
5,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-2.9394766789851574,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-1.1647747340890369,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-1.8414083779016315,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-2.0092709298621405,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-1.6478680983152354,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-2.1229378553555236,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.0367461927678534,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-1.701420350316869,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-2.7808506734564897,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-4.4168366371233,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-2.6649252496553375,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-1.613422341738511,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-0.7105703110872363,\"sequenceIndex\":254}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.2716899350411218,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-0.27471090367902873,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-0.28474961199644333,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-0.34190056081742404,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-0.3073717821143171,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-0.310153698446141,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-0.33071864648236327,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-0.5317753790258758,\"sequenceIndex
\":199},{\"point\":[0.730967787376657],\"weight\":-0.38298998466082745,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-0.3634377746489217,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-0.5452916459156779,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-0.446269489818724,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-0.3958006016449809,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-0.3496538769532813,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-0.5495885320083429,\"sequenceIndex\":212},{\"point\":[0.730967787376657],\"weight\":-0.7460482142666371,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-0.8877956895145098,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-0.5420031671300687,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.43045694696942804,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-0.49682089432205834,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-0.4019460071743589,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-0.8934227495926454,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.1219210858025719,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-0.5404408280131889,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-0.8828407118047868,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-0.8848169146818389,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-0.496637382937721,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.3528133814431951,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-0.4535326560139121,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-0.6494765117950774,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-0.6402464878769788,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.
7776263331932095,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-1.2976801107208407,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-1.2250181322865736,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-1.3970541978955977,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.339749008305191,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.4168370402167227,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.0982510048403273,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-2.334412683527544,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-0.7590056433424044,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-1.2126301010478104,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-0.4450683648945088,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-0.8795919338904586,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-2.6301422886639068,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-1.229414130685538,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.3396731415585807,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.1570077925389652,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-1.1457336240183227,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-1.4489400158044805,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-2.841087678736005,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.2912340218137652,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-0.9221052599167883,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-1.3010633777968605,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-0.5963381519322952,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.1655046322772882,\"sequenceIndex\":110},{\"point\":[0.730
967787376657],\"weight\":-0.43382682410779094,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.9000989414068234,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.733084285204238,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-1.0875943767201786,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-2.112340444977423,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.8479416699586986,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-1.1210698654162197,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-1.1049256028337535,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.867139422833671,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-1.7350710436135974,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-2.6493083011703824,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-5.588738052160094,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-3.654055642618111,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.2310227012413808,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-2.560493765611929,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-1.555139695454966,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.9124098600764998,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-1.9888791818258242,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-1.5188343945323743,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-4.816220916877915,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-3.856026258108521,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-2.202249107983622,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-3.0422750392189397,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-2.421139408737896,\"sequenceIndex\
":79},{\"point\":[0.730967787376657],\"weight\":-0.9305782092429432,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-1.6441289574470639,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.8026557847183284,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-1.456445218093241,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-1.8747810921423125,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-1.4835190416406348,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-1.0767311709215643,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-1.2586131637198126,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-3.770279271931733,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-4.041535146904952,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-5.483209718710974,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-1.7072253436877005,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-3.970819599068188,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-2.0893185845712594,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-1.2449332995730615,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.4972302445133614,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-2.141028788118774,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-6.632511594166266,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.7902133214240274,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-1.5656657948819075,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-3.5933876294790745,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-2.857578204767013,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.9608528649801944,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-3.0254978228
150295,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-1.3322232846651745,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.51375473204136,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-1.330047000108227,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-2.1802076442057663,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-1.1593269752909365,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-0.7648636586996097,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-2.5115543768850634,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.27667790654646,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.0718411355447786,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-3.0363298987587064,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-1.2057499758912003,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.5710286087182006,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-6.70591012588516,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-2.2699592728675615,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-1.3674582456069517,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-1.2063618123228732,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-4.193938687127718,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-4.402826781277982,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-2.1072066643501794,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-3.3322908197219046,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-2.5290929247160556,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-1.2386499699210565,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-2.417330811680178,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\
"weight\":-1.2884548260619535,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.9123476014937866,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5147647719937322,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.5283045043338832,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-0.5301794071411086,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-0.5459412471684224,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-0.5304968657991089,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-0.6558637153914211,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-0.7201119157694705,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-0.5673972269710099,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.5539439159847406,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-0.5880273977465741,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-0.5440303028131699,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-0.7980082680205237,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-0.8624763603132237,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-0.842200327103271,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-0.7477394416112302,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.5846389440651368,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-0.9000250351095493,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-1.1211240498832515,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-0.6486528073565214,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-0.6492996
416697495,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-0.6762704194746647,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-0.6911181019746376,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-0.640840823471894,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-0.8023287344540558,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-0.9757155691010011,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-0.9785916310865459,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-0.8634417256472278,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-0.9484551879209451,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-1.0277493948407341,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-0.8025700000083862,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-0.7691178187065626,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-0.6907399122081311,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.0570628943492693,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-1.0144494701882967,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-1.182910061940822,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-1.291433910487792,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-1.9050980550716798,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-0.9036272518434175,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-0.8746194508702475,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-0.6887278406352619,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-1.8852969360534115,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-1.1655968912590937,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-0.9037743983375113,\"sequenceIndex\":161},{\"point\":[0.73096778
7376657],\"weight\":-0.8554559508283657,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.945375141782169,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-0.7269256019953815,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-0.9267525747094809,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-0.8750032997941084,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-0.8140166549099045,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.0659429767440116,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.6232140137194015,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-1.2271335074062415,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.0717201578742643,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-1.7849702203114561,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-1.3703683438861405,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-1.0796879805584816,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-1.32696675361515,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-1.1750446200609477,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-1.1974130841434427,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-1.0680507087240354,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-2.0279661906859925,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-0.9918801946474926,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-0.8324061377980361,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.7708417134493972,\"sequenceIndex\":256},{\"point\":[0.730967787376657],\"weight\":-1.8403591605085388,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-1.5703839657874517,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-4.866508818245713,\"sequenceIndex\"
:171},{\"point\":[0.730967787376657],\"weight\":-1.3608459882443613,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-3.9519005207184463,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-2.267569114545663,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-1.804104443078728,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-4.385155542852683,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-1.659339205387125,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-2.321457972378974,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-2.5125687485548727,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-0.9695514367540587,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.249398099381721,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.007872826195355,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-3.145354143379366,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.674090652614644,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-1.4827615952820108,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-3.0262552150210666,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-2.217402069188484,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-2.429425978256601,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-2.170058978762898,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-0.9199060946188189,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-1.8788074209348304,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-1.2263330314972996,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-2.246663098092042,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-2.626959597436669,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-2.7387982021231787,\"sequenc
eIndex\":91},{\"point\":[0.730967787376657],\"weight\":-1.4795713492218079,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.7387448126209181,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weight\":-1.433630164537891,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-1.6533612855794144,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-2.426320475250804,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-2.2209639624838258,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-1.4549302841426746,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-5.119696697420786,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-3.33706935170092,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.2631793690351976,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-3.897893174750405,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-1.7493213692450575,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-2.765923899720794,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-1.5263332761387187,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-4.491854613536974,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-3.477032319873072,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-2.2868596953903757,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-2.0229346575525,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-2.1879126190055413,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-2.579144914690684,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-5.6941122127727075,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.3793929310962443,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-3.50383351206438,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-1.615785
6139757218,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-2.605187563722744,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-1.6567209960520597,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-2.9201388177333136,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-2.5674465248393497,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-2.996222252373557,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-2.225072038821199,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-3.4796612100007493,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-2.9122328994501343,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-2.4235812742158536,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-1.389324845429176,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.0204056515358098,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-2.5885837818620074,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-0.8332993823852461,\"sequenceIndex\":252}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.46831573103336505,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-0.4920034507462027,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-0.5568696266116789,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-0.5757011045266621,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-0.5170729128448872,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-0.6242710590396826,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-0.5624495932890967,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-0.5798846711943635,\"seque
nceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-0.6340021188577166,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-0.7323822192091085,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.6141063191751956,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-0.7916748412441669,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-0.7741123885540975,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-0.5667633330265399,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.5962698755084289,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-0.9528761784457509,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-0.7273114423574275,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-0.7808903841390626,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-0.7404356918152774,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.1224211478278021,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.0776841568623996,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-0.9061437470781566,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-0.7610392028917319,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-1.0842993072569231,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-0.8666301346042629,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-0.8040570484908357,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-0.7795984148829249,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-0.6516811613917103,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-0.8127116090453896,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-0.7093584355913547,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-0.7315408393443674,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"wei
ght\":-0.990745716444881,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-0.9904589714307233,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weight\":-0.7547046540621087,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-0.9385106684942083,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-0.9944755410185144,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.2051592415612744,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-1.2333357566281156,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.3329729300550026,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-1.1991340379887532,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.3274623806041033,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-1.526594462791338,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-1.3576831463085286,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.1523640029972193,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-1.0287535171907853,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-0.761861719035806,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-0.8111568744213442,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.1311925864317038,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-1.097091317483172,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.0128493895636976,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.221175508936959,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-0.8087087572152968,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.591378193264179,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-0.7808942450365118,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-0.7874354613361961,\"sequenceIndex\":65},{\"point\":[0.7
30967787376657],\"weight\":-1.9243698599229264,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.6973614486878218,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-0.853648012412175,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.4664914455096558,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-2.098195218055375,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-1.102975773283685,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-0.8462130869136666,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-0.9629392583439209,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-2.7828599052255107,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-4.316338989747713,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-2.2571448459814674,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-3.142787110379834,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-1.3023983406778277,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-1.2586752798853267,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-1.6304311296262717,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-3.1512880784342827,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-1.8883655729979114,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-1.094508648297114,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-2.6395767472866987,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-1.3169353862467152,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-4.209492573005034,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-6.966352719021773,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.825492902675068,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-2.72830638791692,\"sequenceIndex\"
:174},{\"point\":[0.730967787376657],\"weight\":-2.1452772095296053,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-1.3111867071046746,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-2.1790736634312675,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-3.365823533576492,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-2.770574762426797,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-1.5356215265628248,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.8120813790826056,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-3.610644731094732,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-6.813435066579621,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-1.511656105527198,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-1.825470850594686,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-2.7000217209135524,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-3.3686285770987894,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.994572511242888,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-2.738423980255016,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-2.2627373696668442,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-1.1398004384767948,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-1.8676583609310387,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-2.3619686556571415,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-2.671197674896997,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-2.2844582790878247,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.7985628567386853,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-3.688468167424617,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-1.280100076012047,
\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-3.1906490962657528,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-3.1274148634287844,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-4.071943096205934,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-3.4075466198009265,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-3.0987580846126246,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-1.4440908419918534,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.8397014301486523,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-3.339474020887816,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-3.8851832445691867,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-2.192563768798378,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.2421581667392063,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-2.854968984020898,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-1.3045322470782494,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-2.990184786917006,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-2.2793085009695484,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.8735816453745033,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-2.283254675313028,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-2.8871729847634615,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-2.0262227326310347,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-2.4103902158089165,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.875022827253578,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-1.2401553326566073,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.1350934188165602,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"wei
ght\":-1.2048634269913785,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-3.767252004310466,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.48161520252225043,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-0.5032685329174207,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-0.4884000061177509,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-0.5612824294877223,\"sequenceIndex\":254},{\"point\":[0.730967787376657],\"weight\":-0.7340001560739683,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-0.5805711655468995,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-0.532031115958577,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.5629952438385468,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-0.7103043125954556,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-0.7561521094213371,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-0.740343674331296,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-0.6532282241282985,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.6162847071446554,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-0.8571691384462168,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-0.5921971220724024,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-0.7332702674290201,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-0.6447086323545408,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-0.7459554239767379,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-0.8183494214455185,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-0.78632862771
19447,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-0.7961769327121967,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-0.8209856787877501,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.747374578116906,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-0.720165978559799,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.9065798298949114,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-1.1051513360837184,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-0.6543418503396461,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-0.9069651144540471,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-0.8653889525774512,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.6297289101710448,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-0.7197958014646774,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-0.8365867455716735,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.9010951953767131,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-0.8247407561517901,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-1.508995179943234,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-0.8583829379498158,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-1.931658553962453,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-1.039312143204178,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-1.5077953150610535,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-1.7499332758349835,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-1.5400189365876056,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.9183540360116808,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-1.4948965513267536,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"w
eight\":-0.869061732277029,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-0.9834801027191972,\"sequenceIndex\":91},{\"point\":[0.730967787376657],\"weight\":-0.8338449943661358,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-0.8703062457144525,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-0.9455114066375463,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-2.127707148525387,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-1.0167957927184554,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.426122588410124,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-1.3798671872256478,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-2.1392673160233384,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-1.2155977639708777,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.780165000095329,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-2.1102177502129082,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.9166512104499234,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-1.6097913202984864,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.2526434919287859,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-0.708368837456199,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-0.6984741236374886,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-0.7794950871871834,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-1.1074471530781493,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-1.210343930727509,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-2.453876262808502,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-2.622290351597412,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-1.4570283650453468,\"sequenceIndex\":131},{\"point
\":[0.730967787376657],\"weight\":-2.6099936427346444,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-2.3835239787524745,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-2.0123673229507184,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-3.3450622098305525,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-2.4997800319582093,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-2.494157114019547,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-2.1762600438745534,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.99821282102994,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-2.7627267357770244,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.06999381496046,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-2.617651337113746,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-1.6553562737371077,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-2.535818247059016,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-4.2533990181830985,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-2.414376360893475,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-1.853203682553218,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-1.4890922557464112,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-1.3632031170730483,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-5.591559706493305,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-1.5092848473336438,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-3.139746241722752,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-2.326589993281474,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.4987892010689419,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.289085612763529,\"sequenceIndex\
":156},{\"point\":[0.730967787376657],\"weight\":-5.584373324895164,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-2.911114888709017,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-2.197738422171821,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-3.7491535356742225,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-1.9572861296104282,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-1.8780122752394994,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-4.9654814735371335,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-2.31153907572989,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-4.451371918918136,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-1.6122868201779776,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.441264309123618,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-5.407959713274903,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-4.179683383453015,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-2.2956649858956597,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-2.4019139697749092,\"sequenceIndex\":212},{\"point\":[0.730967787376657],\"weight\":-2.7656334229662525,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.7889040302811825,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-1.2400768695586455,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-1.8718685124008376,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-3.2967194418376393,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-2.8845603827459825,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-2.3733140386304137,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-1.9857627490746395,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-1.63762045591
8089,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-2.0622464445758535,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-1.666144620035993,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-5.174222507737078,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-1.473497549215472,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-1.4222227825610019,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-4.9393525944147365,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-3.09228272824089,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-2.4419432501742913,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-1.2641785114451423,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-2.6650368389283385,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-1.7336330996232743,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-1.578192997070825,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-1.6072343150083284,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.564761493133161,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-0.5736218996531217,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-0.5886312930179589,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.6406822919065119,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-0.6641688590402863,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-0.6202622542037859,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-0.6261338882959768,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-0.6837026673726713,\"sequenceIndex\":21
7},{\"point\":[0.730967787376657],\"weight\":-0.6857611291595339,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-0.7161014023709106,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-0.8232049145117503,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-0.7411832952110171,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-0.6408512953293359,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-0.741739416909683,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-0.6457641846548611,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-0.830720694209438,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-0.9084987518469034,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.1152633521871145,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-1.010528143883659,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.9396409429699507,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-0.8978971158578838,\"sequenceIndex\":86},{\"point\":[0.730967787376657],\"weight\":-1.1996475544061387,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.0061211010964777,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-0.8805802510385022,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.7544073345237722,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-0.6433345826785652,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.064542690433194,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.9657550277412527,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-1.1130551351121234,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-0.6905814499051617,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.8276001877469491,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-0.97532112469
75901,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.6611511614705754,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-2.4994596003681147,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-1.666888357219219,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-1.1596169255482598,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-1.2332649691786004,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-1.2326513614053691,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.2647988428531252,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-1.2641040523242801,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.4133737030538003,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.206070796226922,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.1789259526817444,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-1.216445613642882,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-1.4385673021677678,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-1.2529956095863182,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-1.4662203824853897,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-1.809042114508578,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-1.4858286831159846,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-0.8350478514904276,\"sequenceIndex\":164},{\"point\":[0.730967787376657],\"weight\":-0.9224043534846053,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-1.022635784395301,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-0.680642798899195,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-1.1718475355134257,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-1.3871742751290854,\"sequenceIndex\":110},{\"point\":[0.730967787376657]
,\"weight\":-1.0443462558788044,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-1.3370725796309237,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-1.6581055703805105,\"sequenceIndex\":29},{\"point\":[0.730967787376657],\"weight\":-1.1960966021851065,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-0.757589373987626,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-0.9379105730374425,\"sequenceIndex\":185},{\"point\":[0.730967787376657],\"weight\":-0.8688877630359408,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-0.9316539018349295,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-1.8699046324247761,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-1.942613808041928,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-5.053511462688396,\"sequenceIndex\":169},{\"point\":[0.730967787376657],\"weight\":-2.1081025549085104,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-2.6638181493729305,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-4.018645605569034,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.8327622416721527,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-2.0210738123988805,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-2.3144293918346097,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-3.0442369061149597,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-2.2637702015913623,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-1.5218612147887414,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-1.5182182287422974,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-2.605927561279445,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-3.2076368646660964,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-2.738835295122725,\"sequenceIndex\":174},{\"poin
t\":[0.730967787376657],\"weight\":-3.063281559042889,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-3.2720552890460164,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-3.0557831697120124,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-3.4666187022413655,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-1.2777850173794876,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-2.235777063492217,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.9636558652840295,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-2.7416030136177834,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-1.5579905963300618,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-1.3820991889093708,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-1.8432425755273605,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.820639744047021,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-1.4338678527462445,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-2.240682849444293,\"sequenceIndex\":241},{\"point\":[0.730967787376657],\"weight\":-2.1073199812905443,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-2.23725008333913,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-2.0611501528498373,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-2.1520834119033103,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-3.21255864792381,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-3.1772353411898595,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-0.9910509516716692,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-0.9642256254960733,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-3.649168002940256,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-6.199443881411274,\"sequenceInde
x\":208},{\"point\":[0.730967787376657],\"weight\":-5.783173271776237,\"sequenceIndex\":104},{\"point\":[0.730967787376657],\"weight\":-1.8974741961999595,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-1.4518357150958405,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-0.7809275614791581,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.4941055183554826,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-1.5822614103169086,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-1.853485564926993,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-3.1001348949246763,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-1.4864588476884966,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-4.614086458880863,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-1.891177171776994,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-2.3046585707700835,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-2.8807971645920247,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-5.59318356154028,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-1.711754664068602,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.3824063413914451,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-0.8172841610313254,\"sequenceIndex\":14},{\"point\":[0.730967787376657],\"weight\":-2.5296743437104667,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-0.9794216145780051,\"sequenceIndex\":139},{\"point\":[0.730967787376657],\"weight\":-4.573476915148771,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-0.9441447602810532,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.9996514892735204,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-1.3313592864440738,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-1.68
20281598262337,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.9482856758938119,\"sequenceIndex\":252}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.4690226603242761,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-0.47391002574010566,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-0.47576934660910797,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-0.5118725950820997,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-0.5858666337155083,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-0.47676726189854296,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.5592716595811936,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-0.5867980782507415,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-0.6638495253565898,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-0.6070646006053623,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-0.6459072382000302,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-0.5005194912496422,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.7051162832111174,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-0.6141493047696998,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-0.616394044281684,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.6001402346121937,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-0.6313310517723464,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.263606187080282,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-0.8723508153187265,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-0.7784771009695204,\
"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-0.8410340990668541,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-1.411335209016898,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-1.00483209125564,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.0043968809926567,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-1.3729508647418978,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-0.7062815953758743,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-0.7766955944600424,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-0.8073544426538425,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.1908414684423816,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.6875676049583614,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-1.096133864460894,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-0.6396097151375295,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-1.5187709601098565,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-0.8329598385700744,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-1.15632975799481,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-2.035380007204431,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-1.4820441223490732,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-1.037170324459352,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-2.244783043713239,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-0.8768985441843398,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-0.8965022546554009,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-1.0668373309933914,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-1.8958213817460277,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-
2.467472064859864,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-1.593981432282716,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-1.037021780639619,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-2.232306587350207,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-1.496704044194034,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.129411470185101,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.5822664932637913,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-1.5955119504101944,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-0.7148148532528689,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.5089159779038055,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.275668698331838,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-1.057211317371552,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-1.488375422864763,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-1.8158855956039153,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-1.4290256389972484,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-1.5446749745783384,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-0.7642714964916557,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.334846413350134,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-1.2529051810660161,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.1932181487873907,\"sequenceIndex\":246},{\"point\":[0.730967787376657],\"weight\":-0.860981889689751,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-1.643734965526611,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-3.5214590408627697,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-1.8222152873314132,\"sequenceIndex\":67},{\"point\":[0.730967787376
657],\"weight\":-2.437864947930331,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-1.4901819263998541,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.464480508502039,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.7079103160165392,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-3.4639089477819898,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-2.2936580101316992,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-4.433496954849337,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-2.6848530004962528,\"sequenceIndex\":75},{\"point\":[0.730967787376657],\"weight\":-2.1684811228552623,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.6829915910962994,\"sequenceIndex\":38},{\"point\":[0.730967787376657],\"weight\":-2.8731318107859445,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-3.0054401455059945,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-3.072554262159996,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-1.863991940667053,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-0.9072970647872125,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-1.9812988878490263,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-4.152715695480708,\"sequenceIndex\":228},{\"point\":[0.730967787376657],\"weight\":-1.3998675279124526,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.966715289043048,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-2.752756546041057,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-2.7466319457177675,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-2.7236305793013074,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.8015947384538493,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-1.6083748995469445,\"sequenceIndex\":91},{\"p
oint\":[0.730967787376657],\"weight\":-3.087856753319444,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-2.269894598970291,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-6.057665347420337,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-2.2688744874163245,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-4.608044692998849,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-2.9086925388127787,\"sequenceIndex\":97},{\"point\":[0.730967787376657],\"weight\":-1.5657659076325412,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-1.2937542682387821,\"sequenceIndex\":143},{\"point\":[0.730967787376657],\"weight\":-2.031708577095081,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-2.651133080337707,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-3.1710259993683247,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-2.5081949982213447,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-3.4845880886117224,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-0.77396698046313,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-2.6153038296826536,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-2.174754965123791,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.6953089495193248,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-2.1759607478514598,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-1.2981043083964339,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-1.3252636997506482,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-5.415443671148253,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-2.835273508980913,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-2.663762964699005,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-4.103655726561295,\"sequence
Index\":32},{\"point\":[0.730967787376657],\"weight\":-1.746849033248496,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-2.4327151010230805,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-3.749890634148176,\"sequenceIndex\":118},{\"point\":[0.730967787376657],\"weight\":-1.6447069507271432,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.515353868855795,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-0.8027401705254581,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-3.2023503225342393,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-1.8757185316985974,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-2.335847595387624,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-2.1619601925664362,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-1.8554155965111438,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-1.3606060443850454,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-1.300350017015076,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.48936925615707577,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-0.5195422987117615,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-0.5115083905666227,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-0.5252132974013652,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-0.5253375211311792,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-0.5724464115353278,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-0.5206309998807405,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.5561589625919175,\"sequenceIndex\":35},{\"point\":
[0.730967787376657],\"weight\":-0.6960135916833867,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-0.6420497472841934,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-0.6052687086899959,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-0.5969579893056953,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-0.7970784993143802,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-0.7259299411051049,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.6341155012745775,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-0.5667682460123049,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-0.6725713031320693,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-0.7132779703395214,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-0.9600104496980892,\"sequenceIndex\":244},{\"point\":[0.730967787376657],\"weight\":-0.6834106241878374,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-0.6432955392423066,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-0.8138924894642104,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-0.6706032827499723,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-0.6678680187140987,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-0.6406411957205711,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.2158661264250965,\"sequenceIndex\":204},{\"point\":[0.730967787376657],\"weight\":-1.0804048416895382,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.246661919763674,\"sequenceIndex\":239},{\"point\":[0.730967787376657],\"weight\":-0.8114899664108656,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-0.8014167467908717,\"sequenceIndex\":190},{\"point\":[0.730967787376657],\"weight\":-0.8494084507644828,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-0.770748494740312,\"sequenc
eIndex\":246},{\"point\":[0.730967787376657],\"weight\":-0.8362224432986846,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-0.7197365069228265,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-0.7184739183289851,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-0.7720271107684699,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-1.2035112709955944,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-0.9969284152821132,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.0618675103930024,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-1.6752068529421864,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-0.7784629762658783,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-0.9201468131516433,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-2.56103496867805,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.8479920694216696,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-1.5364855976352314,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-0.9979316321247762,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-1.1936561273445407,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-1.0012160740236855,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-2.205194587849211,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-0.8920818415453414,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-0.7337279574166271,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-1.6933268581197949,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-1.3625904708981689,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-1.1046452257111796,\"sequenceIndex\":108},{\"point\":[0.730967787376657],\"weight\":-1.148330123402003,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-1.4
617862626828353,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.7011114506512235,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-1.2817035458651869,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-0.8948563421574007,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-1.4092148847497432,\"sequenceIndex\":121},{\"point\":[0.730967787376657],\"weight\":-1.0170676092947422,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.654913567734801,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-1.0108905041777576,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-0.8377414054707961,\"sequenceIndex\":254},{\"point\":[0.730967787376657],\"weight\":-2.2819336331914055,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-2.239836637071006,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-2.206059095371257,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-2.8285350724920644,\"sequenceIndex\":203},{\"point\":[0.730967787376657],\"weight\":-2.298091957032309,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-1.9537349503344175,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-1.6019887559321693,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-3.2317143734174354,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-2.1820036884843517,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-4.942369102919253,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-1.687330281244884,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-1.3578749343781131,\"sequenceIndex\":76},{\"point\":[0.730967787376657],\"weight\":-1.630525794051171,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-1.9875379191477927,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-2.609146536426053,\"sequenceIndex\":174},{\"point\":[0.73096
7787376657],\"weight\":-2.3830148221654412,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-5.424420792747065,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.6749998462017444,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-1.8682484603713805,\"sequenceIndex\":156},{\"point\":[0.730967787376657],\"weight\":-1.667960782054002,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-1.101789897506332,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-4.061096583883185,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-3.0076933245182293,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-2.7949145966851745,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-3.364930356899793,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-3.4905175797830874,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.6765599799798108,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-3.575259474443717,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.7255999932963655,\"sequenceIndex\":8},{\"point\":[0.730967787376657],\"weight\":-1.445201781755901,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-2.770241241485631,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-2.127238512751966,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-2.625311402179426,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-6.018922093663173,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-2.3351819022247913,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-0.957887035862091,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-1.420211341352382,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-3.2449927547509314,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-3.199908399828084,\"sequenceIndex\":189},{\"po
int\":[0.730967787376657],\"weight\":-4.467348434192797,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-2.164040300433013,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-3.582879029373041,\"sequenceIndex\":236},{\"point\":[0.730967787376657],\"weight\":-1.3810263187274343,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-1.9774841590898877,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-2.6106461661218425,\"sequenceIndex\":249},{\"point\":[0.730967787376657],\"weight\":-1.54112773027552,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-1.245733359380224,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-2.127448286671564,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-1.9117167262704728,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-1.8003111275812387,\"sequenceIndex\":186},{\"point\":[0.730967787376657],\"weight\":-3.3342372023797537,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-3.111959868000764,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-1.2876260927966834,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-1.7203046697234914,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.3722131173924381,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-1.9669596210178122,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-1.6594469714908804,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-2.791288438988505,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-1.2256040609134418,\"sequenceIndex\":123},{\"point\":[0.730967787376657],\"weight\":-2.6357763582448688,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-2.2728796367055173,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-1.35227088110744,\"sequenceIndex\":134},{\"point\":[0.730967787376657],\"weight\":-3.6544624580103204,\"seq
uenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-1.8532566572400933,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5400219729664453,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-0.5422898155336928,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-0.5697075814112917,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.6011353205017963,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-0.6414506639218069,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-0.5805762292167888,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-0.60878848055579,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-0.6084622704412626,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-0.6208784438160347,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-0.6525371946187392,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-0.7065262202845024,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-0.596855975769176,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-0.7195501081969338,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-0.6587466497797975,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-0.6129725752621227,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.6501960651078434,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-0.7982643902907938,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-0.9590417476758006,\"sequenceIndex\":72},{\"point\":[0.730967787376657],\"weight\":-0.7623214635030011,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-1.2170097723923148,\"sequenceIndex\":41},{\"
point\":[0.730967787376657],\"weight\":-0.8280701594770999,\"sequenceIndex\":149},{\"point\":[0.730967787376657],\"weight\":-0.8465693865049617,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.81222211649316,\"sequenceIndex\":93},{\"point\":[0.730967787376657],\"weight\":-0.8624012508840969,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-0.8867124180039911,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-0.8359050762547878,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-1.3457025876506035,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-0.7315618428829632,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-0.703801183115682,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-1.1354035363191792,\"sequenceIndex\":122},{\"point\":[0.730967787376657],\"weight\":-0.6329735251349007,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-0.9919356918523081,\"sequenceIndex\":255},{\"point\":[0.730967787376657],\"weight\":-1.1051876207678466,\"sequenceIndex\":32},{\"point\":[0.730967787376657],\"weight\":-0.9167085638400191,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-0.8662650308223615,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.2321983614616387,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-1.3313667680577388,\"sequenceIndex\":36},{\"point\":[0.730967787376657],\"weight\":-0.9682947935531299,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-1.6626629547814107,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-1.6503433285739375,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-1.2335404981392106,\"sequenceIndex\":83},{\"point\":[0.730967787376657],\"weight\":-1.041639836026636,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-2.3255234640013795,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-0.9531658157486329,\
"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-1.1245369656015418,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-0.9825069057485585,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-0.8498987539525801,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-1.1329223657285659,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-0.9322094887785128,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.0334535473741493,\"sequenceIndex\":170},{\"point\":[0.730967787376657],\"weight\":-1.044075477502805,\"sequenceIndex\":102},{\"point\":[0.730967787376657],\"weight\":-1.1111132895502218,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-0.9329278399123638,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.3872721300742499,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-2.0323046248494707,\"sequenceIndex\":110},{\"point\":[0.730967787376657],\"weight\":-0.7653732202824346,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-1.694940837675672,\"sequenceIndex\":57},{\"point\":[0.730967787376657],\"weight\":-1.1458728828307505,\"sequenceIndex\":200},{\"point\":[0.730967787376657],\"weight\":-1.2726303587751704,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-1.6938889814262188,\"sequenceIndex\":154},{\"point\":[0.730967787376657],\"weight\":-1.726997663753874,\"sequenceIndex\":226},{\"point\":[0.730967787376657],\"weight\":-1.1783802860200225,\"sequenceIndex\":125},{\"point\":[0.730967787376657],\"weight\":-0.7485279560432714,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-1.007268394491335,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-3.0751644876903326,\"sequenceIndex\":248},{\"point\":[0.730967787376657],\"weight\":-2.024699957805815,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-1.1913345564380498,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"weigh
t\":-2.126135830977528,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-2.2175158485121034,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-2.2291663165049456,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.682773920126382,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-2.2244086200158804,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.4520007422636012,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-1.7406607159789884,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-5.430575036217457,\"sequenceIndex\":138},{\"point\":[0.730967787376657],\"weight\":-2.4278463490903337,\"sequenceIndex\":214},{\"point\":[0.730967787376657],\"weight\":-2.3126155250365454,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-4.572440975273183,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-2.1204463808947196,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-2.26919732681296,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-2.049671558476338,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-3.4942272546255255,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-3.042513216218693,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-2.7209384282397484,\"sequenceIndex\":42},{\"point\":[0.730967787376657],\"weight\":-1.4921297226679888,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-3.983143464179097,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-3.242370545417214,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-1.1977848503330741,\"sequenceIndex\":22},{\"point\":[0.730967787376657],\"weight\":-2.7774468617383845,\"sequenceIndex\":129},{\"point\":[0.730967787376657],\"weight\":-2.343839694309607,\"sequenceIndex\":45},{\"point\":[0.730967787376657],\"weight\":-1.9161161173702235,\"sequenceIndex\":91},{\"point\":[0.730967
787376657],\"weight\":-1.5993114175520602,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-1.5308326036694837,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.7203288345793066,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-1.079328916290324,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-3.0590390641068184,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-2.9774470521484737,\"sequenceIndex\":167},{\"point\":[0.730967787376657],\"weight\":-1.5800435316350046,\"sequenceIndex\":179},{\"point\":[0.730967787376657],\"weight\":-1.710079083416593,\"sequenceIndex\":131},{\"point\":[0.730967787376657],\"weight\":-1.286634476579088,\"sequenceIndex\":25},{\"point\":[0.730967787376657],\"weight\":-1.0413972867714019,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-1.4781904794924092,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.1049154602055509,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-2.5622994725117363,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-2.600495721679402,\"sequenceIndex\":105},{\"point\":[0.730967787376657],\"weight\":-3.992594947928178,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-2.8681813849796813,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-2.9031373258691966,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-2.2432842296148485,\"sequenceIndex\":178},{\"point\":[0.730967787376657],\"weight\":-3.3634117202093954,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-3.961772541695049,\"sequenceIndex\":130},{\"point\":[0.730967787376657],\"weight\":-1.2651272935945312,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-3.8081512784705436,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-4.537574477184877,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-2.705810157727256,\"sequenceIndex
\":115},{\"point\":[0.730967787376657],\"weight\":-1.2185711341029284,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-1.8727249690960706,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-1.4906226125574447,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-1.8807439015037761,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-3.5006651338052603,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-2.773541240840804,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-2.0455679073841817,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-1.9446761281561886,\"sequenceIndex\":2},{\"point\":[0.730967787376657],\"weight\":-1.8060978695648904,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-2.1999139921026183,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-0.9773875163087563,\"sequenceIndex\":62},{\"point\":[0.730967787376657],\"weight\":-2.184433279576839,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-1.3973530150412647,\"sequenceIndex\":251}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.5987498470544814,\"sequenceIndex\":119},{\"point\":[0.730967787376657],\"weight\":-0.6343832884372003,\"sequenceIndex\":223},{\"point\":[0.730967787376657],\"weight\":-0.6265347889525956,\"sequenceIndex\":58},{\"point\":[0.730967787376657],\"weight\":-0.6348284236292753,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-0.6423137304870976,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-0.7445830695465843,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-0.6615704526992696,\"sequenceIndex\":30},{\"point\":[0.730967787376657],\"weight\":-0.6398074679806687,\"sequenceIndex\":69},{\"point\":[0.
730967787376657],\"weight\":-0.8889256336331488,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-0.7138143789322109,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.8101563074670317,\"sequenceIndex\":136},{\"point\":[0.730967787376657],\"weight\":-0.7737665313366329,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-0.8155682703890998,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-0.7217825692967873,\"sequenceIndex\":28},{\"point\":[0.730967787376657],\"weight\":-0.8728176670630304,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.6469590934996523,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-1.051537405151372,\"sequenceIndex\":5},{\"point\":[0.730967787376657],\"weight\":-0.9214312129528702,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-1.1659385593708596,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-1.1557678566140734,\"sequenceIndex\":81},{\"point\":[0.730967787376657],\"weight\":-0.8193173281242656,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-0.9356813260410387,\"sequenceIndex\":177},{\"point\":[0.730967787376657],\"weight\":-1.0599525240465841,\"sequenceIndex\":245},{\"point\":[0.730967787376657],\"weight\":-0.933735784896078,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-1.4959337794140695,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-0.8611018872865674,\"sequenceIndex\":135},{\"point\":[0.730967787376657],\"weight\":-0.953554231799264,\"sequenceIndex\":54},{\"point\":[0.730967787376657],\"weight\":-0.7884202776290476,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-0.9771656403730621,\"sequenceIndex\":232},{\"point\":[0.730967787376657],\"weight\":-1.0324842009867354,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-0.993466376294563,\"sequenceIndex\":201},{\"point\":[0.730967787376657],\"weight\":-0.6898828399170609,\"sequenceInd
ex\":238},{\"point\":[0.730967787376657],\"weight\":-1.3544025379961588,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-1.0825055900542913,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-2.1730513968051994,\"sequenceIndex\":212},{\"point\":[0.730967787376657],\"weight\":-2.076545197679671,\"sequenceIndex\":18},{\"point\":[0.730967787376657],\"weight\":-1.2150886290400185,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-1.637047234204665,\"sequenceIndex\":219},{\"point\":[0.730967787376657],\"weight\":-1.535473443315547,\"sequenceIndex\":210},{\"point\":[0.730967787376657],\"weight\":-1.1869238986235762,\"sequenceIndex\":1},{\"point\":[0.730967787376657],\"weight\":-1.49796110553859,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-1.1241464280043958,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-1.0396976181491524,\"sequenceIndex\":141},{\"point\":[0.730967787376657],\"weight\":-1.4091893814056375,\"sequenceIndex\":88},{\"point\":[0.730967787376657],\"weight\":-0.9951492564450184,\"sequenceIndex\":44},{\"point\":[0.730967787376657],\"weight\":-1.468855709231272,\"sequenceIndex\":209},{\"point\":[0.730967787376657],\"weight\":-1.0895854003749517,\"sequenceIndex\":94},{\"point\":[0.730967787376657],\"weight\":-1.3452972323594308,\"sequenceIndex\":194},{\"point\":[0.730967787376657],\"weight\":-1.561301806093494,\"sequenceIndex\":99},{\"point\":[0.730967787376657],\"weight\":-1.800142036909597,\"sequenceIndex\":162},{\"point\":[0.730967787376657],\"weight\":-1.5015157115907647,\"sequenceIndex\":224},{\"point\":[0.730967787376657],\"weight\":-1.0300510083215342,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-1.478075703187132,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.0505375571033482,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-0.9797694507773522,\"sequenceIndex\":161},{\"point\":[0.730967787376657],\"weight\":-1.72655699558
47673,\"sequenceIndex\":112},{\"point\":[0.730967787376657],\"weight\":-2.621494725637668,\"sequenceIndex\":7},{\"point\":[0.730967787376657],\"weight\":-1.1745078405423919,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.201093201218656,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-2.499982630657892,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-1.800979049077177,\"sequenceIndex\":64},{\"point\":[0.730967787376657],\"weight\":-1.6097956093518069,\"sequenceIndex\":221},{\"point\":[0.730967787376657],\"weight\":-1.2205098138587167,\"sequenceIndex\":227},{\"point\":[0.730967787376657],\"weight\":-0.9425211117925953,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-2.611280247617019,\"sequenceIndex\":217},{\"point\":[0.730967787376657],\"weight\":-2.425375352614303,\"sequenceIndex\":66},{\"point\":[0.730967787376657],\"weight\":-2.5955142185414886,\"sequenceIndex\":158},{\"point\":[0.730967787376657],\"weight\":-2.1265641950538843,\"sequenceIndex\":34},{\"point\":[0.730967787376657],\"weight\":-1.0958415344268964,\"sequenceIndex\":68},{\"point\":[0.730967787376657],\"weight\":-3.748339230653709,\"sequenceIndex\":70},{\"point\":[0.730967787376657],\"weight\":-2.7653121916696506,\"sequenceIndex\":35},{\"point\":[0.730967787376657],\"weight\":-2.4327059647519254,\"sequenceIndex\":208},{\"point\":[0.730967787376657],\"weight\":-2.624990646418383,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-2.7563162335731333,\"sequenceIndex\":74},{\"point\":[0.730967787376657],\"weight\":-1.737739611678513,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-2.9825504959922258,\"sequenceIndex\":242},{\"point\":[0.730967787376657],\"weight\":-1.8073167685917038,\"sequenceIndex\":128},{\"point\":[0.730967787376657],\"weight\":-2.9911178934647618,\"sequenceIndex\":78},{\"point\":[0.730967787376657],\"weight\":-1.73580673519933,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weigh
t\":-1.3154091681056264,\"sequenceIndex\":80},{\"point\":[0.730967787376657],\"weight\":-1.2518165093403286,\"sequenceIndex\":213},{\"point\":[0.730967787376657],\"weight\":-2.390862971555316,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-1.735486158005254,\"sequenceIndex\":144},{\"point\":[0.730967787376657],\"weight\":-1.6247060147727397,\"sequenceIndex\":173},{\"point\":[0.730967787376657],\"weight\":-1.197677535086284,\"sequenceIndex\":150},{\"point\":[0.730967787376657],\"weight\":-2.6799281213499055,\"sequenceIndex\":43},{\"point\":[0.730967787376657],\"weight\":-1.4825008913516715,\"sequenceIndex\":87},{\"point\":[0.730967787376657],\"weight\":-2.7445118682803082,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-2.3622973085845556,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-1.0102840917328684,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-2.901234283596074,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-2.2558415389451407,\"sequenceIndex\":155},{\"point\":[0.730967787376657],\"weight\":-1.715473750063015,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-1.6499982108244386,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-2.604617901316469,\"sequenceIndex\":95},{\"point\":[0.730967787376657],\"weight\":-1.5651866266248662,\"sequenceIndex\":96},{\"point\":[0.730967787376657],\"weight\":-1.550664767418523,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-2.0560502910552176,\"sequenceIndex\":98},{\"point\":[0.730967787376657],\"weight\":-1.6285505355763177,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.982416971895108,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-2.5604201023226874,\"sequenceIndex\":205},{\"point\":[0.730967787376657],\"weight\":-1.8534556276068563,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-2.4078160171408207,\"sequenceIndex\":185},{\"point\":[0.7309
67787376657],\"weight\":-2.5800263483739134,\"sequenceIndex\":13},{\"point\":[0.730967787376657],\"weight\":-1.928057298824879,\"sequenceIndex\":225},{\"point\":[0.730967787376657],\"weight\":-3.560167208358161,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-3.007662070121767,\"sequenceIndex\":107},{\"point\":[0.730967787376657],\"weight\":-3.9425599842803383,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-1.0709928558877528,\"sequenceIndex\":109},{\"point\":[0.730967787376657],\"weight\":-1.2167169944445078,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-2.676857642067891,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-2.6216595269157934,\"sequenceIndex\":56},{\"point\":[0.730967787376657],\"weight\":-1.8707625027915304,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-2.741952941694783,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-3.0887692202358825,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.4111256716648566,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-4.1350234695805135,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-1.5317828560929139,\"sequenceIndex\":230},{\"point\":[0.730967787376657],\"weight\":-3.063478529160978,\"sequenceIndex\":159},{\"point\":[0.730967787376657],\"weight\":-2.9601787426542185,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-3.6132895430868235,\"sequenceIndex\":157},{\"point\":[0.730967787376657],\"weight\":-2.0562008750163496,\"sequenceIndex\":235},{\"point\":[0.730967787376657],\"weight\":-3.3813813025025508,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-2.223878842638713,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-2.3226246864946063,\"sequenceIndex\":215},{\"point\":[0.730967787376657],\"weight\":-2.493796802891047,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-2.5746290463306805,\"sequenceInd
ex\":255},{\"point\":[0.730967787376657],\"weight\":-2.9956432799667416,\"sequenceIndex\":256}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657],\"weight\":-0.37830866914710637,\"sequenceIndex\":84},{\"point\":[0.730967787376657],\"weight\":-0.4463872460091637,\"sequenceIndex\":243},{\"point\":[0.730967787376657],\"weight\":-0.41085873332651934,\"sequenceIndex\":184},{\"point\":[0.730967787376657],\"weight\":-0.4668503812110722,\"sequenceIndex\":145},{\"point\":[0.730967787376657],\"weight\":-0.5372332299725903,\"sequenceIndex\":82},{\"point\":[0.730967787376657],\"weight\":-0.44010041288892215,\"sequenceIndex\":27},{\"point\":[0.730967787376657],\"weight\":-0.41183488848551086,\"sequenceIndex\":61},{\"point\":[0.730967787376657],\"weight\":-0.48510167735530874,\"sequenceIndex\":238},{\"point\":[0.730967787376657],\"weight\":-0.6000965543382466,\"sequenceIndex\":253},{\"point\":[0.730967787376657],\"weight\":-0.6309301714507074,\"sequenceIndex\":40},{\"point\":[0.730967787376657],\"weight\":-0.5385140479567296,\"sequenceIndex\":23},{\"point\":[0.730967787376657],\"weight\":-0.4601380263570092,\"sequenceIndex\":50},{\"point\":[0.730967787376657],\"weight\":-0.5556133355759457,\"sequenceIndex\":182},{\"point\":[0.730967787376657],\"weight\":-0.594884774903095,\"sequenceIndex\":207},{\"point\":[0.730967787376657],\"weight\":-0.5727776685557783,\"sequenceIndex\":127},{\"point\":[0.730967787376657],\"weight\":-0.5032752196009966,\"sequenceIndex\":252},{\"point\":[0.730967787376657],\"weight\":-0.4964075933164406,\"sequenceIndex\":153},{\"point\":[0.730967787376657],\"weight\":-0.6299710522226973,\"sequenceIndex\":159},{\"point\":[0.730967787376657],\"weight\":-0.8207647638190864,\"sequenceIndex\":222},{\"point\":[0.730967787376657],\"weight\":-0.6709847045029163,\"sequenceIndex\":241},{\"
point\":[0.730967787376657],\"weight\":-0.7032535105181074,\"sequenceIndex\":168},{\"point\":[0.730967787376657],\"weight\":-0.8456417113587725,\"sequenceIndex\":148},{\"point\":[0.730967787376657],\"weight\":-0.5828771595139194,\"sequenceIndex\":92},{\"point\":[0.730967787376657],\"weight\":-0.4746335371342851,\"sequenceIndex\":181},{\"point\":[0.730967787376657],\"weight\":-0.9237077700626983,\"sequenceIndex\":24},{\"point\":[0.730967787376657],\"weight\":-0.5965496041863341,\"sequenceIndex\":247},{\"point\":[0.730967787376657],\"weight\":-0.577220391683024,\"sequenceIndex\":55},{\"point\":[0.730967787376657],\"weight\":-0.6099339748366777,\"sequenceIndex\":113},{\"point\":[0.730967787376657],\"weight\":-1.0471545012622319,\"sequenceIndex\":59},{\"point\":[0.730967787376657],\"weight\":-0.6228309969891549,\"sequenceIndex\":147},{\"point\":[0.730967787376657],\"weight\":-0.6612059811900408,\"sequenceIndex\":126},{\"point\":[0.730967787376657],\"weight\":-0.5362945666322334,\"sequenceIndex\":256},{\"point\":[0.730967787376657],\"weight\":-0.653074725310054,\"sequenceIndex\":4},{\"point\":[0.730967787376657],\"weight\":-1.1507947796865088,\"sequenceIndex\":233},{\"point\":[0.730967787376657],\"weight\":-0.5738160332811427,\"sequenceIndex\":71},{\"point\":[0.730967787376657],\"weight\":-1.1744472410785696,\"sequenceIndex\":73},{\"point\":[0.730967787376657],\"weight\":-0.7122155493954858,\"sequenceIndex\":216},{\"point\":[0.730967787376657],\"weight\":-0.8679170080529981,\"sequenceIndex\":171},{\"point\":[0.730967787376657],\"weight\":-1.7786304669594108,\"sequenceIndex\":79},{\"point\":[0.730967787376657],\"weight\":-0.8417737356911484,\"sequenceIndex\":41},{\"point\":[0.730967787376657],\"weight\":-0.7321270922840707,\"sequenceIndex\":133},{\"point\":[0.730967787376657],\"weight\":-0.8649697999219775,\"sequenceIndex\":85},{\"point\":[0.730967787376657],\"weight\":-1.2659140442293355,\"sequenceIndex\":234},{\"point\":[0.730967787376657],\"weight\":-1.6148596814511247
,\"sequenceIndex\":198},{\"point\":[0.730967787376657],\"weight\":-0.9919347065114452,\"sequenceIndex\":180},{\"point\":[0.730967787376657],\"weight\":-1.833892746836125,\"sequenceIndex\":193},{\"point\":[0.730967787376657],\"weight\":-0.6185808998581362,\"sequenceIndex\":33},{\"point\":[0.730967787376657],\"weight\":-0.48397711872655824,\"sequenceIndex\":187},{\"point\":[0.730967787376657],\"weight\":-1.0537372046167774,\"sequenceIndex\":163},{\"point\":[0.730967787376657],\"weight\":-1.046745542946602,\"sequenceIndex\":100},{\"point\":[0.730967787376657],\"weight\":-1.0941217178696185,\"sequenceIndex\":51},{\"point\":[0.730967787376657],\"weight\":-0.6037583220289323,\"sequenceIndex\":52},{\"point\":[0.730967787376657],\"weight\":-0.7651142067604528,\"sequenceIndex\":106},{\"point\":[0.730967787376657],\"weight\":-1.0476475320670484,\"sequenceIndex\":152},{\"point\":[0.730967787376657],\"weight\":-0.848241011745483,\"sequenceIndex\":111},{\"point\":[0.730967787376657],\"weight\":-0.8623315542633431,\"sequenceIndex\":3},{\"point\":[0.730967787376657],\"weight\":-1.0219322462113691,\"sequenceIndex\":115},{\"point\":[0.730967787376657],\"weight\":-1.760234985471205,\"sequenceIndex\":117},{\"point\":[0.730967787376657],\"weight\":-1.456637802916341,\"sequenceIndex\":140},{\"point\":[0.730967787376657],\"weight\":-2.0266224532379993,\"sequenceIndex\":176},{\"point\":[0.730967787376657],\"weight\":-1.049739066577617,\"sequenceIndex\":15},{\"point\":[0.730967787376657],\"weight\":-0.7076579776318296,\"sequenceIndex\":124},{\"point\":[0.730967787376657],\"weight\":-0.6697533995271825,\"sequenceIndex\":63},{\"point\":[0.730967787376657],\"weight\":-0.5496703117895667,\"sequenceIndex\":240},{\"point\":[0.730967787376657],\"weight\":-1.5326127324826482,\"sequenceIndex\":65},{\"point\":[0.730967787376657],\"weight\":-2.942939641105444,\"sequenceIndex\":195},{\"point\":[0.730967787376657],\"weight\":-0.6854374083736694,\"sequenceIndex\":67},{\"point\":[0.730967787376657],\"wei
ght\":-1.824645264514033,\"sequenceIndex\":17},{\"point\":[0.730967787376657],\"weight\":-3.1456742298365765,\"sequenceIndex\":69},{\"point\":[0.730967787376657],\"weight\":-1.8604762104611985,\"sequenceIndex\":197},{\"point\":[0.730967787376657],\"weight\":-0.7605655195987693,\"sequenceIndex\":206},{\"point\":[0.730967787376657],\"weight\":-1.4362612560989068,\"sequenceIndex\":9},{\"point\":[0.730967787376657],\"weight\":-4.055198780385565,\"sequenceIndex\":202},{\"point\":[0.730967787376657],\"weight\":-0.7472631077507289,\"sequenceIndex\":37},{\"point\":[0.730967787376657],\"weight\":-3.1206107273100008,\"sequenceIndex\":231},{\"point\":[0.730967787376657],\"weight\":-1.1131079305098306,\"sequenceIndex\":19},{\"point\":[0.730967787376657],\"weight\":-1.7897176210491152,\"sequenceIndex\":77},{\"point\":[0.730967787376657],\"weight\":-4.708049760577171,\"sequenceIndex\":39},{\"point\":[0.730967787376657],\"weight\":-2.827730637055983,\"sequenceIndex\":166},{\"point\":[0.730967787376657],\"weight\":-2.558926290996792,\"sequenceIndex\":10},{\"point\":[0.730967787376657],\"weight\":-0.9010713866150604,\"sequenceIndex\":146},{\"point\":[0.730967787376657],\"weight\":-1.076485390186903,\"sequenceIndex\":20},{\"point\":[0.730967787376657],\"weight\":-2.2170917810076873,\"sequenceIndex\":151},{\"point\":[0.730967787376657],\"weight\":-1.2741699910050883,\"sequenceIndex\":21},{\"point\":[0.730967787376657],\"weight\":-6.020392062140608,\"sequenceIndex\":188},{\"point\":[0.730967787376657],\"weight\":-2.5713770525208126,\"sequenceIndex\":251},{\"point\":[0.730967787376657],\"weight\":-1.4062525368669976,\"sequenceIndex\":160},{\"point\":[0.730967787376657],\"weight\":-2.847158460989165,\"sequenceIndex\":11},{\"point\":[0.730967787376657],\"weight\":-1.7285539277638995,\"sequenceIndex\":89},{\"point\":[0.730967787376657],\"weight\":-2.9329604859115883,\"sequenceIndex\":90},{\"point\":[0.730967787376657],\"weight\":-1.8321794889231264,\"sequenceIndex\":45},{\"point\":[0.73096
7787376657],\"weight\":-2.094926207033655,\"sequenceIndex\":46},{\"point\":[0.730967787376657],\"weight\":-2.4668794071821463,\"sequenceIndex\":229},{\"point\":[0.730967787376657],\"weight\":-0.6906337473361398,\"sequenceIndex\":47},{\"point\":[0.730967787376657],\"weight\":-1.418789836513835,\"sequenceIndex\":192},{\"point\":[0.730967787376657],\"weight\":-2.2824292321262787,\"sequenceIndex\":6},{\"point\":[0.730967787376657],\"weight\":-0.9833522773215045,\"sequenceIndex\":48},{\"point\":[0.730967787376657],\"weight\":-1.9504528742233027,\"sequenceIndex\":49},{\"point\":[0.730967787376657],\"weight\":-1.2913760137514176,\"sequenceIndex\":220},{\"point\":[0.730967787376657],\"weight\":-2.581496777111648,\"sequenceIndex\":237},{\"point\":[0.730967787376657],\"weight\":-2.022636724311212,\"sequenceIndex\":101},{\"point\":[0.730967787376657],\"weight\":-2.3145773687766003,\"sequenceIndex\":211},{\"point\":[0.730967787376657],\"weight\":-2.317208152816806,\"sequenceIndex\":103},{\"point\":[0.730967787376657],\"weight\":-2.086089599415411,\"sequenceIndex\":12},{\"point\":[0.730967787376657],\"weight\":-1.1988584776776527,\"sequenceIndex\":191},{\"point\":[0.730967787376657],\"weight\":-3.242740882500467,\"sequenceIndex\":53},{\"point\":[0.730967787376657],\"weight\":-1.463487619798094,\"sequenceIndex\":189},{\"point\":[0.730967787376657],\"weight\":-1.5567384059263454,\"sequenceIndex\":26},{\"point\":[0.730967787376657],\"weight\":-1.8158539762153432,\"sequenceIndex\":174},{\"point\":[0.730967787376657],\"weight\":-1.4017343125209452,\"sequenceIndex\":16},{\"point\":[0.730967787376657],\"weight\":-1.8881368191412091,\"sequenceIndex\":218},{\"point\":[0.730967787376657],\"weight\":-2.924761737906918,\"sequenceIndex\":183},{\"point\":[0.730967787376657],\"weight\":-1.6249835048205947,\"sequenceIndex\":175},{\"point\":[0.730967787376657],\"weight\":-1.265311282303913,\"sequenceIndex\":114},{\"point\":[0.730967787376657],\"weight\":-2.679985235797616,\"sequenceIndex\":161},
{\"point\":[0.730967787376657],\"weight\":-2.2746077659428683,\"sequenceIndex\":116},{\"point\":[0.730967787376657],\"weight\":-2.719413709806574,\"sequenceIndex\":142},{\"point\":[0.730967787376657],\"weight\":-1.9925637158756002,\"sequenceIndex\":196},{\"point\":[0.730967787376657],\"weight\":-1.9367036206239212,\"sequenceIndex\":132},{\"point\":[0.730967787376657],\"weight\":-4.009676453821784,\"sequenceIndex\":120},{\"point\":[0.730967787376657],\"weight\":-2.0611769488689924,\"sequenceIndex\":60},{\"point\":[0.730967787376657],\"weight\":-1.7153632689358809,\"sequenceIndex\":199},{\"point\":[0.730967787376657],\"weight\":-1.7063985016993353,\"sequenceIndex\":165},{\"point\":[0.730967787376657],\"weight\":-0.7445928558684366,\"sequenceIndex\":31},{\"point\":[0.730967787376657],\"weight\":-5.890773122905156,\"sequenceIndex\":137},{\"point\":[0.730967787376657],\"weight\":-0.9625049535392461,\"sequenceIndex\":250},{\"point\":[0.730967787376657],\"weight\":-1.734487502783286,\"sequenceIndex\":172},{\"point\":[0.730967787376657],\"weight\":-0.5771056116769329,\"sequenceIndex\":219}],\"sampleSize\":128,\"lambda\":7.8125E-4,\"random\":{},\"entriesSeen\":256},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}}],\"totalUpdates\":256}}}"
  },
  {
    "path": "Java/serialization/src/test/resources/com/amazon/randomcutforest/serialize/json/v1/forest_2.json",
    "content": "{\"rng\":{},\"dimensions\":4,\"sampleSize\":256,\"outputAfter\":64,\"numberOfTrees\":40,\"lambda\":3.90625E-4,\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"parallelExecutionEnabled\":false,\"threadPoolSize\":0,\"executor\":{\"executor_type\":\"SequentialForestTraversalExecutor\",\"executor\":{\"treeUpdaters\":[{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6146341146058565,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6255214339265123,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6173355557619195,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6424537490521822,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6770442929901405,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6670617915140761,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6470060574953065,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6494753406574544,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6482248369862156,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7111122190981582,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6884986134724926,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.2405364156
7148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7191897617635025,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6902886361900491,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7413988003783913,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7708727856668871,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6642069499799408,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9404390746152521,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.816230222625517,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8568471191810594,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7804561694743611,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7469157529074638,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7465680537740108,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6999084733016269,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7747571296888397,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7209080238967158,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7719800463845987,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7000274285243815,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7420237078093452,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7714865870004123,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8010912713549354,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8287897464503723,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6664928276518648,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8722166156692749,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.452344059536703,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0627722970336262,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8304215441652594,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.022453284840302,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2098078407663795,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1308820105161126,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-0.8669244229088726,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8062404976686423,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.789892276486641,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8220285779837672,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0068269200245459,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9737299968808646,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9412091054570471,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.808790697562306,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1134444676159847,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3932165148870164,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7623090003792994,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7257602020521662,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8581763271773278,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0337984576969779,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,
0.6374174253501083,0.5504370051176339],\"weight\":-1.1136670590794953,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9564313595531402,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7494019784035879,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9230486880716878,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8670175135569511,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1051158711045586,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.115072236519442,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8236664787134794,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9104509605256094,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9268524716922116,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.716545796911989,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1644372469804525,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9765441626931674,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.17538160009973,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-1.8062223104692763,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8390552661994923,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2770725115556942,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6301038591800907,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.463049892380754,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.894970786975603,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3247984101890453,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4070771969383313,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4245311775345746,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8459235005517505,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1758671181519165,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6857333040813078,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0996507718401896,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.327383547273299,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425
3501083,0.5504370051176339],\"weight\":-1.0254860530999557,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.327703771960061,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6140569919042995,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8194915283039823,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.123944164416666,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6109342399672828,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1599351681923493,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0470782150780569,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.797441488264947,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9917216387178892,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0450603342615046,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9565973017906502,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2335976483492135,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8207407094693493,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501
083,0.5504370051176339],\"weight\":-1.1366445144411095,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2839124517089457,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6727640063012823,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.014119419324425,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.364316680582126,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.07323935402226,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.776501886676347,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.050670620819365,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3844370527205472,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9256964704708991,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7745777335509079,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2828686802145257,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2502345930610748,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.298603689836242,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-0.9782151974010167,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0073330190504945,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9132638521567816,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1836778156044538,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9248993560218666,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4360295024064462,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0105676835303468,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8752316444994324,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2916069417127378,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.198967314782571,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2054643969963252,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4572940755157315,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0694712452744488,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.59436466359968,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-1.3920694303609287,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9790845293915927,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1700236705814648,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.155092188059932,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8332619389930083,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4147906618921513,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.214237167233229,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2192929951797002,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3106400429452085,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3494859071500294,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4893800217712083,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.199643032303285,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.550281588893283,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3227556440050092,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-1.890701858959415,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1616979632619806,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0203006871795393,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1610993779752374,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9214324920237151,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5699152578647797,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9542229576220636,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2465347172366874,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1268878723791396,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9345217675034244,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.639058054522798,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.558904622050164,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3997305643275446,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5313666477982357,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051
176339],\"weight\":-2.4180985223520826,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5332328406534792,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.818049833060684,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.17851983350423,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6218591301130876,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.432242552588391,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.41435128127985,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8710711689860564,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.553901612810165,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4664690723723686,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6618309748537292,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.19931941084694,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8881595866707879,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1293353323691986,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\
"weight\":-1.6349570571000702,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2196580529699257,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4561933943289396,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.60036491821988,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.949497546762246,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.498794635207275,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.128563303775397,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.621942128084607,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.519510845744535,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4971692548113116,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.683293725600786,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4563201729989443,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5181092464648076,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1402466450060076,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2
.7277813579269425,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9687381154451329,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.251341427260495,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3438782679704766,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.048240154435768,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.566530543124627,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.322824268821487,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3672518654164145,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.32290986918298,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.73962880510932,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5412812363201303,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2856553133333934,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.316712841065747,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3306100327653574,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9836426207
390336,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2954974119767062,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.234084023836952,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8762911791692782,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6955009879574092,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7246051479253333,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8006328282377875,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6883750110087017,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6007518721323255,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9898078235604157,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.648122577928123,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.5915733537256065,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.196208885134257,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.772369119010644,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1842258321215877,
\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2127735103942703,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7207153895560734,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.660357530966055,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.293168224313462,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.578540219215814,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7386017278498844,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5854941531113742,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.55601797568199,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8949663305802904,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3892439640273544,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.350278335526986,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.440468821521761,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.367052056643202,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.364332874349917,\"sequen
ceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.430961272436031,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2656797012676635,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6593958981072445,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.540098142343765,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6720800447694852,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.510138847909892,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9742735199759647,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9348777935217742,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6759521289524897,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0308300324974127,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4774274321908487,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4389766345324038,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5303352089127484,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.575664620524247,\"sequenceI
ndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9323159410853243,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2050288714104807,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.17154492662349,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.647555897098906,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.666591017742403,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2138839193092124,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.995012466675898,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9450562048387652,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5210659345291146,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.288151772833298,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.807145832361837,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.539659143633028,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1564325748836533,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.289114464515293,\"sequenceIndex\":25
0},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6793737267393096,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4074730021973758,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3796779644456088,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.571132733354329,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7375167900269994,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7982712308507103,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46160257609048216,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47234957712283093,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5129839426339975,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47626985955480583,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.478050424126917,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5580324894912493,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-0.6185679106276508,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5147304138095607,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4771575995299145,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48849538741462106,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5322631908956976,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5662605593129507,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.561232876958656,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6216523304632913,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6190014844005043,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5362960726570848,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5936856154507646,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4860121636166222,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6574972670394791,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5392135255718532,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-0.5311053453295104,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8344782821468506,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8767081269768855,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7370859384909471,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5963688724043,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6382512272916732,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5927639105567727,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7814212367779709,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7048008766664908,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6397715702005524,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6646096994334827,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8411861202559467,\"sequenceIndex\":512},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6476594370552187,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6927685066365329,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051
176339],\"weight\":-0.8084898176493764,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6958649086841673,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5969200377038163,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8935865590223131,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2198928255504724,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5537832258508532,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9606567836132694,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5324391911262383,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6963972056755529,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9009124629901575,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9853804272664342,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2260854695828003,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8916684884835586,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9421781951647543,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-1.1924772210363077,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1353121477830068,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6123099584419172,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9575906756489297,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.915054796093627,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7239374887175686,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.645941597248104,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.826568772469541,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1099637705853518,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7890266314808103,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.01829252483617,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6744051877147653,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0419074678473952,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7251720489265925,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339
],\"weight\":-0.8708874989259588,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9201061838132067,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0836918853938873,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4088323974575665,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.243348765342244,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.181023850831463,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2777897279551815,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4520888387304536,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0837989278904976,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.904222280619888,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0247971947558865,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8336055104085134,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5223673798163782,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8947547941839094,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"wei
ght\":-1.0590773161840559,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3501918618790358,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2733916412604798,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7255210803206694,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.337334295880422,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0168425989285064,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1638293765834138,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7075709726477124,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.211803194828219,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.043235257714599,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0170918376701148,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1215263840906156,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.286703001879683,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0536143381620509,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-
1.1978013340821176,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4949258666488292,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3525314739647172,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8975835836667849,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9222849266731339,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5748622383501973,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.214820720925634,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6040289942640544,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5993888668437137,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.628321865143275,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3395504369962783,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0271277932635399,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2575820613851927,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.139754419150573,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.27445
85274199054,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.430063545905811,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.393545224210303,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2107402318935672,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7668198378680026,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9725416515996916,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.597389044474273,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0735434897253753,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.078535472953142,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3544035465805004,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6024257352966889,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1165471114403103,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9616543324876712,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0310662690438637,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.13526124
44759433,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4092917639106834,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.040250016489322,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4637200514001787,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5307742036312042,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7470110984612602,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8900342771893002,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0958236087614188,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0097166388069636,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9336096934589261,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5467204452571457,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3810595438500037,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1536640435763579,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.976371636875073,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.558574926
9077878,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.293683961076618,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.98188394978397,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6940452368913663,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3344115529297125,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6148126837307357,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5708246435446735,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9055301384559844,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5994635004202213,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3222184365804006,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4754928600240147,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.200138025409071,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1523562360460153,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.70874972664507,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.688020718895389,\
"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.18684542689058,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1076821446544751,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.486554906861959,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.090581050442323,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3301956294670947,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0182458563002519,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4978748159760245,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8180058251903843,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6159693118402227,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5681549294946782,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.333319366492589,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2679262839741603,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0740398030317087,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0062800025618786,\"sequen
ceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0898846580813815,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.086566057712326,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.096427584875557,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1062462228885097,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2168801597490515,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3927129780487981,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1531844784162557,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9266746886086075,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4542383102835237,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.060748523871097,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2030198531526948,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6957448392040102,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.222613420117918,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2388300113605806,\"sequenceInd
ex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5138513754136929,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1923758691291784,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.492083833527857,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3860484778367637,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2316811929513252,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4722199105066232,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.790874142186293,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.031940930169071,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.347987777402797,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5146217699639295,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.830693154080988,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.634802950584005,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.930520552343428,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0598732390569028,\"sequenceIndex\":300
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.450716701607215,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.430027435067635,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9619953694982533,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.429491772679377,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7660625214295271,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.261493496212232,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.391463022478367,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.177208474860661,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1938007649801268,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7561568410028945,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7890001013500112,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1747952580425016,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1273079189217707,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2367062032293057,\"sequenceIndex\":203},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.379731011115925,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.510879637838231,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.23801606838597,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6805728013534669,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5739712049721997,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1624666703716393,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9707017970941352,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3494269465284483,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.027973327837422,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7712016382951135,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6519344428746707,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.312133628243091,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3495056697711714,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.285315739360893,\"sequenceIndex\":317},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7884874543467504,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6636859973284244,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2538196772574786,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.193745971406072,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.089121673734058,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3758599494934716,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.694857199798995,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.500793556557662,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9329238132209916,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.457165268435461,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4811575826612904,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.560954347130487,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1174418477040176,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6642623925185658,\"sequenceIndex\":305},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.393751088429611,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9513060183553315,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9606023534842012,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1184696936938434,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5459806145599906,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3233267314137522,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.57061812965076,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5581668818470307,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4171912885021487,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7302824610041414,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.152065561268401,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.121146814735208,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5442402197181384,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0019629260843983,\"sequenceIndex\":292},{\"point\":[0.730967787376
657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.439143191252268,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9997257797732455,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.967173951446308,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.604462830054479,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4045993118037339,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3255377424738877,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.135449309679315,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.722387100825859,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.34255865550598,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5759730768738247,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5047103509536448,\"sequenceIndex\":511}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5260944109533162,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.53590863
79131986,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.548388511053123,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5497644053083429,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5393683903666211,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5526880605271366,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5579689695931819,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.573170822567569,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5518202314073564,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.569900424305517,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5437397583510777,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5879334620115415,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6023704628113324,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5807667843568343,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.603209737853231,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.59578215483
7056,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6853745060153638,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6159080928230225,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6095932730795004,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6941930330477768,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6705752782265264,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5717030294373283,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.582031118308687,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6078642638859618,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6025386662222623,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6406788594971086,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.744356589115243,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6128850191964808,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6908807437299432,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.69223314280997
69,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6304533890109647,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6648460401544095,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6115483016566329,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9259874568754812,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4007540287832962,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6708968860976656,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7519482844688304,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7812303141607132,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7711117236223178,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8299866832752445,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.753319475133741,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8618101976411661,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9403798578162164,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8731740414272922,
\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8141741461810272,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6232292205051265,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.685374069060882,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7862085319435075,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7705716406432933,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7070939525742483,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6458950853815364,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9324878199962862,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.713599387183294,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7729660302876872,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0031714930709517,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6707776185331576,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7525191786174004,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7860103048713483,\"seq
uenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7102847551772383,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.694621974802462,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7762731430232328,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.788770941424639,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6699969685599683,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7490141546225475,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.965172281134006,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0913836285849694,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8083955894239638,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4362735648939904,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.70454160508493,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9475609748166327,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5151180782235767,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0762076808864134,\"sequenceInd
ex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0637694246316745,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2593176119465064,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0258200821035812,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4095750114302565,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3018260934082273,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9960102891222928,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.931067707281034,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.078492643871671,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9553903475561631,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0959950373760599,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0867800024317138,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9354902037387403,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0372318764458373,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2170520226896766,\"sequenceIndex\"
:339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9963929963864807,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3006909990508584,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7588571191650195,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7041784762018137,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0620254917650023,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7131867077189776,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6935360249588589,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6046292691614912,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1884159378171235,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.053728809008446,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8804688461474481,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.216178024577335,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8684841484561897,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7768969650573727,\"sequenceIndex\":
25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8404570699813109,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8829256225370673,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7990978075812794,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.016588538431301,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1128125809561107,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7620315232195607,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.763437506111344,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1320535959017948,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0702401204803174,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2113044156525357,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1821138829686557,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8984484713181904,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8676375739608724,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2742238423256953,\"sequenceIndex\":11
4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9790042044579668,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.242796737913473,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1202178758681027,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7450001802803075,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1213662278942402,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7338054564939562,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9064918492033363,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5790934215375558,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1224076165324355,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9802557787536221,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8311658905511503,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4603499657884453,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7401482929538656,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5832155274376851,\"sequenceIndex\":508},{
\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.031638735753606,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.647952340757775,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.597285986662614,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0091790282893314,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.490412396177701,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3168753330657785,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4585439827656566,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6008513765965287,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8159453519692652,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4862354893213694,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0920671328998064,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7638894626082946,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.850044507767239,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4771259099294545,\"sequenceIndex\":261},{\"po
int\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0730392659564187,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8111309033356875,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1748064645286496,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7540842600070077,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1994772091735926,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.872039871646988,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0758566799220315,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4753480461056574,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3854010532257865,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.8885527790002365,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3232540932876944,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.75521279302123,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.565871671168844,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.517201055444477,\"sequenceIndex\":402},{\"point\":
[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.262716479419825,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9452752638231493,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0049368366637386,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7241532141497298,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8763571706672262,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9951412443302015,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.267034973397639,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2162676234212646,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4980630377074884,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.71367910387317,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5329478468391644,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.526106745961805,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8622169776710213,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1338300106268107,\"sequenceIndex\":170},{\"point\":[0.73096
7787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3980571502681125,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8201099613191152,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.463199809878807,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6324143356399654,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2572931711806938,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7244674571130576,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6185139996374267,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.008407923463852,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.019058444616309,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2751389931582486,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4469826072565124,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3343631152686233,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1245778277043583,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4031598812094743,\"sequenceIndex\":46},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.040255126539048,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.847042868793321,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0832089264530416,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7399390861808774,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.683444615258053,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4555575800949925,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3525178752438138,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6829301766857925,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.986796425696412,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.079226605756757,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4220198649506708,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.5841103240635475,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3184508767577006,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0976726918400148,\"sequenceIndex\":497},{\"point\":[0.730967787376657,
0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5617338480605927,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9041830197067955,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1253932265760285,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3559148900195197,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6166680192106695,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.279639393815452,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3227563896156234,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.369721094191956,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1294813847250802,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3683527419175592,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1765769506675414,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.044538945850007,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.04500043710329,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0708814993176805,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.240
53641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0818733965580076,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.633188204019934,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0040071096294456,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.78037965984465,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5634325508099864,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1605491671595864,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1859051986973448,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4108030072433013,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4442747747696694,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7641447794933804,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.615670713535556,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.013360439570342,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7398292733652077,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.515109257755804,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.2405364156
7148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4425516135074237,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4488492443444936,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8384805662689887,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1850541896527313,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5067552052677682,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.69510244929935,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2900658880039564,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6260121683310464,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.654523807606816,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8604470007121694,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3636901681806397,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3965938657610506,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.282872091267528,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.273854280665398,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4317853988162614,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0273715183600531,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.25089915350913,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.274665203447609,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.086410744392713,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1647558020007938,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4972906071262615,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7419821356055711,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.769855761766187,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5121901884862492,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2931929451204605,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.461714147184994,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6706713230068377,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2466376527345713,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,
0.6374174253501083,0.5504370051176339],\"weight\":-1.0807483051785824,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8245199856546708,\"sequenceIndex\":2}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.3818313988005322,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.3897263779042205,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.3888570213178338,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.39231675133027366,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.404305455877325,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41279602412810024,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4163470662365382,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.3925031390937806,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41151063307303837,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4070252232377526,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4447913376176663,\"sequenc
eIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5361604637469644,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5317598415612959,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4419730902261371,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41752154125157137,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.39925694949059193,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4384885128254923,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5172311096266909,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5750541081826791,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.437665473047515,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45747735529633776,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5422131509930802,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.50127181414208,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5404504342808591,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6161421647579762,\"sequence
Index\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5516515641681752,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5482957393926003,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4763413025490339,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5008708736616337,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43993985166104926,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5661928323665468,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4046755214691486,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4968213361649064,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4648855943284984,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47116369587000817,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6257127251607816,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6041254997695569,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.898290908263374,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6787172650331293,\"sequenceI
ndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5961868208985492,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5145579274952363,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4984509855606138,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8679971900121906,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5708537005079727,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5739271900386365,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5651492716438227,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5538418749060455,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0328566924805238,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6595030214954836,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9926073862591953,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6650327084121611,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8344909892760701,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8465166116596569,\"sequenceI
ndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5871839272961783,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0263939018029444,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5131015590243841,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.766815528074294,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5286509161402626,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5224136583642938,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9907724170986802,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5543939816928036,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6498024850797093,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7540300320808839,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46177564974729,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5709740819030578,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7486349712274847,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5534897794917688,\"sequenceIn
dex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8875396195700384,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5398216174268968,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.110606874831603,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5584668905224405,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6823471722479599,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0622750940217478,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9958678310764336,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6736059612589619,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1760337936688368,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.959720115906325,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8286609697332845,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7056372052431007,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2292655913023134,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8487511909890516,\"sequenceInde
x\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6317991438363069,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6862659183275681,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5793408703495999,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7285771429144958,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.032810996426315,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6179470786747483,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6956219134695297,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8759648128471107,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7110576141270611,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.627368843218544,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7894977502454714,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6522155107283083,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5953747553846762,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4221699590438481,\"sequenceIndex\":26
7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.250622126301647,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.239227744998176,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3028406958965504,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.799216172345604,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1880308988235864,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2517152085252594,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7029419529478221,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8946317780025669,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.158765765227053,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6563028845756802,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0297523641236055,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9847923156858226,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6692726875086656,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.870573621578685,\"sequenceIndex\":109},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1786811043637484,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4229372498763906,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.071763234710726,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8307561449519493,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3211868693280795,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1633910371276686,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9860789246266324,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0266866531141596,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8603072191363206,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8611286317571976,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.340697023380137,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0829183774495406,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6003982280537972,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2358364246705715,\"sequenceIndex\":444},{\"p
oint\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7299490997228921,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3926959574504159,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6287743820380354,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2120762384082107,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1121127158017148,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8893068470934942,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6567334453903101,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.428095699102055,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.584710563519354,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.539488674297153,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.477763251642157,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7330331575912814,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.589193130752905,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.72114731068527,\"sequenceIndex\":385},{\"point\":[
0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9062279337524528,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.634457486043725,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5212624999108586,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6048589286668404,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1602077201311305,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4063870448939615,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.880391053442508,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7594285742091744,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.726621721553871,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7025445134510022,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.714387302352788,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2032163006692609,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.986391231058456,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2462702418029437,\"sequenceIndex\":294},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.399686930069121,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5506797719248047,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6512093333399367,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0481931295869216,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9997362091837202,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0313573008877495,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7994071674729375,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.278585206219982,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5444482124452668,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4443821051711452,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9381284024531418,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9131220064497483,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.923902393545851,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.570363140824152,\"sequenceIndex\":164},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0010072933612122,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6834480661011617,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6512162293912321,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8264757085369383,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4855604934623214,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.147292141171303,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.233115295821773,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.48478930851418,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.266955195934262,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5152518668267247,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.164969309235952,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7879219139966225,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6295577989206804,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9004233267328796,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2174832549397854,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0149617629244805,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.31999335777745,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0104060600568974,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5957879135499629,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.034122940871018,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5309774638937086,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.050583898961708,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.22897280492723,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4486095221600215,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.60749369400639,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8126027580089308,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.295444248554696,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1924273477025005,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-2.154967063995641,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2874056067919226,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.87521515091206,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4054458722195553,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2972008541698166,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4173464673514686,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6619369943798,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6520148419036098,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.899858737618254,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7861580940155224,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6660437960725956,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1947597617121266,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5026320097354273,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.661381375871137,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-2.274012387705602,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8225582904416973,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.159548267541763,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7984540283116048,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6744116501150894,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.434173852078577,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9573474020182315,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.724254360330068,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.859268567121385,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7087672563802975,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4027346592834933,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2325988360218747,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.31845303488183,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.308757778344876,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350
1083,0.5504370051176339],\"weight\":-1.84659257422574,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.469640832006987,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.361434354129223,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5769763323398807,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3345878066171963,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0577767081802856,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.689091596889984,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5052667108624447,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4325828263069296,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.610499801929948,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.035953242855645,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5759749567529977,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.29213935105969,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8487312754834977,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-2.910085642382972,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8718428419505133,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7494625295459763,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4782326446707756,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.789293425576507,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.887459187921114,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8151625988846933,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.303758155720062,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.264614559480873,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2484412661276765,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.077690115090195,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0060023817690387,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.084327415337533,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3651234605642533,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051
176339],\"weight\":-3.0214578132825016,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6115916969801127,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.295348432159696,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0542585561719897,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4671283211835586,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.694903363608748,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8212757528408487,\"sequenceIndex\":511}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41076555251373625,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41128782560609983,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4196758913116757,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41608797741161296,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4140988442425498,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4426765065532665,\"sequenceIndex\":415},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47379026229384225,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4325365317342104,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4258793337994201,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48462242096144525,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47888408153606793,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5320637343429684,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47737560850493765,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4918765670165315,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4930902943761117,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5116274074719844,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4638506105635342,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6128234564986658,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6214201042705433,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6080940767048836,\"sequenceIndex\":433},{\"point\":[0.
730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48555451526176147,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5062644983749363,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47962286339137894,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5363674983703273,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6821735039044128,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5322798983342368,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.818890024133296,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5351460733981976,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6083661387956425,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5354710312022348,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.687620384982829,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5382100173547699,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6734689683729348,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1516941654910087,\"sequenceIndex\":303},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5268891883232955,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0494343181786654,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6130496289590855,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.766943006499105,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.350810496015029,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9962839481120661,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.982227213440814,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5398019396891583,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5307076250411541,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5606109931149603,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7793360979694587,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5260005685329219,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6596853122937784,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.606353406204448,\"sequenceIndex\":458},{\"point\":[0.73096
7787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6227255464609807,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7834061232605847,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8648765777322432,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6038655140127112,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5586725831876546,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9827488162508279,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9175737400568243,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.671355875418893,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8618955842070507,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7326150445706083,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.795281433520965,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6866299084815191,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6645468707397092,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2381129895421958,\"sequenceIndex\":124},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7208002639377629,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5382508579704549,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2934546225659256,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.169311482366279,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9108079582275467,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4470860467254272,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4217930845386217,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1787920444540714,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8956072405759048,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3154265048634843,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1338700454178934,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8753698699908398,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3075253641711848,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4331856823022093,\"sequenceIndex\":153},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8493868684253044,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6802808091686423,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8341658978533315,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9965828851633682,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0156431068311775,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2015227144463676,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.012972711144663,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.229033721643924,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3122935811637702,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9744914126267747,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7921520447621415,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.021388810740196,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7237478344089914,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0676111172654497,\"sequenceIndex\":90},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7968591421945883,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1809487648210473,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3003928506020972,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.431087413330223,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8232689354900539,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6730565925708216,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.646312642803931,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.99942749464963,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7937313469672573,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9938573610450077,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5342228202572405,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9493313394491,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.338816792720813,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8635645293542427,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.2405
3641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0030683388556312,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9389949772017029,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6706308608445511,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2405305860315234,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9975207525545502,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1517025824299787,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3948310882848447,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8538124586592356,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.151164357199666,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.162466321675002,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9412716694934333,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8301504367243165,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.243467964749317,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2417036560575492,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641
567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.933688505413061,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8723374445570999,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7961928673393782,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3275091077862429,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7706821725929299,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8773586414157153,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.26158088379581,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7607234263408399,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7695826861284436,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5833496271936753,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7147239924928903,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.158920430324552,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1788833387228013,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.75782134751831,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-6.417532114317823,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.438477728636481,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.426434325214341,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.073670683238186,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5063394390928242,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.795349078217416,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.039104169899174,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4613393265881522,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7335740264329358,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.546959375378277,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3968959562812366,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.61148489247996,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4392953354174463,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5205594776019975,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.637417
4253501083,0.5504370051176339],\"weight\":-4.177642114170059,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7057409413982019,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7096657583999617,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.230091514198687,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.714322370288945,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.861719708177973,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.966474253186489,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.993471091901151,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3591741656279284,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9909674368039538,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.876589574921329,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.029439069774457,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1334609925276595,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.470460079602587,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350108
3,0.5504370051176339],\"weight\":-3.2974160839387876,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1240700040502956,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3298813143205752,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.6792989438579555,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7230427428888277,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3500736303212983,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0950560146648822,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.306277524897012,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8676006884913285,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9876019692190816,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4981353363275716,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8134043409619,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.829021717070229,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5770216313215686,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55
04370051176339],\"weight\":-0.9985276228887084,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.253789264229145,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3901688546377242,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8027717390414317,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.063668597669345,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0937186987916445,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.769099770767167,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.336237349359063,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2317266719968463,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7361090554855763,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2484118582842743,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9367796749518853,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5923010369543105,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.627795695946994,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-3.5222670986219726,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.4116150980328985,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8287453594858387,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6838907313661353,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7852409844870567,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0703254887936844,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.16008561860816,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3790982832130605,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1642313768860981,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8474819894343542,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.274957096635813,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.037524518939259,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2066920737923026,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.733976812922519,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339
],\"weight\":-3.373619767684486,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.559340163343574,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.380204544250063,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6369365670964835,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.314643959044869,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5759094257005888,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4053184081638084,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1258217747588337,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0415027550411342,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5266073786222618,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3782438995186466,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-9.953797287944797,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6746927860933547,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4469061638752456,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"
weight\":-2.921889512446123,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1329314282116625,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.339262222522451,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.894894514825776,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5024450185981868,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5955213039999796,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.12524790425941,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1430979076148817,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.086501599476621,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.201274145068709,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.892217807536163,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1074404478321163,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8722287892340526,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.403032018154942,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1
.6516304433651645,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9211022975902685,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.0824466837442435,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4242052543895873,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.129338169513039,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6864057242123587,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5234595342420423,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1329272459750455,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3157497076463855,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4389923121650618,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9853646989139248,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2520838602180056,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8796745761171992,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3918672634466696,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.
7561944538157306,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.851972299582312,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7826093567632108,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.152247335830071,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2517111086132537,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0924713786680296,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2967607859773023,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7298254886252185,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6340658657101486,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7758850588370527,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5093390394507233,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.872705673391796,\"sequenceIndex\":509}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4167723115098101,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.2405364156
7148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4167990148935595,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4201540303623035,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4614281267599293,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4499693586048835,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4449605786938321,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4242291640232057,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5324418702619016,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.50921350145025,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.500259527671344,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5141214713692313,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48498447314103166,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44805866177787745,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.42690605113404884,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4790456295864388,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5699030449756834,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5462463079220368,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6070263882570301,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8086067863538057,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5370396560023059,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5113958758600466,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6586428438895201,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6016441209332574,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6862510415496836,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6057965853602543,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5164289796638947,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5565308303072316,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5722814711469648,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45324049017695023,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567
148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6716418217824242,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5756107786404235,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5776557428215141,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8947167155870214,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.706556669825146,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9671582407294835,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6188941534135812,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7094219553767822,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0944956704805329,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8449179205059666,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7566687247387598,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6565857848392259,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9025515747870058,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9147826531929157,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-0.863723061115721,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8142708228085695,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7646036873126584,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8824882819557498,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8288799464560672,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9870344656199865,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8556760816837938,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6342926662357837,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5513594805180716,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6105645668151447,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7648497224096785,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8466448908464705,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6439876257693039,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.233852148171099,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-0.6993554311799627,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7892830225401399,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1970713388493834,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7820762938023343,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8788933896512361,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7644667088183936,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5854286049532917,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8783312008764235,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2745533372962181,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9111129361712282,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8603608716253812,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8242172441897218,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3476304407184339,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4645923449145009,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-1.0964296748878244,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9898227644027142,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6000212093831463,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9321939383172002,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2328229580181325,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.397887009624906,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9655503331664804,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8477111570141848,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5708718181864472,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8167700166792216,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8007004557853445,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.757114827416389,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1800255469731118,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2005166993122984,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-1.644878383900552,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.282021598099767,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9688913246132209,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1716915140854396,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8251232557771363,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8703946909742786,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0870270650340428,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3070751170412067,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0091042002455142,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0405581391549248,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5228576608521347,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8802571014473741,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3683478171170431,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4742588742389273,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-0.9887341009063783,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2733272179895687,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8418380479405387,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9559350533556563,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2211253782926048,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9836343110120379,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1833841407297678,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3932561802144763,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3329380561438897,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7896393886201634,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6025475364648505,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3309479335262882,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6569372417006827,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6846455422918375,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-1.5561165094242877,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4372125273923964,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3620359644973052,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7607544121521558,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4052012595648429,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8257006396740046,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.031727514235044,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.887129338349002,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1306779458800624,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.028603072944664,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9254004656157865,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.261591586086804,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1640936074643782,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9628635291694991,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-0.586201776422233,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.172142201176963,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8499668880406148,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.818125905427418,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.308906319624573,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0356167666625393,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2570873035523995,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.430886699367294,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8241762126214414,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7178884979932947,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1870636992782688,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9777717376003955,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.742051484357004,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9981842647277235,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-5.083280188752955,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.770876945875638,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.595086940861381,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9756799605996769,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0697538230439236,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1583312866367208,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6784978777911506,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.664645755359197,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.619833790254309,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8185184205794773,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.743235528687532,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3951254403681062,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8652863569372478,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7767357590713768,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-2.1000400497847838,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.912434097268538,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0021491688301567,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7077666740930795,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.40536173440498,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3054746654804634,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4055380567872855,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7163568219062824,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.42710705753575,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6633288914629971,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7320495752991107,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.5961361530758085,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6820166951255975,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.283063634414821,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-6.79989930004874,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9930102128269587,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8657054550471768,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9641557120861795,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.52385651310193,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6079141320404788,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7623302404358607,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.870327390055666,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8483240361027853,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6677716192541403,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8921956481376203,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9410090045330928,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1654091142087983,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2885336277252009,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117633
9],\"weight\":-1.5743547366852917,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.356170080379277,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3389202508668592,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7797429897382833,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.374561236531541,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8555223123838052,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1260811893112157,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0274427354560043,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7006595590724687,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4698509085722726,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9683064455976488,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.029710049065513,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5166861493498827,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5653451785830022,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],
\"weight\":-3.176808983041949,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8680210435700113,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.335346762211262,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.935987306107144,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2628978921723353,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.169583570949931,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4611824390900774,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8610941484755057,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6430151612882244,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2425692323403414,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9557489679392543,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3390421961869596,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2853202155590195,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1429850466931155,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"we
ight\":-1.4358150059269241,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.359289277403614,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.878093198556987,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5307756607657867,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.262798750990264,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4533279109527952,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.432815923867239,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.919723411598876,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7591046496952494,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6552726216793336,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0569351084107086,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.31052183135636,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.219083768945598,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6848972271597833,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1
.3392759410937982,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7152287774466326,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5598175292489465,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.119342393394067,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.448981161782397,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.629054608573534,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0584242223027,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6737845289774624,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2370602021520845,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1977625192087147,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.654805360814565,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4353869245170883,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3150627642694626,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4487869792572465,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1262304
034812924,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9029956932863246,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0100934891300133,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7567118960737673,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4392349304374634,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2340252497390036,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2588910399741953,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6938856953385721,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4166644274205087,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8381324408696944,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.928480037885627,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.380817279728157,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.728104488032326,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5926259643997076,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.806834846
3648936,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7277501525793977,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2695034764436435,\"sequenceIndex\":510}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46149947926444923,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4659594823532823,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47530074501989983,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49639985619579735,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4922752378224734,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4766000344752878,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.498987367330516,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.516083860702059,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.508882531063219,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5239683431275213,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-0.6643260301038669,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5900208144160212,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5011076675630493,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5605515972743128,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5506896791618354,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5488906668333844,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.541870319697298,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6223635174172961,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5959332578602772,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6967019030180789,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.648264934550384,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6661678138593382,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7484323965368391,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6374423890920422,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587
,0.6374174253501083,0.5504370051176339],\"weight\":-0.6923756857220646,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.52111634954169,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5823711389994106,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5620807975082941,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.578025227142135,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5680679883132159,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6668171660539345,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7987631969888587,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6274268548633014,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8788532210795931,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7132759001090798,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7356862903389717,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9832619222008698,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6115160116889482,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0
.6374174253501083,0.5504370051176339],\"weight\":-0.7016193187921123,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0533131870813117,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8520285228096371,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7898377485362478,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.857876462739306,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7113364118557332,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9912991459390886,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8551764331336311,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7594111801584363,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8094330989287257,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.706586140511292,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.729590883504583,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8262675813587942,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7118790549872378,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.637
4174253501083,0.5504370051176339],\"weight\":-0.7849352040753034,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7824458906463245,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7894649833420584,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.580583702015045,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6817017232491498,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6613561519030436,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7953082288349702,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7195764731992128,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5865182956327576,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6985031810228517,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8349596829134658,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8222153751649728,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0473334261390963,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.857793151632225,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.637417
4253501083,0.5504370051176339],\"weight\":-0.9054133487865534,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2398176688752884,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1174166051585004,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6026318044499503,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8468317828977605,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8945626599493882,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8845956350746506,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1751775120684196,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2163701855344222,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6152834444831263,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6771294564098844,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.854234772035474,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7954052491579688,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.120196197752501,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742535
01083,0.5504370051176339],\"weight\":-1.0876115479311899,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1862712203515768,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9569176121605903,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0129196242504916,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0965503502843068,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9133367566010938,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2759825096130857,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8020539986770282,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0018207518342042,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.65097779118904,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0200548956309925,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.09180477280912,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.884011475306212,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9709490919119212,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0
.5504370051176339],\"weight\":-1.620100300093402,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4751531623746514,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3404104445102945,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1461086826620284,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3508802050528954,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9743344226332966,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0285113583122545,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.106354158381496,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2063045961356373,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.044715786622563,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4024594256883167,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0139506086760703,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2381427645859084,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9736234542192238,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-1.654959055446408,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3477103553785372,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3686276111954827,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.674864924343923,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9557977069969222,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0917503104403636,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2326192828427933,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3820793444763784,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.465651979325381,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1248963832555754,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6043279290623136,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8218891706992454,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9567752894866791,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4349000791676074,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-0.7854107789356383,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9785553658889756,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8018400800821314,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9003976944691638,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9640329001775989,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8617968411246071,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9847368006278617,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0550975255691815,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.503078870416265,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.100571556793972,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8020058441149978,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0718508257281956,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6385892903868065,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6371227187820672,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511
76339],\"weight\":-6.854750775779705,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4217856298380602,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6434518359362225,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2405441566320627,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7834569724891325,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1340216456480228,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.011916368568114,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8081452609777902,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4357936046426365,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7067159133894356,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3475591595189316,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8400238592338134,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8722855250473998,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7810986932817021,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-3.398743355100165,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8168358674540204,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4063359575001892,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.059209231312482,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1364361055118954,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.525225846084745,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.238237837785735,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1011378395105142,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1286767577027081,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3455652917027545,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3682572915849516,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3902639977259823,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4748527701432574,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6605035859430908,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339
],\"weight\":-1.3616009804386198,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5129434751986652,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.991070679235778,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0593127637212834,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2803660258551393,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4784457019775843,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1765022222728898,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3196302941930893,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3917184044341961,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.49708722820678,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.38617036678186,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5498942901817678,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9515390329343966,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2892144772053504,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weigh
t\":-1.7256125880587048,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2676948495244216,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8914902931196396,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1464884474073167,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1045779805392284,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.431243541199696,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.802090956271655,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4817824012153045,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6883767544532615,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.477653437076561,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3512705065762167,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.676992395233858,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0179466304590106,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5218989280247226,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\"
:-1.7950223139864478,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.551949218420904,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.150638956502148,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3885912405299905,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.371848731264089,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.379001278346474,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8375917651703295,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.001607896864202,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1914649471337242,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.224703537573463,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3578929479795967,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.507238308267798,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6110706613718073,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6311067987110413,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.34
4613192355254,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5123124171545066,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.394558594617732,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.258458534728357,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.16898002120294,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.127434936448886,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3714747483341845,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5806934342345946,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.851772540088463,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.887799506262721,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6641813985816918,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.540661769837613,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5544247400219597,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1684299663474516,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3531842939
972936,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4228249850519865,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7800784134079426,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.926280755361144,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2678829774558686,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0032139171752235,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8434266528228211,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0420626458796507,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2879693522822557,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.48781015930085,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.129039088726586,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.116456426550789,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7216347064115296,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.2952924613740455,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.97899984026063
84,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2716927144158783,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2814240683217952,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.112616024989032,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6757823969942738,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5901620204394413,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2700698421376972,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.228680851353041,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9593917830491158,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9209980479337376,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8758639362968248,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0526853104747262,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.659036571992088,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8750151021454244,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.76752459958321,\"s
equenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4336004271856495,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0940169963645419,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.033379401203524,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2285160979096745,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.260987730167664,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.837513532219282,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6872211597567133,\"sequenceIndex\":509}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48303930308021575,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48396339947514816,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5026563496959839,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4901365644317718,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4960032668380658,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742
53501083,0.5504370051176339],\"weight\":-0.5074859121731506,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.574957274661368,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.525942307774576,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6480473630782035,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5274941157582563,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5285827348533466,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.535559003470778,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5413099908866473,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5785653568725179,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5916288817951886,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5333134411508716,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5558858110416947,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6954680489519465,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6893977163981376,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501
083,0.5504370051176339],\"weight\":-0.7663368267767349,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5675051245756465,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7707319931663095,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5646856233755305,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6215978924925147,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5857427187420471,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5433503469555423,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5543995243197622,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6608499517730513,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6564034124920135,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6230950254154383,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7466044122803212,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5337609259880344,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7166852891892732,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350108
3,0.5504370051176339],\"weight\":-0.6635667579801929,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.57937933583185,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9875579898970226,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8447735402372814,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6827579873385918,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9635696978440663,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0708668546954563,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8961267214756261,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6225465310762305,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6121451775389806,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4243059190319238,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7933001966932964,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.815299110853778,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6042008832139771,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55
04370051176339],\"weight\":-0.9552592420594945,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6876176962504159,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7693376428004076,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8880346063608838,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.786064484459801,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.600459340632388,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5710082600806824,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.627701502227527,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9270867821841668,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8250321597281144,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.656695920643402,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1143703406898235,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7885438689653547,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7540115108696637,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-0.895772745835263,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9278077777498004,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5914148355165058,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9417566719427323,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2331009521793082,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.87112043388488,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4766157653527965,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6640174121928157,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0413952762337249,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1110046787652488,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3258694801344535,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2685411558991206,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6752991901484489,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9016782803083642,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511763
39],\"weight\":-1.723566570657588,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1235269287234417,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9902896163909489,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9746696015665348,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1638251660310568,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6939479874586145,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9511360272072311,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2714845012366216,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9915503350603413,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3109349303235525,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7491077788834144,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6220013492916332,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.739526097412724,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6406785674867164,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\
"weight\":-0.9341266969039261,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8737947349354336,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9112547735728804,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0755502161071497,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8428815517913705,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7174165790169365,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0261328254097544,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1658355693077098,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7063863167015881,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8391191686147762,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3548221153290436,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3770752208969916,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6055821689825238,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9537638076062757,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"wei
ght\":-1.4641449410634548,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9988921744681916,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1617712258929331,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8161786800619594,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.677732429762816,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8360891926999352,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.684793127550665,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9639794465987516,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1500789258994055,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8655281057791215,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8610031430071154,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3547001156459482,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1489715703323877,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9571003409454909,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weig
ht\":-1.1337216193524182,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6087831725987216,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2351396711915825,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.939385524398482,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1027898495301844,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0631389564500988,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.287395809679524,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3055948734690783,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.627943892408,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2856911282701047,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7823441604549101,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3947132720433035,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5761776991991223,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8091088227419323,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\"
:-2.1278673153234995,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5715978836228954,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0280251414547728,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1918461896189374,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4317195898218373,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.732709643373611,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2689924317162338,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3167543471468943,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1345790286138357,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5549287923512696,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.116427077426103,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4422880663861997,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5434390908196027,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6233354127249378,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3
.305774720769098,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5175440019390838,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6703861928100476,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.280353259368148,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3522096992867334,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9840268227473988,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.794495235886857,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.107655165799792,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.712203687312368,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.222898297677219,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9174518615030642,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.947584121486783,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.249452923937453,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8585780568444124,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6689244
950278377,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2659381300792583,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0966237950292355,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9375003365427776,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.19172763091545,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1157359882360645,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7739029470493537,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.398272489854038,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.677766016284396,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.532915976749696,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7761314562025374,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5280650897698647,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0360089859945694,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6549807508276322,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.442230155444525
,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6956350264864763,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.391564815261322,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8258199774103665,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.215118872884743,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5903735387378966,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4315318617410675,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9970562090615174,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.991838446250195,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4917447164349151,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2401379443862637,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.678530944198201,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3541145634943614,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2884391079238684,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0452301727388558,\
"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.357357984495991,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0265256786198425,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.825103778671869,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3325559359293853,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.030313999656466,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.238328432480371,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2417666733114288,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.258518185651491,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.034445620686547,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.272537760946548,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.145246934166006,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.359246961977198,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6020479961017298,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0508210208472404,\"sequenceIndex
\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1184088301875805,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6888198888714814,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9178288961664576,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9629406559272211,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4121746565729412,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6684810383291222,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3049720879616467,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.068414152775658,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.143907064060903,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3936653346287817,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9254459444962189,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.252367659697231,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1713300285576547,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.530504530595043,\"sequenceIndex\":27}
,{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8224124378145492,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.01516780128912,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1623208840279429,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4481080783084597,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3215935357892412,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4001899054277263,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2430640380597275,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9320556647354696,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.131008393691928,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0017287288624357,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1403250078344165,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2758433901367512,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.168575643235679,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5166642375262778,\"sequenceIndex\":115},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.495275276538174,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9019091080634818,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8048387450298524,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5454559109099517,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1757175386438241,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4823158449152056,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.110738741436794,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0505856729354432,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3266970156378246,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.389467737377894,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6124890790799773,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.453427344316238,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7917118634535198,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.742957722524656,\"sequenceIndex\":61},{\"point\
":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5811726534140598,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.932464429844905,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.088360897121808,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.988617644615235,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.887232578085686,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.240315834119491,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8772944793658242,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1454030530382875,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.984404924481527,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.281213867851595,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4927562347297587,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.464722950727941,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weig
ht\":-0.4476690086469726,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45026332004590003,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4494386366225582,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46192676568585217,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4782066678245175,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46372649381197195,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4728061244592632,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5031460697992636,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4667790894807297,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5738131220170594,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5896770972903458,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4825839264757147,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4846280371169179,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5051682582303615,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"wei
ght\":-0.5027632030832622,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5109946204983183,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8367452294140876,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5400163985660391,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4923544285081229,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5811013424019122,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6349279918930775,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7090693989058693,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6439202637490327,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.497146895932269,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5093932761595497,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7338257611289283,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7958772345342358,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6032829594589935,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight
\":-0.7939514571213566,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5898129848007567,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6719625544292368,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5279552628623602,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5295387008971533,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0156494443095272,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8553854555052031,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.360461567914433,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6449015638592762,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5432938224683019,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7095118344578343,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9201973753975807,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8429193216250845,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6560769582927922,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\
":-0.6586338484219141,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8269544514792545,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7677006821805319,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7323277825818054,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.782486571080883,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5068021106731421,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.107639923423728,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6178125562554457,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6165314510277936,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7879004312072716,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8747305548688218,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9764883960289552,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8291161190558181,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6859103381216802,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-
0.6592645563376651,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0025729727612256,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8383971360787419,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7851842669628364,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6699930694076253,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7665782631575636,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8837108725820881,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5983708113229347,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0210121558939118,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0177336001710082,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.146306200241214,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2538516636632109,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1302054627699096,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.255193593197089,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.
002071499904941,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.012891635392044,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4678303900444916,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.024055541787221,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7757025073077688,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5589299176832306,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9317884833844118,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9519551509919846,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9339428410088514,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2326324720106125,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6837500001551347,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.064874593972222,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0110324026039672,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9932253741463506,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0010
729878387523,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.653752989013694,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4647203308353156,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4005552061947044,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1295045319730062,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4907723768225536,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9190912621469529,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5760442692432304,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3304480414770188,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8097606190375244,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7825858996689381,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8661169209254119,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6079438336138268,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5724975584504128,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.431711
8626168253,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.81588545480451,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1291579568954644,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8463023191952451,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7510949484361329,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9122141232905525,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.862985620495765,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.08562858149762,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3042386828481811,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2962233955580897,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0195541808695938,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.209329774781226,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9847396044763976,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7119172665090585,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.75041421248
401,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.130101605487931,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5104050563482843,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3054014959054194,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1237955325271836,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1999245902307862,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2095451634102727,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.922330936414615,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8658301012285063,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0443697776542147,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0969886231774308,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.360202763668346,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.803271451155882,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1487106889209147,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4897794751877504
,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.621543050106999,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.926871746453948,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.373519365989522,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2211552250342144,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.142724104897086,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7400341811298319,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1226695572372845,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.026821816512353,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.669836575896388,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4864250008401618,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5868377546293553,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3851764832447784,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4723887480304048,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1392110420535553,\"seque
nceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.852042818985824,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4096535311787417,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.124905845145985,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3164603237363846,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5485209593772702,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6549558655834598,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3626739071253726,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3435790539778996,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.691771082214417,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0680282929412541,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7296977162652714,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2479518009076598,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.316268296326581,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.20389419968843,\"sequenceIndex
\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4806836777996766,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.031600483006702,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1499165233271365,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.142300803942197,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6878789976169173,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6308443009231326,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.597129651332262,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1647978488490387,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3554757502299015,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8308305018732614,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1905265166136956,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.007387525474351,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2461766923643456,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.309711627755183,\"sequenceIndex\":320},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3886955919398667,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.078478875453992,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3578024848120105,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.113475191922418,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2144956430695895,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.704098804322117,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5020857806406096,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8854734553591093,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9130933150262894,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2816523975770853,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2508448580307863,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.140049635550771,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2270303929683644,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8122272095521508,\"sequenceIndex\":485},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.224905144553081,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7097244247234507,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.043737812161776,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.28171355601412,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1356350604930676,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8119469066527363,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.4159619067231475,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5903098785856953,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.062444157095284,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.32888220405799,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.826197352070088,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9182994627835025,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.154391353099811,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.702953720227949,\"sequenceIndex\":471},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.034796716112329,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4893793840595562,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.26532521956944,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.173040403976327,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.757557314085403,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.2534784394533345,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.301840788680852,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.561575219883399,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1190639424149142,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2728299172440694,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3387160717665196,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.570472854215694,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.63633256539643,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.565937096109349,\"sequenceIndex\":64},{\"point\":[0.730967787376657
,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2724859714853751,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.818346970361953,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.938635671890566,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5140625179981966,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7406154032276584,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6948339229756424,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.11284311953107,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6100453108978563,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4144820923811972,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1690629700480204,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7660443356413142,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.60987068537738,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2257048825473988,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7867499832712327,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.240
53641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6375255501585297,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.526410347984732,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0355247771695266,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4998345978955117,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0431977736820612,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.98803236277479,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4897271959340934,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3768652587165913,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.251799731688658,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.143464339462398,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.489443243381304,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.11382441523289,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3814766242564331,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2337438918832015,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.240536415
67148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9241205716190493,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.878021707647061,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.312065559003574,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3180716888289328,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.338227720931037,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.011289478382119,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1850979123370724,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1244684074652103,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.6129708869473784,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1084308114043604,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.247269646492984,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.235622110460375,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1023409415967467,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5506617490235124,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9927841781516686,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1745324276509828,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1812005674883357,\"sequenceIndex\":511}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4464027088808663,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.447337343354482,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45106661810814463,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4547182916836945,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4717049061879133,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45384168969950506,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45312360337680385,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4734235393567861,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4604257775912886,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5069197571213607,\"
sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4893607720926554,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5395661842445478,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5577606528508725,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4632281831018059,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6059920111772198,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5623789781509387,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8037543188408989,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.469379306057217,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7812675473521326,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6056322462532264,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5308446824050217,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5823195085035173,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6454793037219289,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5554230326827028,\"seq
uenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9030643899374788,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6894087721726755,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5740330805867268,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5859629584348757,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5712517234424548,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7156435980182217,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6958646091915615,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5859734615222338,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6349325598700184,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8837782752247747,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.02496521185145,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.542221208229966,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48830731448948617,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9142020785067969,\"sequence
Index\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.824821456113282,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8836111081463681,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9455314521537332,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9793848605294939,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5647220322472613,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.690537353614188,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.613972508212671,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9953749188771093,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1922094763006126,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8845542924885242,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5785725810148659,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9551401923333571,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9689802888937409,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7058082100582024,\"sequenceIndex\
":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7063384947070575,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7568306170332701,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.593063351556748,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9320025667076965,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6487422734385959,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9556551854491435,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6747976199240344,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0051709962842166,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8306113123475595,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7024759658715072,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7770671967400182,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6019480052931029,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1890846203632062,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2382506498211738,\"sequenceIndex\"
:2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7957908844296395,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5013241634733194,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3574258562357486,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1703296307246804,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9734901077570717,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6882092709842323,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5492008445382315,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8092259526369896,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.649605696558517,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6973083956360606,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2410839518412113,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9918268708738216,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.138077338947045,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6746910164740927,\"sequenceIndex\":161
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6176151874345157,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1542547291213021,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0118488781514736,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8152212706234998,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2608647248587355,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8428810293657371,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8309908349528439,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.954055371233517,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9205032628819328,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2729296703651838,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6676267794534252,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2368221944581146,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2824080295367721,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3504504409867621,\"sequenceIndex\":47},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3060854536359887,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1174956622504166,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1007186872358505,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4413622478215111,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9302739219931118,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4409998425465418,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2170823560700121,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0694111376227067,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6650125595793461,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9112829269112159,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4228284363441213,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7545622371411612,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1346509357529753,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8000474275389611,\"sequenceIndex\":216}
,{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0704131911248844,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.079048493758569,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8411059302637702,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0754764168849698,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.154021144156869,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.892447572123235,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2807726863533306,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9591226488428947,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.173377366250924,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.413417051125168,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7438581301550593,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8077351941901432,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.38159129665716,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0511734288071795,\"sequenceIndex\":28},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8538573035756423,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9113912396067033,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7659827695993987,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7855375148371864,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7832315293652733,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6060210386611683,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6650885845689085,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9671750528106762,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1346721521588226,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7686070216719694,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8904551218523546,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9225906019112043,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.782297060942022,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7929017172137125,\"sequenceIndex\":17},{\"point\":
[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.089608704363972,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7856468952500917,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.306459592964179,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.277244061948335,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2661511078529413,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6163508863027873,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2448134678173854,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7134419456500436,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.648482067538227,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.381870737667176,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1904334120924285,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9791716642012973,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0896351643980977,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6271035888527163,\"sequenceIndex\":389},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6974598828926628,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4219915326948223,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2928266425982877,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4184313490683174,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9765473818410317,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.759695944435379,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4102245786839915,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5131020196598526,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5534597481064165,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.657566482978812,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3448667967351864,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.545014404095832,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8912594441431263,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4369886005027248,\"sequenceIndex\":496},{\"point\":[0.7309677
87376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.038589207487571,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4654052184938764,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.161002903330988,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9321998489719567,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.4755053011205055,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9809938322824414,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3445314144742024,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1100411298062831,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8744688875421487,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8608807988801758,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2081103371763082,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2560282066142276,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3423877399661766,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3453492773394737,\"sequenceIndex\":358},{\"point\":[0.7309677873
76657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.087517360851063,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9164248843726694,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8155610076736883,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5030951404928095,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.119522060964183,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8351662017997277,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2412749609684568,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6404575656602305,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8654308061674425,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5026145913687983,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.135633032692487,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6705099870977262,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4556270301843872,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2302263830819418,\"sequenceIndex\":24},{\"point\":[0.73096778737665
7,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9840740350299635,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2307169702725527,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2423593526333938,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.185695432762646,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.580291182404378,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7545057421650077,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.406497280536577,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.026531223540764,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.674772413060875,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.157921030273102,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4589414614259857,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.108461685918367,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.259819831334318,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2477525062456,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641
567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1381481487224696,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.090565814152004,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.162702848304662,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0625545861133916,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.530989719288362,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5182252223168566,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.067544280645824,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9610189994893252,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3581851573365806,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4681572697978718,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.24564387035553,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3604231850381727,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1984555447666563,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4276179563582352,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4801751030501518,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3603163696126628,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8569411673574723,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2474798448058397,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.529663694240356,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8907927980943557,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2405877571046302,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.145688524248079,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2013810114512338,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.823240732067974,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.356680241757205,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.256220136015881,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.490493882252987,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.351640466307884,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-2.707079802555753,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.371347543993177,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.310741064893119,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8300076927385318,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9668747920843448,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.129128252833952,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4175119897756927,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6133734625191454,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4395654711156671,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6925206906696462,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4465982869262692,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.210328240636655,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.09720955969797,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9772971459123225,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425
3501083,0.5504370051176339],\"weight\":-2.197296103001011,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1218913312753471,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.037602562553182,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.434591715104045,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.488978484563866,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8524708371262606,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6198538172532597,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.645775962149425,\"sequenceIndex\":509}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.546514687455079,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5493953868808967,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.594740298179645,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5755864625455219,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5551490213157744,\"sequenceIndex\":365},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6247831341717176,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.602548456664254,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5888682257986289,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6006216651860541,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6212225856230348,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5577107661138884,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6874523258216996,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6515123605939124,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6302178818481434,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6256481196282556,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6109760181372657,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6064243057536036,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.61116374415964,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.853218560063389,\"sequenceIndex\":432},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7171450034414271,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6549019415674112,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7058575745185988,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5690885324228165,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7101926471878427,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7425208847203907,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7644127471506131,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6666871463432955,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0523754765865179,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7784214115771092,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6708835792300155,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6659975969588943,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.667363370333931,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7744382184009259,\"sequenceIndex\":376},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7388512863572764,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7501698932673961,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6850120283420686,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7896105691533823,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9619530018078013,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8652055849459257,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7181616836260112,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0558601059910844,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8655124707500794,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7942094173533268,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1117076228144251,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7749782045580909,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7585673409650232,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6718854221347226,\"sequenceIndex\":316},{\"point\":[0.73
0967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7645603680359028,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0711277554057612,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3751086584822245,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9574437972498357,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8163762141912689,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1259883079203419,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0484523337728318,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7080348753604414,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.616729092000196,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.268946292725009,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2316386362759324,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7901116398052825,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7195352815958922,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7754731490747407,\"sequenceIndex\":353},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7687996724750551,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7166257190400679,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6688542966957889,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.296732367816131,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9908954661789855,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6529032293678754,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0992105310972446,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1805254969294006,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8700950954339267,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8132157950396584,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2070088955400946,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.298195527224425,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0405158332785072,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1841538655359918,\"sequenceIndex\":75},{\"point\":[0.7309677
87376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0236036157205712,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0946161746471206,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7674524331726587,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9308099705866033,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1897321289425,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9709880940838252,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6211422509777917,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0759813465525818,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9048873864169368,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8729537814791762,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.741900001964695,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6616683311884552,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3195231501988407,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1919184727537253,\"sequenceIndex\":298},{\"point\":[0.73096778737
6657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1133090918402184,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1638009891821002,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9430004743599414,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5357303347963494,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7318981741013121,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9070808771662214,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3098950074137108,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8134048830326748,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0820441877659657,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.109292777909107,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.601295992783001,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5750238449663463,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2799125208023305,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.376297436305193,\"sequenceIndex\":475},{\"point\":[0.730967787376657,
0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6698659152871704,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1899410365596936,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.182162676386752,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4809878377898238,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6514936120876764,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3883975797248966,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.950375325288186,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8163928713501847,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6171628660572512,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1935441037317327,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9401876892246928,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3334209555728371,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2688884826350795,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5179652407622246,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.887055575074267,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9161114654959076,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.117713529948815,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9045332868516583,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8496226464808059,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3254444978364317,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8036242725018521,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.027838413350464,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0533163239936807,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.258649441242071,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6738153635402229,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3662014647838454,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3015088308960794,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6055360783794448,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053
641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1200182472885407,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1315024723714007,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.310900605078839,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.922266742089911,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.290392132148596,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3026409578717815,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7332542316669404,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.623308866819355,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.692463144941148,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8177755294923728,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.284819477349927,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9976563546278912,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2404282941249174,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2166376533968482,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-2.2543521705355785,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.108734857978181,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1270742933250204,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.109696126498351,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3809161069567337,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.502396448776844,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2967939615748265,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0901609766204663,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4891669222199457,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3098178094968707,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.100311077877881,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5845105163752535,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2466737993554853,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3816785615885396,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0
.6374174253501083,0.5504370051176339],\"weight\":-2.584979692101912,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1041665704394794,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9908872272462662,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.58332946447074,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1637947984856245,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6351094879517971,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7117996688464163,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2952414303002233,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1786802391136257,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.092965912660855,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.182124047075843,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2916314488957847,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8767260158941825,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.350849697635394,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-4.75803134530283,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4615233540588037,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.838347374601918,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.433286567076634,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.642045048238077,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.737910993718098,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.383213625617188,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.007966552058207,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.590656079957613,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.283077332080044,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7864275795786797,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6642012677308655,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6354980900903233,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7435624363720126,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504
370051176339],\"weight\":-0.947840482303043,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.411521869857225,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5308390163216696,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.346589402772135,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.908980770490697,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2319908980595917,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6726310263210356,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.499445084010276,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1106026655673795,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5716053946625443,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.081071244285161,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8354104881241216,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0217259938940657,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.929248707330805,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176
339],\"weight\":-1.7235624603424942,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.918603690464333,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0324124390493896,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6302240013571274,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4661351060533239,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4479189058620032,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.025128231761602,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9847377225488345,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3807825660445874,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5058488207844578,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.154756239799756,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2735333727173384,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4955945184401327,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9965003804264532,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117633
9],\"weight\":-1.7464628777424005,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1322044125338886,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4311285718519062,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0964872767320966,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.966949108795893,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9215143091137883,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.205566827319391,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6633849187769538,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0884812446079972,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9192189214744295,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3060819462318887,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.240315936720175,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4066754616069064,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.203283372069934,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],
\"weight\":-2.406030050842845,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.174936532394174,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.699969679622575,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.157446150753026,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-11.054834533413633,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5947003443472,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.667075791488838,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3525394708280445,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8193224303775875,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3509590811193948,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.560083583746113,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1294293828993873,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1827149370862389,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0228984783028274,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight
\":-2.2523186822127714,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.21510027519209,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.804702272657793,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5952725686009739,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0161786934540706,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.384447358014826,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.529303645492517,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.028637455687704,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1071895842852115,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.049382516758875,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5956121352562196,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2801968996017363,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7584554061749608,\"sequenceIndex\":130}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641
567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4245402545142029,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.42761761021691325,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4373634448673042,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4495743196763768,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45590571179003936,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46616419539728665,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4464926283336448,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45209322574352695,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4669361887101153,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5241327815696623,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5803080210452414,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4916301355822429,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4691963267990482,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.523205116750115,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.2405364
1567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5273708652853656,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5530740905459745,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5665267850092313,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5009817150920671,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4960466704440313,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5252321157224729,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7515175492242769,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.749144996203222,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6968148301726946,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5165033035196044,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5328128023251664,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7297163116448433,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5508389154629085,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5559666558299698,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.2405364156
7148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5682459603459526,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6004991012685099,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6405615880969655,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5585147789498048,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.887901125782575,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7678877798696764,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6554543618797759,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6104269142781987,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7525767362138371,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5859687745690954,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8841019545147893,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6218492583101923,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5649462446073674,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2798713245504985,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.240536415
67148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9762414169305106,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7549030034280073,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7653679126249846,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7058615329157159,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9680374700381943,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.553907783220941,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5378256466231421,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2075444609303234,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6925221401085385,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7409216096262119,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9849763381526706,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6908664660508264,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5523863829048833,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6345579272558313,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.2405364156
7148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8210636635303337,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2131104451597734,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6534772490840998,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7288201347456673,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7097105325877606,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6732033389831533,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6841024925377285,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6178847375456501,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7231501684970841,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4505749950358822,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9136828416241283,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3937300291514323,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0929593545360703,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.518009311589723,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-0.9592686336597569,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4251299717549892,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7673674219976777,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1084933157912291,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1151074071683722,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.950596740718165,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.869405092578305,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3648147883117625,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4027971412979447,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1745345741465176,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8759232711189773,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1000210397778052,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7688382852566829,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6893181658499632,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.637
4174253501083,0.5504370051176339],\"weight\":-2.2691529230877903,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1789109350428453,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4653150799614425,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7675068824620144,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3916286249459546,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9329407585755383,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8476904958742433,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4468511555025936,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8930340793271484,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.594486278620267,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5129083378238206,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1388447912006976,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7234488711146039,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1064178397635143,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-1.3925347558438232,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6726315553591438,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2198873305571152,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9898812769610961,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1674500080055044,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2217909202767732,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8688962592884122,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0327755019879865,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0436243476783524,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2377213866287595,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6819748283088574,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1524667028325366,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7451777929085425,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.18138505169585,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-1.772280916820836,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0937704400816148,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7766185077091057,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.423824792974761,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8796212039840703,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8603942100355908,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2657802917793508,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6471529892104295,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1493895054746461,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.972252084138383,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8657424268555746,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9653830896800304,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7945459219452196,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2585912941044235,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-1.4741677473945412,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6700797979107271,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2221537608135247,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8131488857713143,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.977552758637854,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.784208655497571,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6335388010764844,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9357948604608253,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9857706951051306,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.670839835607138,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7384952607202782,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6643136513233932,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.555243573977445,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8304865578862697,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350108
3,0.5504370051176339],\"weight\":-2.928351217548292,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5351167313673615,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.392409775236867,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.320135827102347,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.62527592503415,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8492441599458225,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3191201896086555,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7780319647350975,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2233807680029065,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.6212821405911875,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9073880224135007,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.592672369649087,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.565087441575272,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9595017385165223,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-2.154994893606544,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.167128341342926,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.717771163048333,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1701315521495332,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.625151402757812,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7756222546750946,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2715480688895937,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2758130801013707,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8970269604295482,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3404625577802394,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.862038022022778,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1416428862247154,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8166928428382328,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0403361111363636,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-1.7130367109243325,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.038276951750943,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.653986072998711,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.322526511185925,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.858946130891659,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9193708012368755,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5460987557844073,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.986296030058569,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4435392319329683,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.845224295180476,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6060932289012817,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5459099189089582,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7100116264869616,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5441143156772497,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-1.1378815001062783,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5599389228367686,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.802016150342857,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.731336483363755,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.084694795731381,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.87369013798577,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5073297785004303,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.559894882224792,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.857657828238342,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.257423883698831,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.293954756547118,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0446876575951727,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.099499424793411,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6523469228000347,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7
698608671132714,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.4300133037142775,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9517640895835486,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.238107464918438,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.758578189839916,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2657213520641788,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3436358061770513,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.32501672374607,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.646893054225647,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.338877130674424,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9602062481579434,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7211662993172872,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.504755380002727,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.019983291292974,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.14677003877
88384,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3662193100790203,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2671596492430504,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.27340578839354,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5995009860196907,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.0250916892773185,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.70868804619341,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.956039653769438,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.750399218756337,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.317776728355683,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3751170792647693,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7650460661034912,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1140648909477573,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.81413009270984,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.942979785444234,\"se
quenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.042700970984803,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9214709758734123,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.433588388350291,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5015748582906563,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.982097844450168,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.163547091320041,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.696328845753428,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.242785588367965,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.091416554200782,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9063771126226428,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.630632190286712,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.171134638793058,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3970228203707737,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3017518081950996,\"sequenceI
ndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3000942920911585,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.032284816991065,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2926280296670098,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.309444958194634,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.789602446670803,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.666994533826686,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9710011757769252,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1972024644502917,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9512788279573905,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0529127166255887,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.864089218835444,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7940982966929373,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2822834361810114,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.519858515885342,\"sequenceIndex
\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.596099928314291,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0281874145810583,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0846811766574522,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.42577157128223564,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43098314785390923,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4549389503032981,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4459601670612897,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44403351653395323,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48733934608621066,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5423646722484142,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4499620416675082,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4466321523087702,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,
0.5504370051176339],\"weight\":-0.45547253044592517,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44413372032678755,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.531296368194988,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5699007003516177,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5507515223528424,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6000968814686976,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4633774987109342,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4503611164637571,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6364943623430289,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.589155013016897,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6047528256340658,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4628017436149688,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46248434500014546,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.596390685063324,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083
,0.5504370051176339],\"weight\":-0.5924213556441269,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6440469807203737,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5871090377082834,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.631409107157898,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5692539988827584,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6487595081670657,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6485298998338335,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7231674227419639,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5080289939319849,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6464945735356906,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4699727935934105,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5154403132791351,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8083941910529774,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6493948841746351,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0
.5504370051176339],\"weight\":-0.7756615490662808,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6052094088124635,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6433180390926775,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6359818976340655,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48174969334204853,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1768058882756078,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.969194663850387,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8975059845552877,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.763038594597804,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6407429914442662,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8500012241484614,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6132492323426534,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8264782059608461,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7027890944245433,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55
04370051176339],\"weight\":-0.5886533873585007,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6662293925433723,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0368861072552191,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.124673338918852,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6472796776139346,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6119362059092694,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6821665751495988,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.016645531901621,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6635265316109871,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9358871782398848,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7871303976593977,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8409709016076512,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7477282822280064,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9777132335860164,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55
04370051176339],\"weight\":-1.3347930910309416,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1257669388215938,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9866502773551973,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7124516552740541,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.976701962448851,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5422575828320445,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8270964357250807,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.51334946474567,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8314475183483154,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6822608187364414,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8542844695788023,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7793258911105261,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6451395599636623,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7585732810081675,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370
051176339],\"weight\":-1.385950425661054,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.194040289038836,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8613928970163192,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9911097874115143,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1182540883424485,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5871155772733536,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9660240649075291,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1819057178454313,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.520902556387541,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9992468686747257,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4711047081860302,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.209128389937666,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0582163118947967,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9545793753322744,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-0.7708691759557037,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9971357841852914,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2494400617001664,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9992117825468564,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.961929038163236,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9982400582225045,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5723859200470434,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.454288636198124,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.402818674803514,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.190786790109033,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.960146572910703,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7402869350925158,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7098810990398742,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9159099932854018,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"
weight\":-1.4602192385109953,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1515022451853996,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6521909581100376,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.286994332561126,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4806114963353956,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8594349982715559,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8065530524561866,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7259631145388034,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9051347841974696,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5614718373500256,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0344494810106935,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3943813388566169,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.62413958987453,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7207083876105156,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weigh
t\":-1.02100554162771,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.235847384567449,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.263628781324693,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2563996418728627,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3324853611413736,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4212096372833167,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8301069129201664,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9925416455668638,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.191437998698116,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.05342874168283,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8091870732673476,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.32199109101835,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6202395157126288,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8883329722194666,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.
0999961935539864,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1482671199341041,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8384452981332016,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8786318079321955,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1889418408619292,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7016394982628293,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2999359868996447,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2585111739340107,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.870039699252982,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8580554124134714,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.099397878403296,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.697393578604065,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9810440416772627,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.240177346841719,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9970
815438635987,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1572082272247584,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3137881092039077,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1161979765385062,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9140105880697458,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8624108341178605,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8906415108766663,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8819881715942999,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.397086719419744,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0700765722282741,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.346947777795787,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5361853152754252,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2289129230122025,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3061566852610147,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.9695978489
296895,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.188280478455236,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.38681041790401,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7461879997223926,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8510528340781385,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2245500000294915,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.440966152217052,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4560881963821783,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.14669780158999,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1074835692123126,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4283932938511072,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3362284733754446,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.831085775126966,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.075430906357838,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7290266751770298,\"
sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6573581385435436,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.045149078414731,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.576356872129711,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.344669530083995,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.496271950652654,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.664197548839216,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.922795777705017,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.714744285543127,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1903426126500902,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0890895212982534,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.937311353150331,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9382892093110855,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1568837632641753,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.044601739057143,\"sequenceIn
dex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.930569158236135,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1970353805082612,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.809445433797473,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.469686231392909,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0224763089709756,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2984603398110295,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4218184140685362,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.741377500710625,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7941382027159385,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.629807623763654,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.439311850102927,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0435899715528874,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6671180554219927,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.857691865884579,\"sequenceIndex\":103
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.582329102589408,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4133937000282926,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3466766436824795,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9605580739294703,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6964482449937461,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4488193924972266,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8245002247740936,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3755173238634033,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2242270766685968,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.686841350570513,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5162705531647607,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1694503952764876,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.877524815967249,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.485829885957578,\"sequenceIndex\":220},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4502072840161757,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.427133849658036,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5762639401869625,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1667770307463403,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9165244764465832,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.071999444143728,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9880547330233453,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.298828350832225,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9862911667749998,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.002904827649652,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3980754245368487,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.751822511335502,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7938813317048368,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0579482136885257,\"sequenceIndex\":234},{\"point\"
:[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.652110583578865,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1800136309782934,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0434168412348401,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1118228935325507,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1365356875585606,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.356186776257128,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.052797906123566,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7232116196967049,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4427481821178767,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0829010349699466,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7654944358077582,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.792372084934134,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2977326942973353,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0972354595704443,\"sequenceIndex\":31},{\"point\":[0.73
0967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3103992772203066,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.366778179819457,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.918464168891595,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.451136752261278,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7580742523050026,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6182259786017603,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8259912979360844,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8215019854963914,\"sequenceIndex\":505}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5230277624172739,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.532819274920722,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5236619573318523,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5552797164243609,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":
-0.5360622551108103,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5368494745575265,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5416998574087696,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5916532135422167,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5720249185252707,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.57764367467834,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6122916306595552,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5936276514761349,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5652005249423813,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6302658286263276,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6250026397366142,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6486157072338647,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7366604913805621,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7306652497372076,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0
.6144577393282542,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5922468049685518,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8195574059301386,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7482404469328932,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.676872758708282,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8160809579822433,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7228117460127872,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5721888206165145,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6639459644443285,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8733764741062298,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.63551726045542,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.651871199204806,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6591515221165627,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7340042325547169,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.661
376669220937,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8844969294034866,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9752107929081006,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0463644148550884,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9378695757296456,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9438712648188142,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.768307539098603,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7156816241625581,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6150822328044925,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8360312842905221,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9129252648441304,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3362804959479064,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8237708213829863,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8192702062066931,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.72096059430
1267,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6401518637325812,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8882498092725748,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7666995718399776,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8821014136066245,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1119854823773179,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6257571041815826,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0083743949084703,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0100021284681542,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8965158450224696,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9276138276476438,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7127699462279998,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7108307788005315,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9642217769485625,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.75242083206
15376,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9340315944036639,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8568906814995019,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8960629145692631,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5376098833283576,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.372310975590303,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0028967933626265,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.868156435862296,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8874267969752457,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6213603378875576,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8632641662729914,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.959458988256981,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9484483355190763,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1538666706565,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9887613261533049,\
"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3860823574920702,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.276541953002777,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2963423505445468,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0727441916799334,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8469539109934414,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8833201368066492,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8759671177300477,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3994684163396922,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1473235931720998,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5795028841826073,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9161815416919143,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2674652748066055,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.097567525119564,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4618366354068209,\"s
equenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.326695031381563,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9944224241610572,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.200533873023884,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9720676426411408,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2109876419210357,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8194600655718111,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7516369956472801,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6597077875206028,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9514822544235234,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0707077694778684,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.532611528749292,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8489037951364916,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0230021614975204,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9999831624725194,\"sequ
enceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5643012437777744,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.571709771403457,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8709516909686861,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6319839256673145,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0149466673822198,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.086065177146683,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2842985911277478,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0285540990327533,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9807876725510254,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.191217531716323,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.564775313363598,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3417542216604805,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7311606370838393,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8818207583774407,\"sequenc
eIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.91891097941388,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9425544061268041,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2477873141140017,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8137895694924129,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4019468866594296,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.066599955645598,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0920361480509755,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8917208858651162,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.781512536429709,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3465909317708407,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1340510430834139,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.902419662256089,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.537062971558999,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.323312921755332,\"sequenceIndex
\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8603164981532965,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.203518582809704,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8023242380879612,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1522064253926085,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2087510826434715,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8673446333163906,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.298648773204768,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5386812428343384,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.54381548056268,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1574379762180556,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.020998384625923,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.248911100984731,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.324800111336864,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5169484376878497,\"sequenceIndex\":488},{\"p
oint\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.200969159928418,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2543164209703384,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1229878195864598,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.052257225307414,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.748149213889352,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3606133562601403,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.68323934369887,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8053923813061532,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4602509555162944,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7406712467606753,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3953762738192794,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9663862151148677,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.551141965953222,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5280744891413067,\"sequenceIndex\":260},{\"point\":[
0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.655146918823977,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2400551467439191,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1165709965087025,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8092035435460805,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9564659668966701,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4902296006702263,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4078224478445072,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9005599586384696,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.46005492754107,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.995187535397495,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.598910273337953,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.027831187141695,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8238077419505254,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3069365027706186,\"sequenceIndex\":43},{\"point\":[0.7309677
87376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4078315993793495,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3334796781398686,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3530751213576018,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.851623944958543,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5539694823366956,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.560959328204277,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6942811839719623,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.66509768678631,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.119679947779673,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0296009739251488,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.691948050368068,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.216645712409021,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7693809808673988,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.742881435673525,\"sequenceIndex\":262},{\"point\":[0.730967787376657,
0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3203849347658396,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5993704546088825,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5225032811977077,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3903387784708992,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4893976863644105,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7856263126804341,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.552381219064778,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.711380524161448,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4023189645272875,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.77850491253371,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6843328725897584,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.734037600270434,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.074356918511944,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.047417344459207,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.2405364
1567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6337592085719777,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9937013111032216,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6321146421444026,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0506715605716188,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6687446310614598,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.04223388966829,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.422892570294506,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6835139340197853,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8910094086984572,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.750901313171083,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2071022774683924,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7638734911676166,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3993805078329857,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.652042676201294,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567
148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2820699490970742,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3591944874653197,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7046419320081427,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.218728862150945,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.548431717183773,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4829199611255293,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9887096392737984,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1866621633032945,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3408192170746536,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4591706530240252,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2803885587977757,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7665084359502405,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5939332523486656,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.560267083530937,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-1.796778649333592,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.528468506770993,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.104678557754199,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7490634911116831,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0995487230563044,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2269945167852476,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9607885147997424,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2737741086559313,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5357958122650572,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5657168897235714,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.08687587162153,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8449740834981572,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8794845379240916,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8576247318324046,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0
.6374174253501083,0.5504370051176339],\"weight\":-2.668555686806261,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7792962867130826,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.943445887646523,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8711722427128021,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3410183858459463,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.484210013616401,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9827721479926372,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.406377952613741,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7719014994607907,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7594121891476306,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6231079449609815,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6915637675990416,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.574249656009918,\"sequenceIndex\":510}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSa
mples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5068634977673558,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5249783630202474,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5152033415481417,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5272038459500563,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6001688204671001,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5317454177943866,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5390384509225771,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.534359297387376,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5351669704209369,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6109078720195577,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6210817266876889,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5444919182650165,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.599228915024357,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.606863437074423,\"sequenceIndex\"
:237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5808824402038705,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5949150318621946,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9629004213386375,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7208076163201385,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5678830695710431,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6631359909504524,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7158746846699892,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7243292976884375,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7729073409588213,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5636010557233948,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8387837308398401,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6512348778369313,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.684627632318572,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6953237780389272,\"sequenceIndex\":3
24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7118724195755398,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5903034036969197,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6113515610872363,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.603188023556497,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9213213082902447,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9758009669603028,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9673224288424005,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7929881752136811,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7430616646035791,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7320636908692261,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7167876274042788,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.831679290374411,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.931694499466751,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7869087098149725,\"sequenceIndex\":385
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.826794305397445,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8086820947558429,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0417905164225576,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7952806537866556,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1660403948758342,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9424131969833678,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5917321918057208,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1181986144461304,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9368006145748159,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7272298625296629,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.762041448813835,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.080009109089037,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0469784273061555,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.736050163671123,\"sequenceIndex\":113},{\"
point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.799578782749118,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7564820813824161,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8932265281523211,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0735529052114023,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6999419072803632,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6303623851585866,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6464251320293698,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6877266767492101,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5467790226294065,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0935560167120182,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6492764036944796,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3791436283402454,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0920252037208684,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.661940374523941,\"sequenceIndex\":70},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5183131667863947,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1161984811371752,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.45247176258811,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7868623450095806,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3595821360940836,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9802589399761998,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1499061681740828,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9420406268798117,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1939161498043156,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8668778395821551,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8925041246817846,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1070739378706018,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1360054632705092,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8719813314075873,\"sequenceIndex\":20},{\"point\":
[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.065446997579111,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1658307711519125,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8845996574654575,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8214377196693483,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8090313724589803,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2492535156345985,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6247764541087204,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3600475285860365,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8736533511997896,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2949800939916982,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7667990029016316,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0995074953257358,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2811402021868414,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0235171982411095,\"sequenceIndex\":197},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6093770868745867,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1413944452206355,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3233591379706129,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4599794974576075,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5316529781022508,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8827993962076878,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8308324679612485,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.467692594751057,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9200020760595394,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.647506663909057,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8075614881530035,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5131160869491957,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4089978871445428,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9449091111602756,\"sequenceIndex\":7},{\"point\":[0.73096
7787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1334353980757137,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8256067895973387,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8786560936940074,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9961241120207079,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9452127004259228,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2996737464560064,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4003561074642408,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1019533136771018,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.416605465895425,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7450128027706133,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9980083895853699,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9354495143212924,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7586229408357751,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7353476194767626,\"sequenceIndex\":253},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6967148573966611,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7149171608478486,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5125771168503443,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.2995211427525675,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7740868801051266,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2793752134417544,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2044963438141463,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0237139276826106,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7154481183718822,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.560297341738511,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4158007750899735,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2850585225965168,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6074420219276813,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.304875418446263,\"sequenceIndex\":463},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1780940633940826,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6089872734219197,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8031365453627313,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2257289568545864,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2395529448116283,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.784461622215491,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9258114961437602,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1402296913674554,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.194402685731637,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9792108705963631,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.766524771528255,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3968816619463436,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.982605933438259,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4111070693596193,\"sequenceIndex\":445},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2095700560725415,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0007970991430177,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9947912602294147,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4529889408659495,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4506468281866605,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6745000607028,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.150597038081514,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8783718252422763,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6162118272342403,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1267200099219585,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4839011131713375,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0247669691982733,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.550482509748206,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0290636211743187,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8817187932198841,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.351707998193851,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1792288325556264,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.012223082314785,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.503673479363684,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.15548625419936,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.842322705495397,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.236849227266275,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.781062342088189,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1057428341103894,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7237409860098674,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5302325516136672,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.830247400735522,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.490340648836831,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0514534803555238,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4932692150174163,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0768676422294616,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5104058216319611,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1940326501900904,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8542130790999516,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.495789533135124,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.383150363050087,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1713927893203686,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.867559338255734,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7397301533926686,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3142448278297187,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.432996839923339,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3997311369339918,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,
0.6374174253501083,0.5504370051176339],\"weight\":-1.104659590081106,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8722929196657654,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.717091535693213,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.360385931338322,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.438671897810818,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.455401043577622,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5253923538173806,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4811473328237312,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4677069371382212,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.637172266419253,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.43178556723566,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0771075275699025,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5940052227241313,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1197886778973976,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.63741
74253501083,0.5504370051176339],\"weight\":-0.8520109105475945,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5347273534619092,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8876436003206685,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6975299978884553,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6825344904822765,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.043573659797206,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.826890268714669,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.410531619988267,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.654062118987474,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8535391789851574,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9511817073983242,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.850860607470206,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7827801386103657,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.686668363749988,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-2.7230194422815703,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0742831458281654,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.157175003670858,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.448429568864497,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.136541132649379,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1073367968175165,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6111859753168691,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2834408128830135,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4817289533555658,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.924467527000446,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0644200914528077,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.020130152126836,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7883577706154177,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9627865548621406,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742535010
83,0.5504370051176339],\"weight\":-1.5548993483152354,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.291267483541281,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.427198639853804,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.067876338871545,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.424257650289127,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6254305232805284,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0133086927678534,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.82423643511386,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1484227970691605,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.73241317345649,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.031318922581167,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3680085121233,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.278842370183535,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6157064996553374,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-1.1375343263361457,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9840249252632839,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.409231079827465,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9627971910820161,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.375956989818724,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.38204828101391214,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41192701421709216,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.3942871148945088,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.39147876595221603,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4402745538565418,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45411254213006874,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41678655247565355,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4021644228679754,\"sequenceIndex\":421},{\"point\":
[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48119589432205834,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48390381909790514,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45404100402587577,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4490895126540375,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.525609681752838,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5728488739246892,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46330494042440806,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4667628180183102,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5483970015014177,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7095560131581206,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49784332738068304,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5657421992941672,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5260414822809162,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5085728959156779,\"sequenceIndex\":94},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5017689530131889,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6601617888722145,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46677603200834294,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.453278007937721,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5370519046158729,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6840752684307437,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6427880969531861,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5906371128769787,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4989873171108715,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6881089232150426,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5651145545808014,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8507116359286856,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7948654362362931,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6784349510879903,\"sequenceIndex\":304},{\"poi
nt\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8123476014937865,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.003551158590757,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6444665808953434,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6594322360203233,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0054348855447786,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5733335915067519,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8186550645145098,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9157418174833836,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8494490099167883,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8786917140959195,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8070594618047868,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6836230182404551,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8691471630785634,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8137352495926454,\"sequenceIndex\":204},{\"po
int\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5598155192199581,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.571507483140447,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5537600269322952,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5492620709701801,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7185441821268926,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6456558359515389,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7217561602042379,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7545822949586986,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1114399373228732,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6436171367950774,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6345022223642907,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7687019228336709,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5015480039964209,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7030892254888077,\"sequenceIndex\":410},{\"poi
nt\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9750960659052128,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0683354904162197,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4750251602353757,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6490832665919165,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.838455293178337,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3693198228955978,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1683775072865736,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7218878694609818,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2394769857208405,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8415051914068234,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0834072548403273,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8704219592429432,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2086311478675618,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0350126017939483,\"sequenceIndex\":397},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7550993933424044,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1263172263808299,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7087295778597178,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9756384908486928,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0107155459215642,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4753159166406349,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8631856838904586,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7597559553328752,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.420698618647671,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0086657591593178,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.194257880685538,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9113106206813169,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.284269791820163,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8568873687505111,\"sequenceIndex\":321},{\"point\":[
0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.382814526570804,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9082966137778781,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1560227012413806,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1363586240183225,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1983884389684123,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.08536130762281,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.261624008305191,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.911653029313237,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8335155728020982,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.346578466402973,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5659166313515263,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5689995093103046,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7385515765262644,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1221562258912001,\"sequenceIndex\":214},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0003533417648407,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7226761586996098,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1225358822772882,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5735747692779295,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1511499699210563,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2956155486633052,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8090880951970538,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8184462524535315,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0139099778337535,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2359749756453051,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0644296675389653,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0411100017201784,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.065465444977423,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4170231268850637,\"sequenceIndex\":242},{\"point\":[0.73
0967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7387392538513646,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.585139479227392,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6803607081932095,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6483919642666371,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9764282660497399,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0765291177634861,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5404894944497549,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1064254143501797,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4330769824926914,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.070749210802572,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.421803814275727,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.404492093093241,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1602863510478103,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7274865265078225,\"sequenceIndex\":398},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6407743926181113,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.591003957447064,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6714617020140887,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5906644741331257,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.533150015611929,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.362832925178965,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8875543410694353,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.152249107983622,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8983473600765,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2448133777968604,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.829027877755778,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.101902597093417,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5157093945323743,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3745952162890234,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.
24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9160103496807578,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0303342095712593,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9821601469049517,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3818555274444613,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.488254721694493,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1196607692057663,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.319178308527544,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3087295017767544,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.390280033737896,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.802464614187685,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.842716505224216,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.569620969166265,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.613151663924232,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9252073068258242,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.2405364
1567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8532855756843749,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4167911917955793,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3840962658044806,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.443844852340601,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8419685921423123,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5700003772875708,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.097246388570896,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7358589097183286,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5821208011703827,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2014889237355488,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2246287887198126,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.982973093273886,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.765982396931733,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6384753436877006,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.240536415671
48587,0.6374174253501083,0.5504370051176339],\"weight\":-1.44422348204136,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2697512665585806,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5875954576876146,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.785323133108521,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.517644302160094,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.581578687424253,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5772553247379437,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0166988368706273,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9364040345818427,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8977727240681883,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6285560258208824,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4814819232333587,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4608724904998027,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.122342001756343,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-2.122278788118774,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5751104696998053,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.0223025122186105,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4214176761526236,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5535797886639067,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7855258214240275,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5273845448819074,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2846979937567038,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.831322053736005,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.434039995972805,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.96336878921894,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8181250797670128,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9413216149801944,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6082729835921652,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374
174253501083,0.5504370051176339],\"weight\":-3.5129188794790744,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.94463844781503,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3271451596651744,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6945574916142226,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1629020495730615,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8641137896818389,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.309734500108227,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.305487361384865,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3012104644586904,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.468172552410225,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.398834718710973,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1382332252909364,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4858723587182006,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.057561998570165,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501
083,0.5504370051176339],\"weight\":-1.2052965218137652,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4110826764343731,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7651106821571176,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0670776149612238,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.084192914008404,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.634263826957662,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.992189273758706,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1443187242211037,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8316514521878595,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9082869660122277,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5808135334900486,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.016743410204568,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.66059762588516,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.18605290654646,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-1.463733445454966,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2363320151348693,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3213644956069517,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4517242051468475,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.500773781347804,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5086038867682012,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.722470916877915,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.182219937127718,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3555611562779815,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3607067686382344,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4019177445133613,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6393679186135974,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2842439447219043,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9862571403759182,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-2.5169835497160555,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3199620402167227,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.573472918351591,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6019965441087647,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.368112061680178,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0139685752038534,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2638454510619535,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8478652187177875,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9023104838165564,\"sequenceIndex\":511}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5024753368840857,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5080933275580308,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5079930002282356,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5236701093684022,\"sequenceIndex\":476},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5798242727465741,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5915994276332387,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5610183916697495,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6262704194746648,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5263513793338832,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6266649769746376,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6374765927145796,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6347642998216707,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.601618465635262,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6708417134493972,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5930836622081311,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6269432366636343,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7147553199866872,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5394907909847405,\"sequenceIndex\":37},{\"point\":[0.73
0967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5356312076040994,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6333580844148039,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7473174944277143,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6566698826736361,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6527068519953815,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7192946145925907,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7339385299099045,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.614457465391421,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6218850445026209,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6724079678398011,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.819153452103271,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6602748909050583,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6981300666112301,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6330806156040021,\"sequenceIndex\":477},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6673775407694705,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7262124992289085,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9078908062566559,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5787588852329881,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0207036055584817,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.580684573471894,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5657347649965584,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6849578829392312,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4155088492218078,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1016869123767548,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7781088001098723,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8206903258283657,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6777033691055732,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9804632209010425,\"sequenceIndex\":391},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9083931997094808,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8375032997941084,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.779885546853009,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9659499441010011,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7532068533465066,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7481350412989376,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6915040528294829,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6362379386632497,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8204729756472278,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7022396400920632,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8033504087862564,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8435729843875013,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9812650198407341,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9526638560284353,\"sequenceIndex\":427},{\"point\":[0.73096
7787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8028370564191554,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8817735866360787,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7348618823852462,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8078183686730194,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1074787495632084,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0508128943492694,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.712572937620918,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3542053632443614,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8730719101095493,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6020334960520597,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0121913337240354,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4075213951741774,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8054451103132236,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8761918050716797,\"sequenceIndex\":74},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.079048321267219,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8739397518434175,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6457709718690119,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6177934323565214,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8597757008702476,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8408837733375113,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0769607191407926,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.207660723582936,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4461668376740022,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1491906412590938,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.078947893379366,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8925914347783251,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4579739011387187,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1919580314972995,\"sequenceIndex\":88},{\"point\":[0.730967787376657
,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1055133700609476,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7799630956489743,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4511336096591403,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.110644436940822,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3566554550992058,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.396911414537891,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8041980459348306,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.034539933276764,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3104844738995198,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3783677841426747,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8602536002317314,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4581052882389454,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9870367267440114,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7577738930205238,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0
.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5833702637194016,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0769309271587806,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9076151583687426,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0514076578742644,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7189762500083862,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0838351310463539,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7171724844540559,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4739959593829899,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.888614167241276,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0855459316489728,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9043145629209451,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.337257975250804,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2810976889495338,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8843145646574062,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.
24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.199637035487792,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6571338692450577,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5538466296940832,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1032724591434429,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.177806413821199,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7899844360534114,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.395760863536974,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9434426946474925,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2613122051937933,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7448990687065625,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8077967627980361,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9454425409751943,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.412560031911186,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8387966605085386,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.240536
41567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1502118968268795,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1286660126727146,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5638324889757218,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8598891494501344,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.826266125091502,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.640987212772707,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9571126242222374,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5512813137227437,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.065399822420787,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.734767442721488,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.165514709519218,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7763700680787282,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6949680533380589,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.371093042852682,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.240536415671
48587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8000828384195144,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9483795521320406,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3792767175327234,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.317942347378974,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-9.393883305112322,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4832718735548727,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5947675355794144,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3200179310962443,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9625201867540588,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2193199743817211,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5960965196407673,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.992638451195355,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0718681249716395,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9527307201882967,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0
.6374174253501083,0.5504370051176339],\"weight\":-1.2198428034626836,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.642840652614644,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9482743398316171,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.482370970282011,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8373178095318248,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.994223965021067,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.40590523002778,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.184980194188484,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5350279148964021,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8545138177333134,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.180647473092042,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.148839056059376,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.799711943245712,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9120935946188189,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350
1083,0.5504370051176339],\"weight\":-2.294116267761889,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9549659075524997,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4711770946372664,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9592161906859926,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.612932491828352,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8423306256032999,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.23317966678314,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.609381472436669,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3000558438861405,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7032513271231786,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.149870212483826,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.098183978762898,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6249283834545012,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2328310880818387,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-1.586292330387125,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0360207548649942,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.355597853256601,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.327754117377899,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.185674344998144,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3125997257713113,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.272253459861274,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.3593904878592955,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.742953165210485,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5993639948003107,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.238797021329826,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.901247881086546,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.399297944873072,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.977864264353146,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-1.4918683407874518,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2237262440351977,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0254033660468538,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.877971299750405,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.905309208794016,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.864906391782169,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8710411457184466,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2220553824062415,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9384139482183798,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.559036149293423,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3184990864921944,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.77941943520946,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0921925910742227,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6298903453487568,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511763
39],\"weight\":-1.680011476470475,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.742782720311456,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7047986324458846,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6022724496372476,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0355771748832514,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.735021332481046,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7397643965538645,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.535785539690684,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.152496839846353,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.693995594152332,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0884271061576343,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4439046368071713,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.198187820390376,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4815678870643803,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"
weight\":-3.8895151831346197,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2824355036151498,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.9681025086906825,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.122399234579905,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6625176153095358,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.834013745062748,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.389211630750894,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8829815908339707,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.47486839983935,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.495615031862007,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.242569114545663,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.949347252373557,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.964564483209742,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9362231851977696,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\"
:-3.2421474767009197,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4320049600007496,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5960343529179477,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.416057416652445,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.105580825493438,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.669048899720794,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4114718992158535,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.340496720429176,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6192096389133588,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9711869015358098,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7912478097747524,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0026181880600282,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0883032440055413,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.200407927975351,\"sequenceIndex\":508}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceI
ndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4696946263196493,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4775503257462027,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47237376250528496,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5081695830265399,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5449656941751957,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5373383766116789,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4831229795266621,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5105212670727101,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6047052438577166,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6166017475767,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6550458173574275,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6195835590396825,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6028603892375274,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.240536415671
48587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5258157854925499,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5375954720177065,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5382681609148998,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5419730005084289,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6749002143443674,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7107481918152774,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7272217162441669,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6655853442091085,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.782944887412175,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.753267969035806,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7094376545366144,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7286306322152968,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.677836077891732,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7275553590453897,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-0.6462124113917103,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6101945907064006,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5386214682890967,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6536919529656744,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6241887358285654,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5736346711943635,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7281421540621088,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9111669184942083,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.728289567653208,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7159092635540975,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0249243072569232,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7620448363361961,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8636807152777546,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7591296978855409,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.
6374174253501083,0.5504370051176339],\"weight\":-0.792570950121294,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9527519778265503,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9126642860102236,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9935972671907854,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.966107468761581,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3514758782187808,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1124425864317038,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7324048708989292,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7478740735566152,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9168125475325449,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7525267153959594,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8776605427553614,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.737410914882925,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7672442106478025,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.637
4174253501083,0.5504370051176339],\"weight\":-1.1700034048853265,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6528301986878218,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1165315460640433,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0400154037169385,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6620928105913546,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5876313532512488,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8203327468251481,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7659396642829901,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7155782996431368,\"sequenceIndex\":512},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8553624970781566,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.774066701326078,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9005295223348577,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7576412494213441,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.742769572990515,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.637
4174253501083,0.5504370051176339],\"weight\":-1.0804059188165602,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4912237510059523,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7523747591390627,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7891818369136666,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7462445484908358,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1910967415612745,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6317982014330783,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2563885112467152,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8301223096457419,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0116997712291467,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9494922322240292,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8486790977992489,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7784652372540363,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.089999272827802,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.63741
74253501083,0.5504370051176339],\"weight\":-1.4937819627913378,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0153258682859398,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9877671892458211,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0436997818623996,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4843316683045757,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4944686055271978,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4585241831794375,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.014945405218839,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4697266759459438,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.154313528122665,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9917065284176891,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7934396109310387,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1579451316281155,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1946363841147571,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742
53501083,0.5504370051176339],\"weight\":-1.7220003567386855,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4909938164326852,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2189553548999241,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9733962645636977,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2779956463085285,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2398657010120469,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2374696344434701,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1968866259695483,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0776089013324897,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8829403314475244,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7707379950365119,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0801562004877407,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7967326801486523,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7830465130472719,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-1.8802292349229264,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.170688768798378,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1321130089369589,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1953749056609189,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2256888470523644,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4538810010766885,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0601765029972192,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0410657101366674,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8208806878702102,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.857954303445751,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.617764890350269,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6576608953429857,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5751273604433345,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.892698841444881,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-0.9960711482971141,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1552540519913785,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7623895097782043,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.853406484020898,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1148004384767947,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.211565494666844,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1832630854892874,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9891356648286622,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9642870964307232,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5841273505784865,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3004452156778277,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1460090379887533,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.909033008343921,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.72582212496615,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-1.7193809250311909,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.270923637792374,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1649458493556715,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3777285632808383,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8848499479979113,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8103801346042628,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9874442910185145,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7262398257114553,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6106704972866988,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0909988464470985,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.610406535390216,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2036227795194578,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.202070698005034,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8778003657153102,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437
0051176339],\"weight\":-6.936274594021773,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.294310256277842,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9103049875463016,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.247275416371403,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3021135550550025,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6730899767142582,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1444959595296056,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2215676540878246,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.171402179883018,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0876162034342824,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3800283419918533,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5974117172822582,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4628946065677677,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.342312244800927,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176
339],\"weight\":-2.7541685124267974,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7598679026750679,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.928166261242888,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5024184015628248,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7952845040826055,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.004364971205934,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.66033763791692,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3195892801905587,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.8091381915796205,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.57389092823718,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.727423853065797,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.269552145887816,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.807892725594686,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.157248211884078,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight
\":-2.6644748459135523,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.710332725556065,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.25938115979885,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.507402233021038,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.743900599705851,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8205849101201808,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7550138259684815,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5546654441512757,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8388445619891227,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.663814605255016,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5554311296262717,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0742864481134675,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2928473270987895,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1809729709814674,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2
.2406086723236083,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9076964834086987,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6325257998969973,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.359449107963409,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.532519731094732,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1005580384312674,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.331483965808917,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.017794442483172,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.668546292424617,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0060059243149015,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5912528489720927,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2746014986320995,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.170336596265753,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2462123806041032,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.086399
2384287844,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.524460834040532,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.586300068264179,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1800346045841197,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2275929571046744,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.018991398283685,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8996520354562125,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9523429841078624,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2956428719802089,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0132112096126247,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7135653430045055,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.13122920030905,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8448389614048804,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2629054765522965,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8414332445
69187,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7996729847634616,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.479806808503211,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8829277976678425,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2198925417392064,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5366890079313524,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9401847869170057,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.226104614747713,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2592197470782496,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.274807908576492,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6570076878263036,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6672520043104657,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4203976955096558,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.820756699721024,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.82709727037450
32,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2043025430298755,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.236379675313028,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2678280306571414,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.074757718055375,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4906155876817095,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0203633576310347,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9286964906518795,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3855576810936767,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.046302735379834,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.850804077253578,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1935481832409955,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1913272076566073,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6453861865493753,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2053792194035005,\"
sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6840317802255105,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.732642719822167,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0110788493606093,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9555904064099788,\"sequenceIndex\":505}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4196007207288104,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4648047377957223,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4512839909796748,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4759247829174207,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4917215518568215,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48447961883854684,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4620636794877223,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4768844046346311,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-0.538986314374352,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49318617229695183,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5075242905468994,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5171971220724023,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5450717252116043,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4665250061177509,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4777815656973036,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.607147506383768,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4871160439477013,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5665978610972144,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7225433627589426,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5554649956622248,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5908940821446554,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5181247623825352,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742
53501083,0.5504370051176339],\"weight\":-0.5724430073545408,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7014159785597991,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6368686839544851,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6285791552881326,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6117637253396461,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4747632347634445,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5201814665984558,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48398424095857695,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.576235178248377,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6313169509097731,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7409078983124034,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7977876311517901,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5450563447986339,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5734789101710448,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742
53501083,0.5504370051176339],\"weight\":-0.5946344741282985,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8133662422359764,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7968998653788153,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1443797922858805,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7237302344213372,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7915860629498157,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7797706827121966,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7395624243312959,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8366526144540471,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7309104730631861,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.710265203116906,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.446405910720523,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7017607121871834,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6558751560739683,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501
083,0.5504370051176339],\"weight\":-0.8134356449801224,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9680084046144903,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8233767048949114,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8611364066375463,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7826218172780457,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7486961205716735,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7655835948981459,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7673757833882426,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5699727873514162,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5461659743280323,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6508178736374886,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7076864264646774,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0582284030781495,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6889165271719806,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350
1083,0.5504370051176339],\"weight\":-1.405856490045347,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9007045703767131,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8334835139569536,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.445664201068942,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2102722614451422,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6908772989767378,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7891679809327178,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4649011444352562,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7598580022415139,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0749441825578854,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6810074375954556,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8164773487028439,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.872750243487472,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8031150464455185,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,
0.5504370051176339],\"weight\":-0.9256568732315252,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2667201652125009,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1461754680559328,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.715711875095329,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3827894362136224,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4436598473336437,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8855415360116807,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.940283773380992,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0306663493802644,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7619631766683634,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1540788955072006,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8534587517052354,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9479332277191972,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.797186732277029,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5
504370051176339],\"weight\":-2.755063146994019,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7128911277119447,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8519468707144524,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5344006952984863,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8347611076400052,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.516911244174344,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4305010184644527,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9773426677184555,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0262450860837184,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.035096821365912,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4144278013267537,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.344481963410124,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3388515622256478,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1189548160233387,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-0.8330574604499233,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2105196389708777,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7513524038665804,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8399645683606085,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5505110809180889,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.178375258279429,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3623765386304134,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.201281378579984,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6600895258349835,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8630452025774512,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7866060252394993,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5288536019527252,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7726918710519328,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.661493837456199,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051
176339],\"weight\":-0.6383483924290202,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1053928899180319,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7896707349192961,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7979736390170256,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8036025091661487,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2507374095760597,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3226134075610019,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7053633639153597,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.246719441837639,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4476262628085017,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4570140650610535,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.614582120035993,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1378908639085654,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7497221320181113,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511
76339],\"weight\":-2.6123024639283385,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.583431142734644,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9022868868899385,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3768833537524747,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2298166325388624,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1979559919287859,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7124366921867307,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.481260804943234,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0502431812997206,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.485717531958209,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.25489845072989,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4660321140195474,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.627032373589698,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.023448539739238,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-1.326300355217892,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.205427243394475,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3827220522443262,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7330392357770243,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9942896944444999,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.03991568996046,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.614806925160759,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2281481127635292,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9779840182041781,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6244968987371076,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1783581195586454,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.531911997059016,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4053984235133106,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.221758393183098,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1
.4243352099583726,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3823451108934752,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.553862400139803,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5240033115876057,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8458805137090173,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4812797557464112,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5516357121137463,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.521232021418717,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.387505127928136,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9950589445758535,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.128111786804459,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2258575605520903,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.405138174215472,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7866106787877502,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.968411
4477137165,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2918243682814743,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9428360729507184,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2169122992115193,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5346624795685124,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.6424748250948715,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.353186225419264,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7979074943661357,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.801221125838504,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.79729961802777,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.357587041627219,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.161019672171821,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7833410134462168,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.473101945240517,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.43362426901075
12,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9561142546104282,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.121504310433089,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.807581241424392,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.44786030411703,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.946340848537134,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.507420199895164,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5557527971001237,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7261295445119416,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.412309418918136,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.063438750559355,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6025211951779776,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9189159460299399,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.421342434123618,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2831249920730483,\"sequenc
eIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5373902538278903,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.772344307553218,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1390583834530155,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.541040351597412,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2855087358956596,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3778845885549376,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3191014697749095,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4717612774035564,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7238365479662527,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.847674178962453,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5143445956704198,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7041384052811823,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9835727287915879,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.5060128314933054,\"sequenceI
ndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8503841374008376,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.084719097596338,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.672984577005707,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2525459446349667,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8408103827459827,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.066077125212908,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2567809598305524,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.42181668843132,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.696456035534953,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6597004106742226,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4204950489219077,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.037472773525387,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0856350438745537,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.437404928249996,\"sequenceIndex\":
468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.586464857246426,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.047949366722752,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.128128757737078,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.914352594414737,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9458785936418235,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1434787030535682,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0087465797812603,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0781033861499383,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.44053340955652,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.039751262241658,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0684546032408897,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.312256588274903,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2189769960237697,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2201160197208651,\"sequenceIndex\":505},{\"
point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0740754902979877,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3446776251742913,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9357298925756506,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1122970557275091,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8873252490746393,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7090237246232742,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.528583622070825,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5072343150083285,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0051040522586416,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48546103845260724,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48824167919150296,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49030345506317896,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-0.5082012853505424,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5026800279364845,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5267346819077205,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5045189776543771,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5370748807557643,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5186632039663763,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5337192059722647,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5040906496531217,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5444675739612167,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5476565325189888,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5229490980214404,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5463293783422043,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5804071206493213,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5728748208025849,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051
176339],\"weight\":-0.5300375430179589,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6560736291595339,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6372696998900425,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5693965398964446,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6517125754043954,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5317962255366033,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7220426702110171,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6003403792037859,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6226314576785653,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5989370423726712,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6011244801100216,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5510726577765621,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.598107934654861,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.733845694209438,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117633
9],\"weight\":-0.5847479770772789,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6934753659723208,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6281822919065119,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5857731703293358,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5329783403868849,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0850353184875798,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0502525008019674,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8709274087671769,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6622157340402863,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7709853514904276,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.100530974761394,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6993045273709106,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8233128251624063,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8693284429699507,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],
\"weight\":-0.8656449480374425,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0857888005482597,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7546978925268363,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7911533880359408,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8754698513422715,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7446417095237722,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.654903142689272,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6392365488991949,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.891354981937197,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7687938122134693,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.88703987469759,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6038682632959768,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5916661359570078,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.703204815439309,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"wei
ght\":-0.7103237489876261,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6425345749051617,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7478271280200945,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8194718085224632,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7515133611171482,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1888785649763802,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8047628200361492,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2451642760012593,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3060377121407274,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9251247395780051,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0368764975097036,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7673455395117503,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4492063652402198,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6800482060478708,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-1.0859664771871145,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1371122271851066,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5107963537422973,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0910043148960753,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9613076593953008,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.605169607219219,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6485739243334825,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.678458166909683,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8254427813297099,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9538551164331011,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.447377288988498,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0159756741969943,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8643033658578838,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.742402923296259,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight
\":-1.2090350173794877,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.181679988642882,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.367864177167768,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1345864212269219,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6600403550511307,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8710978852810531,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1833879289108424,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4118494726884967,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8331081268469035,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9095137284846053,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7324796145085781,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8441942471119394,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9715197016716692,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9247725004960733,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":
-1.3721482150958404,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1191788044061388,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6881672713613205,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7311969054223668,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5177055638147379,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.739130686479158,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1613006605134257,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1291021673442727,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0211833154331942,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.166276859586318,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9630206527412526,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3451959777462446,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.609596599954885,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.967271621218315,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.91
9512518883659,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3476599127489501,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.08911299149694,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0855665776817445,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8118154110313254,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9738702155257553,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9535974039504134,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7245459940470211,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9470806308788043,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7787720627469491,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8824351518349294,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1654738768728283,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1931542958834833,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5322614103169085,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9172
23183041928,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5856029233916948,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6545105364705754,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4777212187104665,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7687493858917196,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3293648139093708,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6505368993729306,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9355763532847243,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4725064753681147,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.092267576488146,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8261096645920247,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8311997416721526,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9933394373988804,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1954801910710737,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.59291800294
0256,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3073981418346095,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2852215583768338,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3925229374523242,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2497077015913622,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1899438515699703,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4929549647887412,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0313330759662085,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4072437022413657,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1342494927828857,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2025732364053692,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1018525549085103,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0076310200186,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8085524841510168,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.761607510249474,\"
sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1715296275562217,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.032031559042889,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6488640390686022,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2404146640460163,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0029922528908408,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0237519197120126,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5240695513056355,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3809518280538002,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4288711433554826,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.739768855399813,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.987495837688397,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.202573938492217,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.2430069520974145,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.062797172827161,\"sequence
Index\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3948123504093954,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.670866545122725,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9440574796236634,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5493968463300618,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6112122978704222,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.854308020416697,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6121062848262335,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8080863255273605,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.935261945412735,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8777159776690855,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4119848188195994,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9442264675431704,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.538850058098651,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9558615435646243,\"sequenceInde
x\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1011502631018701,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0706012312905444,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4478610074853897,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6673842636177834,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.20014070833913,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.059978277849837,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7136967272005936,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1141927869033106,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4748786054403764,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.17427739792381,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.96728378111496,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1385634661898596,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8618302510385023,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9931831567150774,\"sequenceIndex\":339},{\"po
int\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8851402402840296,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2524530364440738,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.763688643947997,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9175855723472113,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8251563682814513,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1476604674117066,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5991740910317478,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.118193881411274,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.742548271776237,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8771616961999595,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7360415971724608,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1199312334097735,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9053643455250384,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9225273510964777,\"sequenceIndex\":214},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1808144678531252,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9152764892735206,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3300098867494885,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1954478522387424,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4103170601314599,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.832001189926993,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1469368441786005,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3442055251290854,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.120527489666096,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.17660405232428,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0251645101121234,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.569945833880864,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5726601068448627,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.846645921776994,\"sequenceIndex\":114},{\"point\":[
0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0679206378971244,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.483633165148771,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2468382046309239,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6467774453805104,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3827763463649863,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.54748043654028,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5718492982058492,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.654608787841057,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0075567699246766,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2116898207700832,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.335921966391445,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.512177561279445,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.146542224444293,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0182623462272558,\"sequenceIndex\":343},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9237237305690345,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3905161831159845,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0217448373681675,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7831545863519964,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9473737696543414,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.649338930325164,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6167037022774173,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.311938041653325,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.525231328219062,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8498481758938117,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.771076507424776,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4680636525298696,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.510431299288723,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6326944614733998,\"sequenceIndex\":507}],\"sampleSize\":256,\
"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4508760769154083,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4585539020003295,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45977660438628254,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4598572915460619,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.460944885395275,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5012512032507415,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48822824227361855,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4619914103242761,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4814038450820997,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5182885087155084,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5927557753565897,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5559404267723463,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":
-0.5639034005012573,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5509378401375296,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5044371096121937,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47721572982783556,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5826632147997421,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7165451054631299,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8132231431786143,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5534523633707018,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5541446681716833,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6927871214916558,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8028336851857083,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6443119792537737,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6325469993360622,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6250315953758744,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight
\":-0.5785186863556788,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6265336032528688,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5525478632000301,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5112247845811936,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5667846692816839,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4943913532920661,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7890809740668542,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7538388176538424,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.237894893238782,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8398616296554009,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8041231338685754,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0297484494593518,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8414914403187265,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7472271009695204,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\"
:-0.6469572856908988,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0504310809933914,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5988614756053623,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7648978985591133,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0655665874685276,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8076328868185878,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0016129751326923,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8007266691843398,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.091130220185101,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6871046290244163,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1728270560660161,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6807561386987515,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0310033883750205,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5933539253121627,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-
0.7227769658657934,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7413782034089875,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2283566261545733,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6032118047696998,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6837268444600425,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7558651705254581,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6758488549583614,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7413021469546793,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9380518663371702,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5304718013412167,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6308443405266109,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2737011997506482,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9520531309926566,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8063973385700745,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.
6561043105263182,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.430439900906951,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6801759410165393,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3380700863922776,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9731163153994493,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.453137872349073,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9670268104776184,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1310758434423815,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.292977485032187,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8463595647872125,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.207549246158878,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9063934505807913,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3092789897418977,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7360368252250349,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9666
655175800144,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.578166289562913,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3666644029124526,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8399617223347223,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7036064538537246,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7827701509005286,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.376960209016898,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7115336759109427,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.376795273809599,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4071562347634208,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.001084280639619,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5274432194583827,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1803494920094653,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9968038203069312,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.936692709
6531507,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5470159076325412,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7942320142538417,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0795268693772666,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9779144423715521,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2329476563100457,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3188759274502382,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5123408072827158,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6848037832111173,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1742246080047147,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.200350017015076,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.685152699519325,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6113459352806339,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1728236556783758,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.381741258297
9325,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.466109797864763,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5653656919158339,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.683566018629922,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.559533302283591,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9540833435584385,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3833225139972485,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.763228096511144,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4981905995783382,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.824734264881194,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8170288616539629,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.239533913350134,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0971243987873909,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.026263973503801,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7629350146897509,\
"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.071915114460894,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.163996812080282,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5534535966680838,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.46373977179032,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6678717565718513,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9542682705059944,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5152090408627696,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4929897101098566,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7960434123314133,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5303125250827434,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.10320475799481,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8757026696044794,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9128090390430479,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2563024169935573,\"sequenc
eIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.33666333477023,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4531168732213446,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7147241941497735,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3185414098171737,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.43578394778199,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.281995028195736,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.265142385131699,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4396727941940342,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.419043829849337,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5933205803377066,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6555561254962528,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.211300848970291,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.138793622855262,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.11658574785146,\"sequenceIndex\":152},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6681478410962993,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.692942709870044,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.229548668713239,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0561852363293402,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2988872943850454,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6155269929993905,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.068648012159996,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6114002475599158,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6894752960410573,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5488256993590301,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.7244486205241305,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2488401382680414,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4211886796766429,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7765874519867106,\"sequenceIndex\":493},{\"p
oint\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.2172474580415855,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.091155726561295,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5291057004101944,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5787036604665703,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.857079996660391,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5897727148555385,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8618370067460277,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7332353634538493,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.465518939859864,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8326334868365395,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7730672380835357,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0689663932412943,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3752044259639327,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.160289609591437,\"sequenceIndex\":263},{\"point\"
:[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5728280245469446,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6290608065572638,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.069888003319444,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.507982845661858,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6144157512808002,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4171350513998542,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9078613878490263,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.379464709029456,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.195197212350207,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6784596124213897,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.5986696929988495,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2076705256718387,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.870801913812779,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3886992585020388,\"sequenceIndex\":194},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5225025069857065,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.787038815667053,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5260254593059575,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.244258100265348,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6455055793013074,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.620728260493675,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.467016945487195,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.205601608431475,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.131182249368325,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0811857778657723,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3548673981516712,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.252503777696349,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.479509963611722,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6422066926358216,\"sequenceIndex\":471},{\"point\":[0.73096778737
6657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7936872816985974,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.92241021625564,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4882128529038054,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.183357757308836,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1329580901237915,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.973680972420337,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0758623164919587,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.681192098543813,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7307293456039152,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2545749483318378,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1829369874163245,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2766199333964339,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.594595790066826,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4951571182637913,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.
24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.393568671148253,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5278038296826537,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.791523508980913,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8047880335723305,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.0636531954807085,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.661028589699005,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9418648270950813,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.138305929089322,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7015365332484962,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3386152603691779,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3413088510230806,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0701633175664362,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7037968841481756,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9428018822044308,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053
641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.621660075727143,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3160497250820855,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.491916368855795,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.456976011847764,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9793024038750875,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3429430729303307,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1546940725342396,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.680169534915636,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6493329415142381,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7766474357859448,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.323738220387624,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6493663207177676,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.803482615836966,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8234731861146789,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.240536415671
48587,0.6374174253501083,0.5504370051176339],\"weight\":-2.129381956886729,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3007720867860881,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.80083053105747,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7573374537147441,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5545937468148092,\"sequenceIndex\":444}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4628559144039275,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46680792371176144,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46457581998014796,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4706042285762159,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4932468781660201,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4724022304556878,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47102162488074045,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4707252186468207,\"
sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49590253821122987,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5403925917504349,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5069781461311791,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5085609555357291,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5064302655666227,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5349935635609494,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5841155012745776,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47475853804835905,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5032145548131917,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5702231342001687,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4998497579529356,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6280619034114331,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6100924142423066,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5705030836899959,\
"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6616189077499723,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5536964115353279,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5770361143056953,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5607997472841934,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.633508469891156,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7446208549924316,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6538732504981697,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5950728146506006,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.685930498044856,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5123226724013653,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5542682460123048,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6798217074166272,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5424870875919175,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7576424894642104,\"sequen
ceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6602750244053508,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7270922143862881,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6220295867232986,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6818481241878374,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6307792166833867,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.660304941105105,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6508957933289852,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9795466409854137,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.481765221898312,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8108035458106635,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7271979967908717,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6670867687140987,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.166913337849211,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6308755707205711,\"sequenceInde
x\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8396780631516433,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7146566243143803,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.663816623209334,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.037826716689538,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6700144087491546,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9287863592947421,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9724143853930025,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7888337164108656,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8718094671574007,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6920420179922167,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6746547447403121,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.09394624172279,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7385226554707961,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7672731322427802,\"sequenceIndex\":4
94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1938544850766402,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8104411932986846,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2999271311074398,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7193458819228266,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3712848007849483,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1880067592715085,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6448369281320693,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7685114857684698,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8711895409402877,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6843717203395214,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8876874711346681,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3281874343781133,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1767212013611867,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7129366424004177,\"sequenceIndex\":261},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7887353613921221,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9239002533632867,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7861272007644827,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6589842212017445,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2853483882653935,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8145130018404977,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8431621578821662,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7112754762658783,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8913500222622973,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1196162857655265,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.541134528790261,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4932227093897927,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6589818549798108,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9619941321247761,\"sequenceIndex\":92},{\"p
oint\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3782893717932576,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.028352397506332,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7973749917010404,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9637160740236854,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9213057135320203,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1298493393594633,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2965100272247914,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.918824535862091,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9402284504383703,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1361786264250964,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9158095818482302,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.165481865482143,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4377652748879903,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7679738403728413,\"sequenceIndex\":421},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7868214737483942,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1613583593802241,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0624577257111796,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5200339802755198,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9657860666875834,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8996472941477927,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4399112626828354,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1895890766603436,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0309997494021568,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9729343834555184,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9055221652821132,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.687098835930683,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1533025447636738,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1938760927966834,\"sequenceIndex\":240},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2280593994956845,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8646979496980892,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1775571859134417,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.606476067734801,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0398884980553893,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7932537165453415,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4669021842007341,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8727729267286026,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9425786505545768,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.113259050433013,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0015220540082197,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4849230976352314,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.393248656755901,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.535334643986675,\"sequenceIndex\":465},{\"point\"
:[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.166821039084895,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.231883692783792,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5075193436780503,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.924935074001095,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.228488894215977,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.832233515280021,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2163345722102055,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4996640508407142,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9858970508427901,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2035893734174357,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6444708256512235,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1534880634843514,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3474642252302431,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1190625509672942,\"sequenceIndex\":316},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6276162766781384,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9522967541777576,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1445268959955945,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.268039106367208,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8149150546488606,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.110127986605183,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3810188242639323,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8073109603713806,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8380555115863662,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2031254585246027,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4385050172900535,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3517648221654412,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.562420777179426,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6713006029421864,\"sequenceIndex\":10},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8144390595497589,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.187499203506212,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.141605970371257,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.402504684192796,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8998516915355235,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.659757657054002,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0389883420808395,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0597041085706156,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8998458885845144,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.044299708883186,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7391023109749506,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.541177786426053,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.874009727919253,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7906177216851744,\"sequenceIndex\":11},{\"point\":[0.730967787376
657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3030724923924382,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.83482730990789,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.535005196138084,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4553613297830874,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.617017781244884,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.395075666993777,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.170478399240818,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9803761195687312,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4000976721088783,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7276548775812388,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6628003164410528,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1569373773445406,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1260802748280843,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.733131866485631,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4261091772059997,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9326933245182296,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.491085541785682,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.348639542747065,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2864185958981689,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.999781468663174,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.474477458891769,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.5254052659082955,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.287195981899793,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6421796697234914,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2195763320323088,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3807582163523822,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.749238197492064,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2051490047509317,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.2405364
1567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6253418810881195,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.826606299134421,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.316764085111127,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.673014358119795,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4645148947494238,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.493228224443717,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0008172191894933,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6749465396902985,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1656739219595957,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3392294437274344,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.546541419051171,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9669372840898878,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3118277593711374,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0420822627519657,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.240536415671
48587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7224749932963654,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1959961331914055,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1099151041820075,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5152700059321693,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.104970748402003,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.246737202379754,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0836982866715643,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8675761012704728,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5657905830103207,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.980411075277421,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.567941391377,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2225362859155022,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.848641932044464,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0666473680007638,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.
6374174253501083,0.5504370051176339],\"weight\":-4.181582268148413,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.236000420865187,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.953577217723601,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.490691529373041,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7532566572400934,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7550233194216696,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9280104290686535,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9435221210178122,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3619492597497433,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1453053870710064,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6477282214908804,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.767460313988505,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6005437446647717,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.908198151079769,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-2.507507897286829,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6236669832448687,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5133805411218426,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2240515117055173,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1973666796394924,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8430249951593538,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.02271847179713,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7699821423913096,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8541255753344175,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.245926388329603,\"sequenceIndex\":510}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.3995425460769366,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4162158074092641,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.41849477551061737,\"sequenceIndex\":240
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4191175449375748,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44684163512739306,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4583054405336929,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44200532093701367,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6469448750238475,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4328030280763182,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46073722769741154,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.536528867900043,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4595532229664453,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5179321955017963,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45063580996445596,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47366975818663104,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7688929561582549,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7849831402907937,\"sequenceIndex
\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7698670344770999,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5099419564112918,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5174900324012958,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6813481708572496,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5493940688160348,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6055131639218069,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47798693016204635,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7476066402022107,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5407178873635127,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5216791055557901,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5182278954412626,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5967503846612401,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5552741901078435,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5845360251349007,\"sequenceInde
x\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8016656215292652,\"sequenceIndex\":512},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9375571357864187,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.910067938840019,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8385306558223615,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9309167476758006,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9482840194913349,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.732243338503001,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5351372257691761,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6447246946187392,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8107812621828587,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9670472973741492,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3466063106194324,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7022293452845024,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0893807156015418,\"sequenceIndex\":90
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7758939914931601,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8319300039525801,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5461282761372039,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8577137508840968,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9381656763411356,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8464780430039912,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8715251907486329,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9122247149123639,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5379981042167887,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6170824331156819,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6596238804369351,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7835607672216225,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8415844887785129,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8757166685531299,\"sequenceIndex\":237},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2342925516152443,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7402019512547878,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5883632002621227,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6196708066736747,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.025424369282366,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6589072084165932,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0926876207678466,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1651626814380498,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5771022941666621,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9482947892801286,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6079754547814107,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6598664471207536,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1031839466001891,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4738013536694838,\"sequenceIndex\":146},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4556560090087758,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.317304268057739,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.739202906964932,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9836335458961952,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5831027819302838,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8505014450631332,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.81707205613777,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.018030933476338,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.200994147392315,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.927405907941931,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2070053587751703,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.008436711026636,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4249422226679889,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.247045440604137,\"sequenceIndex\":274},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1891911003330742,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8121943865049618,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6645661924231452,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1699921366427941,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9735225307485585,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.156119441504946,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.378172617263601,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9469161774265586,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8536012851132643,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.004901508570201,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1556358614616389,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4881585903133916,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0677478828307505,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0019441617714018,\"sequenceIndex\":101},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.004231727502805,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6417196573887232,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.736972312593187,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1361492591029285,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2774215791774726,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1532811625445163,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5739810247797974,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5574808554304629,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6336126081969338,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3246088376506033,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6874212178829633,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7544357202824347,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.672675212675672,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8937457778251758,\"sequenceIndex\":349},{\"point\":[0.7309677
87376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9439509131329834,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1081171171026183,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0231545214321711,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9420367864872823,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.54818111620672,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7771613501236259,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7540498321246341,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0877472863191793,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.443192236958581,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1295521610200225,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9531687663087562,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6989185810432713,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1038514213397277,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7270562367383846,\"sequenceIndex\":129},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.910991291695049,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1474169423933054,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1659533485121036,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.998918707805815,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.310839660859463,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0605741073444235,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1892455454172137,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8192093440960706,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.376668786217457,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9696503776960315,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1140442601326415,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6652507095793068,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0101793899146094,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6995412787881286,\"sequenceIndex\":511},{\"point\":[0.730967787376
657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.21737737001588,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.7779478313117885,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6336744480474392,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9409929767340826,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7117544659789883,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.732460432029655,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4320288625574447,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.010994980181598,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.045540460205551,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.830369937787712,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6337327314262187,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6943921359564573,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.557206600273184,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8962382419288457,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0899776308947193,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8359919612238793,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6190933285739375,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0486132895502218,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5865501380872873,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0624639559775275,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.462196004625526,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4509467502693236,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2011186231392106,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.912212677148474,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7045321782397487,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.118261664182013,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.587836187478853,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4810111579170382,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.240536415
67148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9663465891790968,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.674787368742149,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2885484442518744,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.988793339581894,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3002302509286525,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.309286109630986,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1737529796148483,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5101216566350044,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.326261569309607,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9883359391068183,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.25442971400138,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8805692423702236,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8842561090332564,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6831091332771628,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-3.8685973933428226,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.9635238911519135,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5271005443815764,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4463080342211225,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.042219541290324,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.576914764258305,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1235473657285657,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.568975252271182,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.402409229492409,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.19302545181296,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2212411023445013,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7912282599796816,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8444390608508512,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8039251585122553,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-1.276868851579088,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5207957925520603,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.824231075869197,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4903602592252072,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.772465877085266,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.933563226114014,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0967212897636305,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9616538412186935,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.552143222511736,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.53538602580665,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.559480096679402,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7987126515037761,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.602828348473881,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.419150184492356,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350
1083,0.5504370051176339],\"weight\":-2.344252599090334,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8169045288584176,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.37672525507425,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6621640068620755,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7532143220215395,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.597227045126382,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9893358748494707,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.906266822928178,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1634819164066874,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7709329647478573,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2432522935945314,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9385073713148706,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6387164137538741,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2239436500365457,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,
0.5504370051176339],\"weight\":-4.493043227184877,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1253454164768706,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.660888282727256,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9011400825106535,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.092421439965387,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.33470380618227,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7167450284705437,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.277295429809065,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.087788416685804,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.74107650253101,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.270442970209395,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4690793646428317,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4537901338052603,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.214057342683148,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370
051176339],\"weight\":-2.767681865840804,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8973845429550282,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0217397823841816,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8296760411662529,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.433406017071492,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1344332795768386,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9782894876903323,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7943791195648904,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.300922830791807,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2993061400412649,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.765674231917039,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3620395629105395,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9438948781561887,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8923263168523081,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511763
39],\"weight\":-1.6772122219440821,\"sequenceIndex\":509}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49178394783537105,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.515031634389807,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4979383775663946,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5177735335622691,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5195678941022348,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5066024598709726,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5023665878047426,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5427485546787104,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.531756566624228,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5531479610430537,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5815706446048037,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6831644469176003,\"sequenceIndex\":436},{\"point\":[0.7309677873
76657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5472739134372002,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.533187361147581,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5065941826317435,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5789085315431689,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6128543429806687,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5730018901422915,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5618788986122074,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6977987539322109,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7861142031242656,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6075481054870976,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8479937129528703,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6972040313366328,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.844626448107801,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6322600486015657,\"sequenceIndex\":469},{\"point\":[0.730967787376657,
0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7294877939319426,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7434984026290476,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5522654720544814,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5268908819231387,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5247644712316228,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5952483279212435,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8049002656833699,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6886750062413588,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9846194931491524,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2239388406401424,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.646968397260591,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8130520420630305,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.70494736019933,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9168788257773521,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.2
4053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.74294526612418,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9656873259867355,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3563166615724356,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.73410400223902,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9779617564450184,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9860748848916709,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0528666503749515,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8811425658278259,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8270741330057186,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8905288439636359,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4556994044140694,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0097385083215342,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7031768195465843,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.932460481799264,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641
567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8050213953890998,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0852686120866595,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8522867367925954,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5559434684996524,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.596914089917061,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6498517026992696,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6216132882598957,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5431361578763585,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5360002986292752,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6126025750941504,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5974955176847465,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5719930275001213,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8083675122865674,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7570313074670317,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.240536415
67148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8292862891276357,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5011293971555808,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1264690703305091,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.679236158005254,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3568676459005458,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9460750238234621,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.139083785086284,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.11074790937139,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8588475086331487,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9606491434647615,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5337954685414883,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1241272316140736,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9868656821033481,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8984313893141953,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7936611417814303,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0318366427154053,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1275388964063415,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5571278897727396,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4025199886831592,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8665407010410386,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7483492381282513,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.247871071191955,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4072768849296255,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1448419944445076,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4668571725871746,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5456754270470705,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.42335173053859,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5276866266248663,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-0.9149857848960781,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5226299310934939,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2189028271935793,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.914950751294563,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.967053994519786,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8335337526068565,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0123834147116044,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3872150842312718,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.230160153810486,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1686133843403286,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6522983143338388,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0173315378113048,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.028414730887753,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.523467484351807,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-1.5405603259567477,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6828069955847673,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1318379388587168,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.922460275226691,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.195912767022347,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7108450692967874,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8931351008777817,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6038785389525957,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6969283840975413,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.29548172824379,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9875837150542913,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9642493990465841,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0494804505190465,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2719314499287573,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.637417
4253501083,0.5504370051176339],\"weight\":-0.9946140049249316,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5976677868179228,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0052824453713445,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6885480674108291,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7573167685917037,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.775979049077177,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7345012889409268,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.399594102614303,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4262281319464596,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3415119129961588,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2104753886140354,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1132829450538844,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.049584280151372,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0692790344268963,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742535010
83,0.5504370051176339],\"weight\":-1.057011503345174,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.720995480653709,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.30859284464909,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7516403166696506,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1271772269747335,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.069513947679671,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3510499412498964,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0317379702358824,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.843812408596074,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7274099835731334,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.567178146418383,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2006355040400185,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8956432799667415,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.500792208358161,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437
0051176339],\"weight\":-1.1624229343708594,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0734274061973634,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1952946639451407,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.379818802502551,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5519614180868238,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5957091300068122,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.001369154160978,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2841591681056264,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2525082662188205,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.736860786909597,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1865332736235763,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3588317215553163,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.106785597527102,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.070179719580513,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339]
,\"weight\":-3.6883857160647864,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.115943303004396,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7280157668114555,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7099370957827753,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3843634377095704,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6631312463499053,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.253946213764769,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4485165163516716,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6155849851994584,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.740214993280308,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4727589157810117,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3748143814056375,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.832987861763318,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8898662426542185,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weig
ht\":-1.4799616424185231,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5065839298555959,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.435669085187242,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.795322695165546,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3355503921408207,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2942991375469806,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.603810767067891,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6316388358244387,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.663911486678513,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.567508526316469,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8978497446648568,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.090585385910482,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.640083125063015,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2695159823594306,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1
.4704118456522934,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.017769041055218,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5649853372682094,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6094099105763175,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9127181747166835,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.962885721895108,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7573388082959112,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.393397786119992,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5835187048029606,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2759880091624334,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4803419773226874,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5799214875576406,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.419468061237504,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.574948223373913,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.351455
964751925,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.453442193315547,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3287037966648567,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0902388968051997,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.457372578187132,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.965865195121767,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2386403114946063,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.409421802891047,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5265146226170194,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1159369512186559,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5515003592046648,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.414045130657892,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8540053343614944,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.275578558584556,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.367172949906
112,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.618760350637668,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.840166673824879,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8266218777915304,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4140157115907646,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.697421691694783,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8531068592803384,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5997845269157933,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4419391060929139,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.886540640373062,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0972652853047347,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6519590143872573,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9644040000163494,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9255516200926706,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9177059667328684,\
"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4692437362432056,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2240521764925787,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.622406882790635,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9836205786459828,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.888019245992226,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6577002836149126,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8853473428973424,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.499229556564067,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2362037832556985,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8166585262956088,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.175441342638713,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4291596747124644,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0768515905423919,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6015269460104569,\"s
equenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.344995404011268,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.364982034151774,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7553721041151396,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4750196713306805,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2444055615232008,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5002513989052614,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5052019799725902,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5012684293382467,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5339311866410741,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5295296729567297,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5140253999030949,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5231682935557783,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-0.5460816582811427,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5544985623581493,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6153051714507074,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5469396595139193,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5257885429396376,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5140447165709725,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5657933498366777,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5264881958765563,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6515122253100539,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6056902748581363,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5654091219891548,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5678616772226973,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8257581106911485,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6376285105181074,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350108
3,0.5504370051176339],\"weight\":-0.6170047066120213,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6722743723361397,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5533654354222212,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6800967695987692,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5834458220289322,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.555736016683024,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7277951465946569,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6904930457854759,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5465924069461956,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6119872311900408,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7877176007283478,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6592655333736693,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.677405306341263,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6539072696739472,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-0.8440401366150604,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7328099827507288,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9882725320670483,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9336423026002105,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9900653296167775,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9394528496361125,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8011201330529981,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0047403121792997,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6776501512903338,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9216222065114452,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0296422592784085,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8560516033483953,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7081795190116004,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6184923844560283,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0
.5504370051176339],\"weight\":-0.7398888726665582,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9617843936272263,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5836126442150178,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7237079567604527,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6278405493954857,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.565501541202847,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8119923027971202,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7910050415599703,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8818208882512109,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8533583140044536,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5768440795029163,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9266592864067289,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6592204776318296,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6451440245271826,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0
.5504370051176339],\"weight\":-1.1112616062650098,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4399348171030364,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6801739672840708,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7697413211097687,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2336645955013974,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.266553698604022,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.401950302916341,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7851752298052325,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0835681560424728,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1459316160785695,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7878292113587726,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9632716958248841,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1056860555098305,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5877784168643712,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5
504370051176339],\"weight\":-1.2827244894862093,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3702401223812042,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3437525368669976,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.662225327751824,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.068672890186903,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9562761527644588,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9340705293485084,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8317666749219775,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.605619711255208,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5566241298205947,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9578724532379994,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3157033867862398,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0422042278781658,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8146013639231264,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-2.076957457033655,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0713153585137563,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3896594947980938,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1242491026776529,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.343789836513835,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.786013409428125,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7835230854611983,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.198030070993287,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9143327700626983,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9764356341029975,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0741998428696187,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9917651940791633,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1474873566431807,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3954843125209453,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511
76339],\"weight\":-2.0538134420361494,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8479271478569783,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6323054128961929,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.802980569141209,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2054385137514176,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7340460138190863,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8611596792633431,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5000358419914421,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2207800323039129,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9770103712113692,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0597791546865087,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1745077942293354,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0241076262622317,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0944048715586217,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176
339],\"weight\":-0.6313784433422487,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0377394488689924,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.043879691577617,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0165189164473938,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7324834808684366,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8648487035392461,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1815955162718363,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7949738251555201,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1845520138531367,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1512477415037,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5072221074826482,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.452455044120939,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8851411206239213,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.398383340752467,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-1.7245589449441354,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2353821294749283,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.818004639514033,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.8372574979051555,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1187211048365766,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.457612983297683,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7784486871787557,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1186854498352234,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.663944959806574,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2340242497955118,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4327456310989066,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9634070923238847,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3653349457114188,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7640991958448433,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight
\":-1.5294170893564287,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9787059553216757,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7934244646490187,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1581074060076872,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7796095108659695,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.857071601514786,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7596394960491153,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8019440774610118,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.6928153855771715,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.303226971422821,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7477710919594107,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8526931085432814,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.555020040996792,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.617094610797616,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.
2036379907714125,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5669622315146987,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.560364188606136,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6419453766993353,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.762886887055983,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.829684857064799,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2659668660050885,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0375911127656963,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9350456296425942,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1937409294885515,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.667300002783286,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.197342273227082,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.747885226215343,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.320627114060447,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.842861
585989165,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9221490732647872,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6937883027638994,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.245916574995067,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8978042359115883,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5779726589027887,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9273046631251063,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8532773629069177,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.15995231723394,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.304981003456822,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4160389600940486,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7395132432772145,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.946954562140609,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6758094968997916,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.6152584530485
25,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.411015730460442,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2800854821262786,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.758502121836125,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9646022773215046,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.866767766105444,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9313122492233026,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9160012158756003,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5375159314511246,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6376288939358807,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.007683042946602,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.468716355428916,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9762925303855647,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9831835993112124,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6460979702755825,\"se
quenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7220827983056997,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2769737778168055,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.150227112002445,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.081402099415411,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.61498364690465,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7565857304733823,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2321554937766,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2220377575004666,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.054005397850871,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2842760749891693,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6399729802168674,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5465821559263455,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7605112512903603,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.858278986449094,\"sequenceInde
x\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9661541224482932,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0319551816724526,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8162111480969925,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8048816367454831,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1191035690385873,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.924184942905256,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9937708997611334,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3731010812769133,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3851660823396963,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3370981568174176,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3774262821821464,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0161064357144378,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0303763523100007,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.229295265942868,\"sequenceInde
x\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.16803020451512,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.714531860471205,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.89017764503991,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5379802094087018,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.488918652111648,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8659519057684093,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.333137115602353,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9628014538217844,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2204159398911558,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9684107798263777,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0084354907895348,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2061739423068265,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8231935357855173,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7908705807949599,\"sequenceIndex\":3
04},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6159588637467333,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0947650022071294,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4319667555541629,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7074138335776656,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4733301775208125,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9146354081148762,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5644679098988745,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0633925417758758,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.097168817432159,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7360918350266983,\"sequenceIndex\":511}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5248350308876331,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5723678663268322,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-0.5462143906391562,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5736485063983494,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5885347967570621,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5644351076462568,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5462177481650456,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6227528184838611,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6844497590785706,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6190463257510948,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7092065652277438,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5707636237927379,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6353778911321729,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5845762417535214,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6014212632017717,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6421793856467785,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437
0051176339],\"weight\":-0.8817267436053886,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8073149812390455,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7820278935968178,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6410411105214454,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6510284555691834,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8640710276222529,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7256160045123144,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6567572327582083,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6105303648112809,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6742134585562702,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7269586632288145,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6206079559207041,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7126767588479452,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7085073958693036,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-0.6326846765767141,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7058590306886139,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1505525804659655,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0039074170966458,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0431595622618373,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0819397541256908,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.819242096881022,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9109802036652245,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9575107404658355,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3251023720756816,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6955118133289078,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9537433687731305,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.71169545761316,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8823186683073486,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-1.1531999336480583,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8069829732,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9987889208695824,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8287142272413486,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8277405566287881,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.075207259717247,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9633342283241372,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0425360865152418,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6748043576526003,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2275414700097402,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3979715537515396,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6779261728495174,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7209749422862202,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7280497468863969,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117633
9],\"weight\":-0.9261110863718439,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9334496694299301,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0610660291611145,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0935007943543287,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7291575160445449,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7714190382712738,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9240781911624576,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3334905874770848,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1577901470845144,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2609441674188626,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4166694795240364,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.127569897963495,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1047040319142005,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3004864151889448,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339
],\"weight\":-1.1090569779411814,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4063101211026925,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.217221959684327,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1798193488710187,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.677250147895251,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.169926320105777,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.262628719756861,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3384878643350793,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7230291972227816,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5501172530479093,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5646425109441944,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9088220243728915,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1119945979227328,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8774678631697128,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-1.0184492133115952,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.386050023338359,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4653533809150232,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5205289090204528,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5748599052816374,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.025401260003416,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.346032142049385,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0448110408030797,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3395228204841874,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9068095530710467,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9642404836167222,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0723973221760896,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.319059087403,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.324019233262144,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.
4587627185801153,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1413854004125457,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8540672897457404,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4469056184108149,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.39066421876726,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6479633761420047,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4107760282578228,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2388380901289129,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.278718515222746,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.829142953259742,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5270971142808276,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7237785247512937,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.82753015085188,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8807287495317249,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.40336
09887738592,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1666932480127106,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0575936136693418,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2392692837343815,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0345304132256272,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3043622523654967,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2507178253203544,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0811938784892352,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0960601669915648,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4688604966528285,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2064362660698107,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7412604792957946,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1951523006302929,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8023056720458465,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5982
08719158489,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.350624337403337,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4020230938054536,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5486659626489152,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.698855473865404,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5875163519577566,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4570156090087898,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5149847819096927,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7276490898265744,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4205264264227988,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.475384349971375,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2104997847382148,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.579045399357825,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.150813621119193,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.50293701598559
75,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.371134612472052,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.637017497019791,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9092105876242573,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4422648097877848,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9630554451239846,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.971633752827694,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.289868073597945,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9569218583882424,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.224437144135665,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4786841681789153,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8469307123796692,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.815402981030696,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.492870967537261,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0106882540648603,\"seq
uenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.614533895103961,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5362280451805295,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.055289405839548,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.701856132221906,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1692244011438007,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1260566852868465,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.056762788702066,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.217202436865887,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.656225244478179,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1321970732239937,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2259714141554428,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.795477981932231,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2629493764298596,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6438164114881582,\"sequenceIn
dex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.93218494730056,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9182724677691663,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5860103906721754,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.367177925926457,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.986212015117925,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5291913607712,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8692335496582384,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2829918051140115,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6305359918421014,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0402729990512922,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.736460879408271,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.0070707086974755,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0679797084907707,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7374563214905328,\"sequenceIndex\":482},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1043518759853104,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8987530405974575,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0954121533955448,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4130527305595262,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.464577533541111,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.340901236210182,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.220217358267083,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.257089749335829,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.232737253496326,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.830202528942534,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5511926140415044,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6300563627931517,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8825172896851772,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9240767688646454,\"sequenceIndex\":458},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9735985563977139,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4330770985575854,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7507388747687989,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7827461972198484,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9101178331074806,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6112801447139344,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.172279536416798,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.781106870690471,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9567964677188716,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5827816374151815,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1844830777935984,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.252803317870239,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.070933769635768,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7820351456299308,\"sequenceIndex\":213},{\"point\":
[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8724041552282875,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.514220737590013,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9381324079435864,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.21176561550635,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5722087658343686,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2547432357346313,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.374597980798318,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.837272591616831,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0361106762026275,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0859148062058006,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.769202793705984,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.404670778132997,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.461367850442871,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9838933232478444,\"sequenceIndex\":286},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.593228525612356,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1303204163338787,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.11183026406802,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4133964056298067,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.808678354819195,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0203694690109666,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.167754103030759,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9854282863115045,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.850266281922636,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1278496890180953,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3119758225579883,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0403373589543932,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3132157857894353,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.885630161866249,\"sequenceIndex\":350},{\"point\":[0.73096778737665
7,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4855434032731587,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9284633688273154,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3496391331701763,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1386117029669145,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9328307037174377,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.155042675678357,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.893093422566238,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8553718996346245,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6725292325914185,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2817881858806546,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8736829958449923,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8884383087331984,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9668087254001287,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5888666449237279,\"sequenceIndex\":255},{\"point\":[0.730967787376657,
0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.4058552915297655,\"sequenceIndex\":510}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47403196714226253,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4761201641618797,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5293432143239618,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4889744340452282,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4770509345414727,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6099597631671893,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5361811615325505,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5007702046377726,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5949357208352063,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6288313878825021,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5979643760687395,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6319369
585789513,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6446393244030565,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5426616657966095,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6284605928352625,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.502308136972194,\"sequenceIndex\":512},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5817118249813611,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8502229424660268,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7209080488683592,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8672772021107643,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7963009888475316,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7191395144721839,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7356807755312359,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7232814659912479,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6893254789732187,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.69385551317
44774,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7966104303419567,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.547536373064348,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7545795671680648,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6701676532060274,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6483262387174957,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5156086559552945,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6339268891331751,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6496128296432544,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8908754263356597,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8742984252844538,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.952623487431557,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0392457108120632,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8510394991971562,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9745400912579
205,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0864712967693229,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8083794049164731,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0649059874000888,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7654097474265753,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1564297979941358,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0208106251237354,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0177204002267286,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4274212889201539,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0373289637070424,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0675480315630295,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7027419432602352,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9379820607531351,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8964527233975126,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8725867145902
922,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9591564592423588,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8512170539061101,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8220488653150649,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8374298087525158,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.761787743449249,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7506425178968658,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.702436781950313,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7317443117566255,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1827724123952785,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5864935667232424,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.847364712709977,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8999307946595527,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9297685883440617,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7668928569032701,
\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.121551626380294,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0314920225716486,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1722093903891309,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0056551259369562,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.557585639242022,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2040917920988832,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2740594007134398,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.668853058777878,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.249308232134304,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3513744427974363,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1142127612899826,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.070864227077064,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1258277008046125,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2342010096254141,\"se
quenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4698493017472805,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0163892974976008,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1036463623312156,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5102320917024485,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2606240222769822,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8443272607053663,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7670238978672985,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3546830787921516,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2515091989047251,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2689798793316502,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.135384708412701,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9867669435157354,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.327126789848619,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4478122096754187,\"sequence
Index\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5685811766931934,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5496211052212487,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1199581680394306,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0071577953044444,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.607490347922515,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7350767159041525,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8761547316237218,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0219957331335834,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3171601857672124,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.184273765279172,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9566374478463543,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9760349868821225,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8935657562515373,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1977885798571897,\"sequenceInde
x\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2665823989708762,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8980152392715544,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1510521152729214,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.195420830356687,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8851537750240338,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0227929944480167,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0464324221550378,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1534640372126197,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.393998430537716,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3950090742166772,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7918476502783556,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8114920699521464,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1989173364144408,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9903576759920276,\"sequenceIndex
\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.827651524569804,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.373806200499869,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2118470409921958,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5919297604792345,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8912213741326505,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2790943785533335,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.562874430599606,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.074404519653415,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5385680279964546,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.878040429540384,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2959257735090133,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5620814909207317,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.347691966134,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4689127093571377,\"sequenceIndex\":483},{\
"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.412223655966854,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0290495341143937,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.088728259473719,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4791738715019804,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.032843367283536,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.03378384211137,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.375034109308345,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8193989509550441,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.989041152343621,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3403679013703196,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.945624696367644,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.108409907716805,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2840028829210333,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7718615742382386,\"sequenceIndex\":19},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.862209065120442,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.914887795349461,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.445238327415378,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8811643048016378,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.848156813400115,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6998462213103176,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2119713555364813,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3816014333528694,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.718967837077148,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.270250050242825,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.06502474096503,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9914431182973351,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4838274278036605,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.096707512974246,\"sequenceIndex\":434},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6009907657321436,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7465035490272605,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2744177238869903,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1922849881818194,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7456234930374874,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.274795711330053,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6842917892834952,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.086891631666707,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7399191624037225,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2994590318897727,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8639016169244291,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0302744830333104,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3484883119595343,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6194912648227953,\"sequenceIndex\":473},{\"point\":[0.73096778737
6657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5761449794517077,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6558094422239038,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7260720610994125,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7614956852595935,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8626956161538413,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6143254735052266,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1707935851190387,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1542923301120185,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.984345713716033,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6481576083492975,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5437521087639359,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6987717545599486,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9247547910313854,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.604655697642882,\"sequenceIndex\":194},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.219005143643359,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5713396263898518,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.510269234179218,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.668284429151234,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8939268862692535,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.140055155699046,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.215031224581435,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0248960277239854,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.4199581245186055,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.57567172525451,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6233556334609003,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0525544714791817,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5815949667792806,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6861115810301348,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.8518419964682655,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3916376310284304,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.854062952245905,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5499581265460958,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.371000795762826,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.948204487851163,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9180499590893556,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4960871905288564,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.757582561394642,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9159696073976173,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5597610535731112,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1481806908002876,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3988022459807987,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5004520259985967,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053
641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1692102901697106,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.519480255395899,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8060789693314425,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.271552479257223,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7735628077606096,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2357918244021948,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4683793547026098,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.88308411977074,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2589373652996887,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8979925253708976,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0445717687444063,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5638263523650702,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5853147765748785,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.81347855315887,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3420332201009257,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.363113801828704,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.599924642374273,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1280809904362092,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7851268420913327,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8122167188527443,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.19950847384845,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7777121731952796,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.337284599209632,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3876393828056397,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7128685801959338,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0740648628311296,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.067556895209981,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0544129191851086,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,
0.6374174253501083,0.5504370051176339],\"weight\":-5.8132618109110785,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8564012880776797,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2494210219667123,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9874156905227163,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.778299383833872,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6237760302552692,\"sequenceIndex\":506}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5010865874662669,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5283647435569528,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5059206878405591,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5540523176798509,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5549925352563355,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5244886347379383,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5164754830198399,\"sequenceI
ndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5730650629034968,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6027668838768496,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5856156180559691,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5759013723556802,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5946686816382848,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5255260556013924,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5189054635222692,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.570972875358672,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.580557741360928,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5883059534174215,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7198486696057091,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6052453845471591,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7889525057927912,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6119642500544846,\"sequenceInd
ex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7444096875779714,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6188073085442549,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7077407849196138,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7382933641343132,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6673417269817522,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6978896185201989,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5424980691533576,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7048791282291937,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6348615346477751,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7094851537564415,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6640989292455921,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6137869406689809,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0629979116228687,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7253522531537614,\"sequenceInd
ex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8857098139263122,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9463529974431752,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.913895842440654,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6978661482059303,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.238404574711743,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0241531140448277,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0975010576224618,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9677088045727748,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.856296846424046,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8025379702052986,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7396236240032539,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7205065085333846,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7210238019018115,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7089520056952681,\"sequenceIndex\":198
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4668948502919088,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.006005266315025,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6820137067624488,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.698550358464332,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7260451616802589,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.727353396902102,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7505170116124732,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5946505530778132,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7152343098966831,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8151515846474962,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9750113374306082,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6754557256172349,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2522302431936032,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8093980143606323,\"sequenceIndex\":426},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6772176122561457,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2580095162947817,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9068852614009588,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.986280028109186,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4938110424918831,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2335061109672745,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0571650416878522,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1614799239277525,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.846169410127692,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4348603267081095,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9504070333659737,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.005040300001082,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5513513896414495,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2626436554646343,\"sequenceIndex\":341},{\"point\
":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1162962604220212,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8717989789705942,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7330895697004773,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6939744018898188,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.255908969964736,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3817884138330916,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.410113824050407,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.137788706447587,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.049479071671016,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1457340282582555,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5689882625477092,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7430971207395236,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0530196159794492,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.095926152097151,\"sequenceIndex\":508},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.05272623843487,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1544706800380604,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7552859386727017,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.122275064502734,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3718361055372013,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.63989368585159,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.738796960869088,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.220411467433343,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4762430111133267,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6162214285904104,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0445744419547178,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1299814896127232,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3045437616041884,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2618070933235273,\"sequenceIndex\":459},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1407300979987205,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.837280891330197,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1588215916236537,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8176830565521569,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9514123810509454,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.627008349389556,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3751934640958878,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8951750199396505,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2787788798822517,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8238018818633566,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8417816145651524,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9153364754142271,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1437819270639467,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1429068254370964,\"sequenceIndex\":118},{\"point\":[0.7309677873
76657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0071404305533629,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1229690482442818,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5916806422891314,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9550800910122728,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5839646752791627,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4896532278568446,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.155168951342704,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9854561999284396,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7332982684729812,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.365636229232519,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.844027459716879,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6018349176839681,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0414202716422616,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4671180267955923,\"sequenceIndex\":506},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5554798891581085,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4407963922530094,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6162977804659002,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9252869090064677,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1182148965658367,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4591321211997577,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7573962402544874,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1846949802321902,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.296430119025408,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2777886937198795,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.294046503986773,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.332989713394109,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.531722200212347,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.313521889773207,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.2405
3641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6654910956074989,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.690864154328227,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.738440972687105,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1138985500196044,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.612562278476573,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0816625497320906,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1686044705215815,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4070333294749355,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5992041320529469,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0714221328426334,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5301551506399917,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.918227624194914,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.066785151234676,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0650121777935286,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567
148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8425037822806514,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.654426248400127,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3580552967760922,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2730380242878514,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4989043574394796,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6312046331798435,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6774933745179283,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.21535935061256,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4439208779802466,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7223232929043493,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6843863993365282,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2883078853682344,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6302269832028609,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1767633852635424,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,
0.6374174253501083,0.5504370051176339],\"weight\":-2.8023555169638463,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6335189919567525,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.27809251331945,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3458661646931085,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0620908443594053,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.841525070805899,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6929887366669423,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.306058071774053,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.546308848037948,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.411287017822044,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5313077197499882,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.535221611435818,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8561881314501198,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0493326512261212,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.63741
74253501083,0.5504370051176339],\"weight\":-1.7719554697168687,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.44498482711013,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4047377439615865,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.853381143685042,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.610888707739604,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.951344948779945,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.971889586014228,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1546186697079421,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.008211597257842,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.861908257009875,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9784685790037846,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.09988014836925,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6898465642441185,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1355897828985873,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083
,0.5504370051176339],\"weight\":-1.9515946916030684,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6483558163930114,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.089735613220531,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3461197357308254,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0956399350439443,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8594938718781384,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.221381896329991,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5402566965443474,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8005403841235441,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0781460422256526,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9497029194771498,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5611429523537477,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.865016177401685,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.717698853263078,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-2.835844944782672,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1570165825604466,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.004013860633541,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3868914176524416,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.033868175582044,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.06572907922179,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4131693757010457,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7138709990568253,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.565161024118278,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5236029565237654,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4596900168642086,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.744895581705088,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1901198900343195,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5128881435082346,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370
051176339],\"weight\":-2.1701988834750536,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2840433584273803,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0226570954636711,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4725761178494496,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.76088557467283,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2753478114510306,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6621884869232235,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2529548994499966,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6368354087259505,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.836989180808962,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0521610158630303,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.505794906446189,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.788336349834012,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6904678655311494,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-2.165651269510456,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.5529249169171715,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.49779844040168,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6298169008454555,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.202048980373728,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.251181549181952,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2838188917685012,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7873296778132821,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.762539533347345,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9997988253607173,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9312326408305112,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4708764704403767,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4742645460706711,\"sequenceIndex\":9},{\"point\":[0.73096778737665
7,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5017109004441257,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4924918008878485,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49823480854171814,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5137798859808531,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5435733870273423,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5556665846953192,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5646538702333588,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49903928892907473,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49877332158463145,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5521378216904419,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5504874895882803,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5595966900100735,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5653207343367279,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7085866351495349,\"sequenceIndex\":512},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5739263364047277,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7413673999096855,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7284355010253647,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7919054677670823,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.619890644984445,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6904722310227175,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.731578094301687,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6206893968251296,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.594421912416598,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6007427108695695,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.636808321308723,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6087366714145387,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5907266845178631,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6849154214075792,\"sequenceIndex\":14},{\"point\":[0.73096778737665
7,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7147426097475318,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8368171215875903,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8555199779250947,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6799930072764816,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2413869490503426,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8148935236572874,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2976205075816107,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7497714632213948,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0691789887701468,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7931784890065817,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1500779569898334,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7190179968892811,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.18239787795611,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1263050766772629,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.764129561410468,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8193675085843619,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0040700983556397,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6825278050360724,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9610042739148688,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8215044780139593,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7385844298693209,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7306434296416265,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1059556919811815,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7147307345074965,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8716316763791165,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8554466315920297,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8587423033461686,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6198069466032818,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.2405
3641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6624392732268702,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8380030318931962,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.689130066206737,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7519628217345914,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2600604336293002,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9694953921293037,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3576728850651698,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9054787939847432,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5259198218748025,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7257692899885801,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.763073006807806,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5864117509457443,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9129894939937762,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3335201313293303,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.240536
41567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3491451671949353,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.876827381199239,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4230475035014831,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7788760165219623,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.464106028568947,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3297765229139582,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1452580763266968,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0735444219201076,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2035608693839066,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.807161270796184,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2631900518401724,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2062238529000957,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.46570354099532,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2330812873422625,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1996508457207495,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9329063078857516,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5586239750159305,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0016552811490138,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8489740206228379,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1258700382486018,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8861727278757002,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7761640075820118,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2802677993308107,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0584787620866423,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8990538033021506,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5711118189173587,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4332995558410278,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0914265737455178,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.2405364156714
8587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6107058255140403,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0846812427748198,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.55214002322126,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.41701220129322,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7439151916246387,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3671978298932228,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.600882895693472,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8683515455597546,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2296852545284849,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6661176064409062,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0189567630147616,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7473736217163485,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1425776875434261,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1285956033675903,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,
0.6374174253501083,0.5504370051176339],\"weight\":-0.8919256713713022,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.019885982504114,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3983463124480242,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6771321623418192,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3807848606295707,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9561717082230816,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0723959930198974,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8280009684561922,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9829063714663288,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.31357038602358,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.471469169921775,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8352083526949627,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3042359571554125,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9849264740011408,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6
374174253501083,0.5504370051176339],\"weight\":-2.8836322075697236,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9801909554718238,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9982731411492403,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8894641805391565,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2429468472122183,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1434444622670927,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.980233172095435,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.893152223543488,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.827687407441442,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4198017501065618,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6454029078860173,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6896903654602582,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.375794047987341,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.140809394137558,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-2.1409179155213636,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.073482563110553,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.781546700144639,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.524153334025105,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.530605946871293,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2818830936200922,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5973776050365536,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4813226045017847,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.331125243124053,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.378599711564807,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6780534783592356,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.851221170725803,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8598479286906127,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3716265632944582,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,
0.5504370051176339],\"weight\":-2.0484045727489635,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4364953219814622,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.688815751825218,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2311127078767725,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6911065302902164,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5609255510309215,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.383884234803494,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8901664599115264,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.271769518606389,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3398948776063966,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7265657314834724,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2938277316141857,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6750576941879531,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.147321404629809,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-2.3044548777551697,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2361592161820185,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3816859184189236,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.42191080533481,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.368147602176657,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.039627831610243,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.042378744447934,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3776791590221387,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.265272013915703,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5795233657983863,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.801429407782205,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0479948642038868,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.360082097799591,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1495804213928411,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339
],\"weight\":-1.9320729899291569,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9849363459922194,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3672579302374075,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.064933388434837,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8115623224212816,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5799769302583737,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6718917366749835,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6097169184854763,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0784187615856387,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9562179882422412,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4294865265325656,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0428932958978487,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8675250599064892,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5164565421090668,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"
weight\":-1.9420979191501313,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5882898832066816,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.213220662449798,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.75339761295087,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.362223790985698,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4151372581118435,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8483960739809973,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6147747990099637,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9089520000459175,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6835658087258114,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7563120849517637,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5106133723949644,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.112518606113937,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4544745914444785,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"wei
ght\":-1.8060398835060003,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6031890537559264,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.01016170905964,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.565033336399512,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2575064357217907,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0503982369524834,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.023982452524348,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8049203629539503,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1436342726533577,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.2079978001983696,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2665554523646714,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4858342749995472,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5464741315552293,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4252413197513862,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weigh
t\":-1.6409379670479673,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.649912347048025,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4560479806749598,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4994511871067695,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.540444408715276,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.341672490794531,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6713033492077831,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.524883000854573,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.516784019265156,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9363472520431928,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1231143001415531,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.642699009001657,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.505920969228378,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4988294664969346,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2
.0973440482059797,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.111980080901454,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.829854633955344,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3931194901200676,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9659219648888355,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0607387130246018,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.395481829481765,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.468697285779144,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.40186639251894,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5886494584201891,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.624692411104703,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2131599554964407,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.274700909682283,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2951393621759952,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5389168
825715966,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0106736069423488,\"sequenceIndex\":495}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4858229754207344,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4872620510375193,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.490356066397699,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5299213764717216,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4898391177076135,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6880508048162892,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5351721461916753,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5496647849647347,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7191509589543584,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5373372384250258,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5798419514077974,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7258586836345676,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.761578474846764,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5736044197386476,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.53930014313063,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5887199554433356,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7225956553811267,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7326689369182935,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7563196197327555,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7022039704431122,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8694958891300617,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5874306162308289,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6511045927909472,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7420948397723388,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7358271518519,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6
374174253501083,0.5504370051176339],\"weight\":-0.7798374879667624,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8190824350074238,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7032169898328363,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.591019303829602,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6102139668024491,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6556316356330851,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6214141977856469,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7158589118162287,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9800837363088313,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8824129399167358,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.829324662269745,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.990964834066355,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7940851842216833,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8193059326080224,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374
174253501083,0.5504370051176339],\"weight\":-0.7253285287880498,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7483478889394165,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0480192664620784,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8894529470083126,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9520402349595064,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6887568720477861,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7607340004716993,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6568054217274205,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8051713140176755,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8085700002392576,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0993711343679042,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7528774656226753,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8780791813802057,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8887695360339962,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-1.0106291669414524,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0571535952480535,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.340038425634295,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9180387358841735,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.198730825809331,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6483693353496862,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8007452874529742,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8308881652458674,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7703061511961707,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7445128235595969,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6868193943319045,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.425607729122935,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8475845316542178,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8947822732428031,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-1.297977659957579,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.520975650281005,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0927791912387492,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8983958705118908,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8819588791612418,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1087528497730792,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9952130745690149,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.004873108003205,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4849225924986345,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1180556308440956,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1208747098439005,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8791016475494227,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7382159315262906,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8392590469118542,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742535010
83,0.5504370051176339],\"weight\":-1.0415567036972604,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.222934420737666,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.243434113915014,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1863171060067665,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.934249606040292,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9470620628587046,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.282611443618607,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9643992020158999,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6941893239964521,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8939586866227409,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9219693840720636,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7780104149083764,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.647707614712938,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.728322358681079,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504
370051176339],\"weight\":-1.1045894600363209,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8380361771601437,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4040667138274587,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3393144466921827,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2406401459678194,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.313548567328322,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3249732189976564,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.049259796224815,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7077544719308766,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.180580493235067,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2569540994123924,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4214837172697414,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.361627345967599,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8486979606582619,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-1.8424677932826241,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2479227382249385,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3416166150345785,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5057663791123919,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6607342667775213,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0165659022373228,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8773374523410131,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2009563824631917,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9381310811651796,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4651286766246596,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9640636802748915,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3750872930908342,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2677315210347453,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.006779301138317,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-1.1562385105015658,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7691389006384153,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8477934263823603,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.916354274438307,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2882563588242022,\"sequenceIndex\":512},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4650844585087084,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9029278539346954,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9867599578892787,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9332623608414942,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4373503843362134,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9954787411274658,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3429384709799743,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4872288392059665,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0567832370962713,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051
176339],\"weight\":-1.750175724950233,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5807599191120012,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.120613149422466,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1054426195609928,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.512041380921692,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.218223090710488,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6330852272722727,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.901218164238975,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6938012543511443,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.286818806808259,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.581963954775608,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5781313345389527,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5493748236311715,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.680141977650968,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-3.3107974506049183,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.905191421684678,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.861119021503395,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.509487118280383,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.788076226228357,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7172879535781296,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5452728913826417,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0255904437675618,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1193427368775144,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7535478100235211,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.376955755135771,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8551758826722329,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5389450623092733,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2530214406882085,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\
":-3.6981659660850905,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4973261448270145,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.135413914464539,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6338583251517313,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.286920435555379,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.508251865132913,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7979911010839684,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9904578800607242,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5136885371005246,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2941545773048064,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.887948183168378,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.148038917723249,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1840965967961574,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.304745288953973,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.359
3644221416388,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8631033710432275,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8580995328139411,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9305091418885427,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.601779095477629,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9954924501793836,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0156937338482888,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7512286255001226,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.719906878957141,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.689849970202079,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0890608206785974,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.705932857879201,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.418220346148104,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0969261774340713,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9330035202
94151,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0885415668138085,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.460245370471546,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0597705167373737,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1443431761272516,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0105311974397653,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.197456404496632,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.120412467876821,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7891132961125886,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7088997854022057,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8935408576034083,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-9.361889640493333,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.53603728938968,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.20998669741581,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.511901049132939,\"s
equenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.68773923955451,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.890860836943703,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.645350351571648,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.201606756533894,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3635343163089386,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3073599981935704,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.040485633144359,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9830731715041754,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2882920896962795,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.851725986285789,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4414184669483667,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.94379266830978,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.754474475193515,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.025290488086333,\"sequenceInde
x\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5118593066417145,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.543611857646627,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2735831336865173,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.753172024429554,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6150055051460992,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.865165596683522,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.547918886692758,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6118052000111953,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.4044613545455915,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.882588329942707,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.694664822533188,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1466155553298583,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6509840634401893,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7275842459017428,\"sequenceIndex\":118}
,{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8775812866708608,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.017105498299956,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1388808491519846,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.539365079677803,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.041819038793996,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1160609930877206,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8731539859443649,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2386076308002525,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5509303247333033,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1146417573739418,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6372132867674243,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5584885604503909,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.920989668227779,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.188160876395973,\"sequenceIndex\":328},{\"
point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5698232833995625,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9085697322108637,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8479724365655847,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2891388881907395,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6291051777170995,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7163169315260967,\"sequenceIndex\":508}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43218231525758877,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4485291957059978,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46161219556158933,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4555950935759638,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45533098186052356,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5019168357102136,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700
51176339],\"weight\":-0.4772590778795263,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47747780456511746,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4731079867634925,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5786594495877174,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4890804174516189,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.510905874902158,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.534159354219894,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48796139743336425,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5207192057090839,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.510851561908569,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5927418697091994,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49897978718136526,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5597998334543972,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.672444707444778,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176
339],\"weight\":-0.5976444447764573,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6911387595292909,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5269748200104902,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6478046108407362,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6218329906823525,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5357925943953431,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5356280984410237,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5172079085066068,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5036350925787829,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7118705959945192,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5517275785757312,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.671044780907049,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.728055953548155,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9870982470736505,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511763
39],\"weight\":-0.7882774230551995,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6534247763282934,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5807554033191522,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9964800538538917,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6476561692272954,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.920202445262679,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6748993178919759,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8251054141788285,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.047414378451381,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9823042419377067,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8086061845848271,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.536591268393244,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5384159678551852,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7155680095001897,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339]
,\"weight\":-0.653722876759252,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6579668469210824,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7156597292021663,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.730251912795576,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.051566311004025,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9137466327356003,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9122391278770806,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1404621366351742,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5979333578174034,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5652985811985672,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9339378207769127,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9877003741642711,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4402694020885676,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8872526503355296,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"
weight\":-0.5655519042842441,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7144977444070437,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.456676106496991,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5853472998592693,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9478347886701531,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0418510246037807,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2165935865337842,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1421477935736484,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2813557985600237,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.394923069770304,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4660316544003311,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9975702486860947,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9333866634074868,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3795069657691368,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"we
ight\":-1.0462117781064142,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9952801330122117,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0394008761629654,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4531928832589416,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2837072507098266,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7006287478046819,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8883882883148864,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4757064018407855,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0248557320218463,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.248223113155371,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0945998991114811,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9844429450763853,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3292595660948998,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8195503102986449,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\
":-0.954542829773518,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2404231536067352,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7338283669044146,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7998977846757096,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3262057291828664,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.271004212223958,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3326643400824276,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1309030793527863,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6653415235423492,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8759152877767958,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0729951334237686,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8403178507213727,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8743661242787271,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0276907120441028,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-
1.0525753035608025,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7699606263113823,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.364339154587096,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.469432462404565,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9336037630831073,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2924571040676256,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9997217018680697,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2693221503858503,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3460158019578996,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7973904893206231,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7115290234838819,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7258306543374332,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9861153558259991,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0463553115132718,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.
9442058718113867,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0091870916010257,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.361979717481527,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5638720254128438,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5154544490167192,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9290367470552747,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4434530156432408,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7013083465142445,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9151754712046856,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7669367654459347,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7148068328042567,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5745971976508786,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.663441120686128,\"sequenceIndex\":16},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.487291402258444,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.305
7256158253923,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0893912352831654,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.331005593548822,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6318995103769485,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.543249589047329,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3362026363972674,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3675357990978885,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.462801592637378,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6687553982263839,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0805210114621313,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.125269846442948,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6042732391392944,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6966452353680401,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5180395476047024,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.73416378
79659366,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4595854735255722,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1110449144721466,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9163383667400715,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1450157899351014,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.960587232084712,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3359757886314516,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4021843057179115,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5141349735399445,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.280819766911588,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6089984140258298,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7829193541378647,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9616419662675306,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.675368514994843,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.9764578735535
66,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3989394698186555,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6214015123889713,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7348510470920759,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1178498518503202,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.85878215827205,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1353055333168434,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.959185375302622,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4863247886356594,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3780717037198373,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9008873230916492,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6421464416286096,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3316419279986962,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6943443735618615,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5322932997654586,\"s
equenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.426849092230423,\"sequenceIndex\":88},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2183780355372846,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.435739533071314,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5130787243887953,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5144697366353954,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6229416502608804,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2845937516547146,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0864141444099502,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3102513497466393,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2898595651763456,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3877450925081403,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4888180737944325,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0317127280312262,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2883135679025595,\"seq
uenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.736656622928817,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.500773508797091,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0682422967124303,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8356755799190065,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5746761458370275,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5782476368973926,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2449339565804778,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1847191051477792,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.896653813204601,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.634984746533856,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9762083303679591,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2783431650797608,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7124485824973985,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1246098091035086,\"sequenceI
ndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.110790927760366,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9020632063121636,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5308814065524237,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.739580021414638,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.408870339298737,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.285296261847491,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9537183696155414,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4768182390409086,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2594720580803394,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9094038916018234,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.481678216029119,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.203349134014364,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6889719851612335,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9888643330689189,\"sequenceIndex\"
:333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2216789433160955,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0260475115647623,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9420579471526738,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5862790239825457,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5359438706497677,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0245873175592006,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7081334033296767,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.018712602441313,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9744904585064142,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3692641207580922,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1081676297416727,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9349694669763597,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5372165165157898,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.902314644770325,\"sequenceIndex\":
456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9421872586986753,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2705297365096881,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6184956826310763,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9893151767848376,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8629394556916434,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7205275862393656,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3979404634726453,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.32549908198766,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.307695386747366,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7748217269023576,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0961512896298133,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3394329704656456,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.007834289699108,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5233527737035466,\"sequenceIndex\":348}
,{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.711204636988655,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7331732436513616,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.207621191768583,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0191590543841054,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9737307954875334,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.047020544040035,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9228875231007905,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.185148608722154,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7035382496424947,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.330422516468464,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8254819844809498,\"sequenceIndex\":508}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4973688102840023,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-0.5060371767345275,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5228097556586833,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.508916993425157,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5151499285314994,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5482705220750808,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.53454875258925,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5225530490501025,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6113083236722386,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.58903167671159,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5470513687515192,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6427510445314031,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5641550097420565,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5574501071702778,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5672631420688377,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],
\"weight\":-0.5316253900975894,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7684072776249165,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6197315972930415,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6365330094783536,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8698559437695622,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5965944149578167,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5760990148676591,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6878543932926886,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7301476211769456,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.671537360661678,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7129611750850526,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5852064373516835,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6961611631123278,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6383080611443186,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\
"weight\":-0.7132447336987471,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6407679853097566,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5345493617656112,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3530351356831072,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1506123890394395,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9165702860494014,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7427624713424404,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7314097974722822,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6725663759454212,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8490224805705368,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9909909082296868,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9609183386313787,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6838595651474582,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6425655273042373,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"w
eight\":-0.7180215781361419,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6550137620878168,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7760115056123004,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7178350314328972,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8135197414032338,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9232600757198128,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0658105944331357,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7534092955775344,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7394677830142944,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7938319447519199,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6297571079957155,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7123762492978596,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1748364510328875,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7193955954358021,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\
":-0.7028938437375565,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9276528904723989,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.988434053008993,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7322247250112124,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7268011913981614,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6790470447544434,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5441797305714491,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9505401356369416,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8513411699920355,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5508954193718445,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.480271470225363,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4035429095274903,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1012702137825086,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.412781104327091,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1
.4310840626292167,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8692250698989777,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2204886694044015,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8845248036350113,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4886040865657932,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7534858364428885,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8938879377344087,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3925583504227408,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.168819125574342,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.181877784735641,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0280357326222018,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0707466337180653,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.783294859266626,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9466027287028816,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.73
26955391513064,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9729227100698518,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7957327054641341,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7358358878267341,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.803331096806224,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5533004521641491,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3774646611321764,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2337261063064482,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9768279055986889,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.17503726049971,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.142343317031489,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0165959053458125,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.923299884898386,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4470484479908756,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.429783381
4034822,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.237853997396787,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.408925021320803,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1936216512582094,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7428474900783527,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.326728009377924,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.429663521540352,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9388410561173363,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9126703391950136,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7558141620318706,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9607328286198458,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9847367684073949,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.292955342925033,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2407985727229722,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0796767176889
306,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7380855513261412,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8854984026321123,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8024427998526135,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.136884972778343,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9143307262500449,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3520992778000418,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.082050871925046,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1347658539170897,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8692379325255752,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8464181387827172,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2061140251831712,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7367839576206219,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7228620193389971,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.604752687272604
2,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5920039100330619,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0156898433880723,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.594839086835488,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2751140636447733,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8840373284649434,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7398133791056478,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.79233228235691,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.562563243111901,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3117109577500634,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.397347980903518,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.45503342892964,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8943325722387738,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9428933264852677,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.248651428423419,\"seque
nceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4166915861479932,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2298393994409333,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5168039247892187,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.585067431604288,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0944079149940464,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5889957426000607,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.543216428014472,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2624254945852196,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1158117372776926,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.03170487677397,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5425826603934818,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.785173076913568,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.728702562602254,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0744846256248852,\"sequenceIndex
\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7100092124238615,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.889235342795439,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.714297721261049,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.902681970964993,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4779190651142295,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5667077200172215,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.201379946301353,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5546807754352177,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5051999167729624,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0968732746955738,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.3032032527188955,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2412942142500643,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9518417576821687,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.071013690457448,\"sequenceIndex\":496
},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3968373153878475,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.95004211559388,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8695160101850011,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4739890617759592,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9063730360759397,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2846810856231428,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2849584483998817,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.233376478913222,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6168890470173625,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3112869545601686,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.343693246801004,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.415766789580363,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5467211451791982,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4758683264267756,\"sequenceIndex\":284},{\"poi
nt\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.666410381803981,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.403298272070074,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.285371750148533,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0535641200772305,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3313509094448295,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5191687585151352,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4455111422285791,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7305705660392183,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.568329735564515,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1585542013972887,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9397162455224397,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5098712472247195,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.196417433237718,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.134705633841062,\"sequenceIndex\":449},{\"point\":[
0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.002433008694086,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.13916910122152,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7257411601558164,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3432438258138184,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.845660616243099,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9162775214734515,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7240801254318316,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5002027461895615,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7726383792445988,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4425215517158647,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0439123858964465,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3806266002277903,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.494824730805039,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6760846851365072,\"sequenceIndex\":485},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.794817528630632,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.518090314920807,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.286137409117118,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0242273487644868,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4534546735508385,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2908524110535364,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9693686839215994,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7589710877824813,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.005125535331012,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3783153150462277,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2080035248176637,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5675554155932967,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.118048293470563,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7400986123229136,\"sequenceIndex\":113},{\"point\":[0.730967
787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9341987272209338,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3870621783279415,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5625485535986194,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9687486649576107,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7683707729496985,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9776721153639978,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.160178930744983,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8398113364400106,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7743096903671987,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.700949340757882,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.757413663867739,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8881852641909704,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5843794420376014,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.976063741810023,\"sequenceIndex\":30},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.382928265755589,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.963878075156559,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.217869991258555,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2431537016677248,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.16883508040138,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9338810001723816,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.246248117960478,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8296103041517964,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2986124590434662,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.406768109939617,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.928832142453249,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.454756141637128,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7541595964479679,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9694705789171905,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0
.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7362831406102939,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8808857126785352,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.42619771958609537,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43345143258128493,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4293265293904184,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4608304411748756,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43827822187100385,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4315640195397491,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4974204847727388,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46565712963053457,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46781347412024343,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4633778498801164,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4455
809073088714,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4586880959255478,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43456635149105477,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5889536026501747,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5575845902843625,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4900278865420333,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5449975124369217,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6005579927363516,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6003932802935689,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5799184962451617,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.779300545125928,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4990657413433982,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5209650118747244,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6403918943393926,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46875405
69375934,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4562868877251727,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5369042266955,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6317068758530606,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6615713401030259,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5675303072108515,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6408920830347702,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4932240578057968,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6802716610471267,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.606556831035033,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.61028853182528,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0142378030349612,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7129158830029204,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8513382606769139,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.609338118102876
7,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8313043365481972,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7963373066802938,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9044608267637612,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8911991055548459,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.249305688132369,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7552034547984369,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5678939024718194,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5785113513450488,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9153824913055594,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7660651386864122,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5953416895727442,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5639325999715332,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49806441566540527,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.65383901522256,\"
sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6126386362056737,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1036778126210283,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6646547362442476,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6601495071903389,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.721404220515793,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.82801513652758,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6254747781840414,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.670059738795118,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8568023425316305,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9773101158618809,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.88839324935546,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9528411826277059,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0868595814771214,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.506106954831744,\"sequenc
eIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7401672347916053,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9431130242529148,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.942404984532177,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7806674386636836,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.330446100169048,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0511411896867207,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.859572560734734,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7480418030789947,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0717137262147722,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.279670889872967,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.991702512052667,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7582296274164384,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8433881845352482,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0340588542519702,\"sequenceIndex\":40},{
\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.388737218900961,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9221995751668304,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0180777547261157,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9705025281098207,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6613793800179069,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0993417122432894,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3122408831498333,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4443818658074077,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7594089777032923,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7766816833823106,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5857946279120249,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7855512465266357,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8953485300211025,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2617411202345663,\"sequenceIndex\":191},{
\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.038689651549418,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0227330751318848,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.003999937843405,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1374609132591116,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4014517607842742,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6314291392369553,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8666843032099214,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9279104199869668,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1211207166523818,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6221739653945711,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7338649692899989,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7877457471682636,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6855119517241508,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1044906215732246,\"sequenceIndex\":464},{
\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.823063631823243,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.057603159261053,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8734587022973378,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4289495191482815,\"sequenceIndex\":226},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8946858855185241,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4033653962696095,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8605022749080113,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.791392944805167,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7275083541020984,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.186718314254239,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9124736045058333,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.921685182272235,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7313748217730737,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7253397579329358,\"sequenceIndex\":295},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.04414706762908,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4720829820025607,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1770719096187794,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0991147972193192,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0638134251100024,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.2031456335223085,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7432184586010135,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.136890584023146,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.594762173247052,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.180518641679706,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7332411156222298,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7426299657522255,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.476204977323509,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.274906013742164,\"sequenceIndex\":347},{\"point\":[0
.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8368165469683038,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0891403054762818,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6746951847777294,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.872319957800618,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.161085600585867,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.078558963273582,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9730183240232337,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2293564025370594,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3723309620229296,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1714556961807316,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3984609293214811,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2258755915684194,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.185635917062024,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6861352087013788,\"sequenceIndex\":345},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.642502735479028,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6382818079632708,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.114302699221238,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.06522473784119,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9901403985251185,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5960867062637014,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3352200921541955,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1691412619416055,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7353887682192233,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.6407855502545186,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6359228238605472,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3411923201055709,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.093672600577947,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.625689367360154,\"sequenceIndex\":269},{\"point\":[0.7309677
87376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.750841361004159,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2510011565778136,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.025588382945282,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0262862908236605,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9959041475448684,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8016217154740135,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5317892842336907,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8317893984233533,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.593165601924417,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4905984904414105,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.553926580042387,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4865187712981185,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6023234090189846,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2741100159244376,\"sequenceIndex\":179},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.301939424435114,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9139875065644394,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3623488550339267,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8133373462809526,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9996919090581265,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6557403692086934,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3987510753921564,\"sequenceIndex\":93},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.675596777333336,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0930895231733886,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4623786021690424,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.311145591901192,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.056522555452434,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.043364357557995,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.114609029304178,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.240
53641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.602530773565006,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2522333393274723,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5150759925476245,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2707894041405075,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3738285292997152,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.432199302453765,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6081267937139834,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9093748973338174,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.787600615696048,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9916767534345647,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.162710606285837,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.60660893645555,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0667192093565772,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0312643897301779,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.240536415
67148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.448607670608509,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3251464494887037,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1168221869123336,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.666708403017345,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1663977944948685,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8647084031192648,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9521446545157163,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6873149246766586,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9190967706348276,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0342558250866918,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.297678064508649,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.299060600628447,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.088795421507686,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1181248360630978,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.240536415671485
87,0.6374174253501083,0.5504370051176339],\"weight\":-2.111122599539255,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.252548190587248,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3036983216670057,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.041861715422634,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.719760816963986,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6969335086028183,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8259927958094113,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8975924702606604,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6268552916093544,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1379848488546416,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5973028858333724,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0488353217009867,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.91179692124984,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5024089791113757,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6
374174253501083,0.5504370051176339],\"weight\":-2.0701093489347206,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.816026882281499,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3426132890164049,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4817645814823794,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.307135413388725,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.880462993186049,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0781695700360863,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.46647582996601,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.900901575946849,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7383410547096612,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.856708610394352,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0292440032531234,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5048004451475878,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1703071952953255,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.637417
4253501083,0.5504370051176339],\"weight\":-3.406942678389322,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.990931517287093,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9783730794023422,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3419407966288404,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1836035922029595,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4614755364268803,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1695235024964705,\"sequenceIndex\":483}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5064663693551557,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5213468144887055,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5130101393466253,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5525095094856098,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5335350536075059,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5603817970363063,\"sequenceIndex\":429},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5408690278602389,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.558052777651177,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7097211213778862,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5942721471483361,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5939374988113116,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5677921335868918,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5706653311075429,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5481639956494487,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5449735635648337,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6515813201662238,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6622861852131647,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7098923988307566,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7487940084431258,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6359561015433761,\"sequenceIndex\":160},
{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6346855526039374,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8675628985632032,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6190513535715249,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7126498435760181,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6752004958878829,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7491972926757735,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5876859621522363,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5740197099453279,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.585526106774722,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6220583146954146,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5719812248557312,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6566487933063825,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7422922120470783,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9075918151385738,\"sequenceIndex\":267},{
\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7287006770008825,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8789152416089542,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8199936216378704,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7957906580674321,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1882033999062453,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9182540387545719,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6770912792523323,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2481081940484968,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7669423326173418,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0636093419040695,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.225151020811381,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6355730442111865,\"sequenceIndex\":184},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1504864670530401,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2988912910803927,\"sequenceIndex\":280},{\"p
oint\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4207805747025573,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9458855680117887,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.688657467667933,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0527065498979402,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8028763708200612,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.709887598101311,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7643360691590508,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9889248793690937,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7945904124438725,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.722960296583395,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6343440528785628,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7430017387109499,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6908168408094095,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9749865075092569,\"sequenceIndex\":251},{\"poi
nt\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5814360803306684,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.678027342977704,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3803726503763978,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0776777675691047,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1216496870123196,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3708671280499873,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1126247376745397,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7701724017332817,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1180671865327034,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2458768190663236,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0094188020167,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0783716571265278,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.02759099356388,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.216586547240496,\"sequenceIndex\":77},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0534585718704141,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2178049590158204,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6613350875285675,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.120212625718705,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.607261828342843,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9576094630707074,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.143215716280657,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4305732420003352,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3535385601133065,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8436798424707757,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7890108656191221,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0766285123625676,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.193521810412908,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3727995424813484,\"sequenceIndex\":180},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4182681280976666,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8599075980885446,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9317871567907272,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1986142624635319,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6269214974259167,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5598141950617628,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8544711829266496,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0266358448030535,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4484714335868452,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1144681622260624,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9783912682659098,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8627599099796143,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0860259493829498,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2234447482996404,\"sequenceIndex\":209},{\"point\":[0.7309
67787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8492065159264601,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8884142080725931,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0259047001397468,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1548827808391942,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7815309347740937,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8985701826792846,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8184539626004113,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5262801098226815,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1087755180331451,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1942829112162898,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1030900898498348,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.194333216684616,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.255375216086092,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7532801238329827,\"sequenceIndex\":58},{\"point\":[0.7309677
87376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7032128947046683,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3829869214903292,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.777789284925086,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.117988323098341,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.419357526082583,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4615855362793475,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2011102966771476,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8632662420953532,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0728475763464398,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7580128243128355,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.629338501392837,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2429619710905464,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.745778218225133,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5881652428370847,\"sequenceIndex\":2},{\"point\":[0.73096778737665
7,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4012672916378561,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1948594205380318,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.385301965467861,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6454788697530407,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6750489646560425,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1534641907444851,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4512162896570424,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.591103544849008,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.65721737416697,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3755937398049483,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-7.338955631449313,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.268973334270531,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6389778686871437,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4953005950897578,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24
053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8317971399178823,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.785012106128601,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.565736526047666,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.055722461659212,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1153950554122651,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6486019451955303,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2254733831177187,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3642288341891278,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4855639859387169,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.143432852753377,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7635832316061855,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1596981154137884,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.2463837318548086,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.734388415466785,\"sequenceIndex\":80},{\"point\":[0.730967787376657,0.24053641
567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.885479948142958,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7499963394614746,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.931421234507558,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9977716375314355,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8516480223662188,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5354833400289563,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.154693199813363,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.39149294085949,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6711681309430686,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.157818828206081,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2281501822861998,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0955351238541384,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.194007248875388,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1661233281107264,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587
,0.6374174253501083,0.5504370051176339],\"weight\":-1.6145767407339615,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6849655872224758,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.143031935069677,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.992530515817045,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5632152507280006,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.601433586087968,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.69105444098366,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9807059379320717,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7487103293741963,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3181196587514092,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7699224666562228,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3756965457881254,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4564461768125265,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9739321251755686,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374
174253501083,0.5504370051176339],\"weight\":-2.997612791588815,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1313021072552685,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9740968723508225,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1964239472552545,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6917790506770243,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9499153465917325,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.89341093516011,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.1116637847160895,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.545121392006742,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8072464173843084,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6500981923023401,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.201787388052271,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6547930806938944,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.572057014217358,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-1.9599958151631784,\"sequenceIndex\":333},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9777493495772727,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.599702745811022,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.264611165380655,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4684461587142734,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.213972184805995,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4658952496250586,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8451570674608124,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4873510981004436,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4583915973981094,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0545588315589383,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9426760455064034,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.823842791839905,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.7183499112391845,\"sequenceIndex\":27},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350108
3,0.5504370051176339],\"weight\":-3.358063842459212,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.751383215839482,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4562924136309707,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6114129235697499,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5157828925212515,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8378637756436289,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1805889429330607,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.055056566131996,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.497900854424913,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.647266955467929,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7884796957693494,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4592945744716366,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4281026174722815,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9930135675929601,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5
504370051176339],\"weight\":-4.017924167891236,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7386865207782933,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3646698329095361,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.343654655586407,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8881037623642203,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4995062111824669,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9491566225839064,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.862367004659946,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8057733347626272,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.547175200988337,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.256900198390116,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5860965369138416,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.218807213881615,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3699408025676862,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370
051176339],\"weight\":-1.1481986026621562,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.177677102448892,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4372499155506913,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4886480471487586,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.37514735150258,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4773070976807032,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3226262332779486,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0163754373342826,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5286163099996475,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7298989328712548,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.657796567749366,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.772412475448106,\"sequenceIndex\":507}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.505977781474662,\"sequenceIndex\":322},{\"point\":[0.73096
7787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5059854898127096,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5178533124362862,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5207682278123955,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5361963251497277,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.532939664263322,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5482763248423206,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5625289462542018,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5250723099765618,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5516689446756178,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.555474734215254,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5388123573443929,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.562210188011806,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6705493225978134,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.582684933632113,\"sequenceIndex\":255},{\"point\":[0.7309677873
76657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6279366736718449,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6465370176114829,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7415040077758435,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5881507594712443,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5852085402584533,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6103019466060833,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5782967521766245,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6093999795987916,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6183654390411246,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5958380385583253,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5858136745798084,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5698103333810292,\"sequenceIndex\":221},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7062156045063205,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.896351462072671,\"sequenceIndex\":366},{\"point\":[0.730967787376657
,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7855339923155613,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6226277938964764,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6572757374150333,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.943979189163138,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6966538410997436,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7405349108416687,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.152395580785274,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7446388967620914,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6289892016983869,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7261502693876258,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9257082803894178,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6024959399469216,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6768418143610527,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.833735403238033,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.2
4053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8946077348740243,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.857858680057098,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7084031970391405,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6253839378895836,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6269126573859838,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3060295969587594,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7308557360336072,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.650666334080312,\"sequenceIndex\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.901513987846085,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8661538819861175,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6303128485938437,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.751433683261802,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8485842622379738,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7627114088422097,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.240536
41567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.143811701219382,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0895826561653676,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9079514128706407,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1885923849232072,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8467818984400255,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0884883655868998,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7782675504902605,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9491292664755695,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.307331877941503,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9465827808338818,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8158440705522925,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8737095439581721,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5065140350051394,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1658066104102458,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567
148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0699426358733857,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6702427842146745,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1822352356839585,\"sequenceIndex\":484},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8354923374706584,\"sequenceIndex\":150},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6361222221457208,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0633877363070945,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9864061917262106,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7203953329691624,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1279654940356565,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1167086569350668,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.441879918288986,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9110269719881178,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1057051068882529,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6537653016065526,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148
587,0.6374174253501083,0.5504370051176339],\"weight\":-1.088877570905752,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.24666743268922,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4385580333257408,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3217593308028621,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2848893801795935,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4691087091054416,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.222239960565069,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4329185136815124,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7331208921658889,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.265617497800334,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.932700775575517,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9570829436299965,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.364229254575966,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0437457677834954,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0
.6374174253501083,0.5504370051176339],\"weight\":-1.5547454073530025,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9111607184343975,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8252028101045511,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1950201963105314,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0476902010873907,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.604854389556549,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8759944923151639,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9538826127109479,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.377333864087427,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6454669511711073,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7875702968236381,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8815158119616491,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0263257603679823,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0671553843558272,\"sequenceIndex\":337},{\"point\":[0.730967787376657,0.24053641567148587,0.
6374174253501083,0.5504370051176339],\"weight\":-0.8365395841294072,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5219715245235594,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2632268954858559,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3708754338950662,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6686917620029045,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2904513984832309,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9083292575659573,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3345292056612856,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.725559029196066,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2285644812600167,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1562036048593556,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0870909552006922,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7908916972847322,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1208442614333478,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.637
4174253501083,0.5504370051176339],\"weight\":-0.7857464924428452,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0702051497552385,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.954177187995217,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5945174352177816,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.323372498802291,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.594930588655321,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9764012400839932,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3358664758415335,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5812082636290123,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1457620375298836,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1493866515461537,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6757181469344657,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.948043867319927,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.264417937406938,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174
253501083,0.5504370051176339],\"weight\":-1.4712532859011813,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1745726440410458,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.071814676638922,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.550158494315923,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.179680636703256,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.15073927550742,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.530856089256786,\"sequenceIndex\":74},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5224994767502966,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4353514687124742,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.48471038394687,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4520929468396173,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.015933535302889,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4893547724929954,\"sequenceIndex\":356},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9654119803580636,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-2.356939579966619,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.383066152507208,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2140400431751504,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.624429358131523,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6457916642698598,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3058940553685416,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7767451722290422,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7344114186566202,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9158067139372887,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2547034553214202,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3970043830471166,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.716348245472768,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9912037989381373,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8977296129222014,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504
370051176339],\"weight\":-2.989041260833898,\"sequenceIndex\":85},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1542091719832457,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2418949002037674,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3628774791231053,\"sequenceIndex\":264},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.502403154284098,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3784856559823715,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1784902571879177,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8207630704618016,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.390730613648198,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.476719195169579,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.737332149076059,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9645518272627078,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1060641272910927,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6840116057706285,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117
6339],\"weight\":-2.6536717455852123,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.259761434870378,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6612555081491769,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.765578602986112,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7331265610399673,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.081506135766315,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2831074757013377,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4460150586746137,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9668757409472937,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.41212681263999,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.056772495794094,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.533645819266034,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4973546280209864,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.376412901542896,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weig
ht\":-4.837938596022434,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1617378006439893,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9735813687326793,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.390730237915575,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.128057960051193,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3990617227607864,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8612502571363462,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7765465169890398,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1728073994084443,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6667495013520364,\"sequenceIndex\":403},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5247532718254773,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3175950200763067,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8327787010248011,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8274495912961446,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\"
:-3.8940013020250777,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.607390655136571,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7546711972102895,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4330910022023406,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.005178841336033,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6977191166258279,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.858829625605784,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4771056981128665,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0055536636730915,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.141920164125772,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2095190103840436,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0587560273087122,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.371398930831987,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.153289490534632,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.103
8645419594841,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6684618925365389,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8684509335680035,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.186926482776944,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6718151615863501,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.363218686783917,\"sequenceIndex\":286},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3134907351105074,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.79376092799714,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8058791466003448,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1432545772861435,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.115207145473793,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7911382434412673,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9595693665641445,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3801200434696348,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.052795
8994565334,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9539237416512159,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.894953547331569,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.744266938342351,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.910558182378848,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.199693407960174,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.765158871792935,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4139962734491343,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1891945082023085,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7389426379003186,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8151655434454155,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.058704984073989,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5393469795316106,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0593328076875967,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.100647831370
6327,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.184205604833071,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.095848568645303,\"sequenceIndex\":485}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4693039934748071,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4840121511509101,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49998281087261365,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.486735142529108,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5611010898022386,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5012807051018184,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5271637626693254,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5422021993308042,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5082465505591345,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5677730798692979,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-0.5880313064278875,\"sequenceIndex\":332},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5043128038679372,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6076316681393219,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6863636877518756,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5471091245492284,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5509402262786786,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5505598331822379,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5225122297069983,\"sequenceIndex\":145},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6355053262311434,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.701503583834551,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.567893541170008,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5907662348240884,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7072740971321273,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5568980045651706,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.63
74174253501083,0.5504370051176339],\"weight\":-0.7418338910274144,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7235232650458576,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6187497038373624,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7788777869726062,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8210079405811754,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5495450386878632,\"sequenceIndex\":243},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9078497185684644,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5891012477713203,\"sequenceIndex\":491},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9551517061250699,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5658923977327448,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8276528365305345,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7172920601707917,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5860191420620445,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.957090211643823,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6
374174253501083,0.5504370051176339],\"weight\":-0.6781990000962219,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8597732780816492,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7522013083844744,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6493146050755455,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0108340473986404,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6557727976765053,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6871588696894486,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1794119814214605,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8341906176801821,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8195031016721681,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7257407040295712,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7842429002860981,\"sequenceIndex\":101},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.912867079478571,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7615922458606563,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6
374174253501083,0.5504370051176339],\"weight\":-0.7843977497840715,\"sequenceIndex\":212},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6423142116126185,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6813013440253289,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7860125352256891,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7935881203462887,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.042222776283626,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.939298721489795,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8244293278842825,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7315920354123385,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1312344852875373,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9151352058940596,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2713683789732675,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9974942980785864,\"sequenceIndex\":33},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1449741628601358,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.63741
74253501083,0.5504370051176339],\"weight\":-1.2904431036192228,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.487898602209545,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5933402184049803,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6764617501251455,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8290419952578338,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0448077239239097,\"sequenceIndex\":443},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7546826975329345,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7922481636302625,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7316430104895422,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3081190696086284,\"sequenceIndex\":153},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.480257790182236,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9366783670660684,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9564738973448594,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2747740992839236,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425
3501083,0.5504370051176339],\"weight\":-0.9977291974051687,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7982023625270928,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8563817832050433,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7206868595607017,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7481669338644068,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2776815534781256,\"sequenceIndex\":86},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4604225854925659,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.220472855465804,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5229525711874794,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1001301216400545,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8024742628655654,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3527034604967552,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2476179852970386,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.451249484491594,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-1.096480426410148,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9824732665326257,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8805796191065678,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.278795977710343,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7554382378200567,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8359945035370306,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2691066380171048,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0253610189493003,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2921027944209784,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8920424111427967,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9009950735496232,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2657759094909253,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8399496366410977,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7168500042708827,\"sequenceIndex\":217},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742535010
83,0.5504370051176339],\"weight\":-0.9250305682075238,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7585514445586108,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1819750945579477,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4011384114411602,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7219745839493994,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6139596096829745,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9708769708940922,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7630623714560567,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.202489918004328,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2538813168178122,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.230765782268019,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.524664538142794,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9317868382662906,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3186797192850157,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,
0.5504370051176339],\"weight\":-1.5442938653140326,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4502871318144939,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2477189032065115,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1610175341314568,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0154162939234412,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7036098490021319,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4684166719753604,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9080252996790925,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3307029746142691,\"sequenceIndex\":131},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.928294478550343,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7730447964794105,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8234057082391222,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.8710011248350655,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.406216469052101,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0
.5504370051176339],\"weight\":-1.558600329073051,\"sequenceIndex\":256},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6903613234794921,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6033482531013987,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.729324195951773,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8701283063852006,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.045453823508399,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5672281793078375,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.929418511967215,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0161496988123058,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.434012944200853,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.257114067102977,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5758322872760184,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.256474043625807,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7460672076676718,\"sequenceIndex\":343},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-1.7583835481101875,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.595526466065567,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0369262215609436,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.161641177201963,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.550157884158847,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2262095700230327,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.022695249441574,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2477430285654614,\"sequenceIndex\":419},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3334605624354616,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5585595775710628,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2982128613087942,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3049439733886365,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4010933655933515,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5817689168509617,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005117633
9],\"weight\":-1.1316100257008215,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3902601303387454,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2275501181903552,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.139273127935113,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7518080474659888,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8075453441853364,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1539182750182326,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4416789057947337,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3405924328751537,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.397511766027813,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.142243948587143,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.404005877096511,\"sequenceIndex\":11},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.80977136716229,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.889730929881774,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weigh
t\":-1.5559965486291447,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.120385382896049,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3259753754128396,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4245404661370982,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1328122125124227,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.633631161060294,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5423984848678742,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5338681616902066,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9421516687728548,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.393988324768284,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5387934287250133,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5583213569931575,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5510305922438934,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3971529471929496,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.
3736935939673143,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0387468758769165,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9589041798610998,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.193452238795127,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5695132587437322,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6249830063028416,\"sequenceIndex\":396},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6541157682574397,\"sequenceIndex\":366},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6788152575377886,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2852030892433832,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.598111383595755,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.827687638257358,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.691417698226775,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7010400884303665,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.039396640688582,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.12389
24141290254,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5997064530029075,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9988148333142448,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.604337001671381,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4505098794943754,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5116784422879554,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4359543735090325,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.672610098853346,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6670902368068337,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2340315703026965,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2375071707024097,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6788699421853945,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-8.259000534052335,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5418010977660335,\"sequenceIndex\":110},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.040966343
265917,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4269689595693746,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3035992563476353,\"sequenceIndex\":222},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9950990169800913,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4324327783182444,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.016700837134444,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7467854159960567,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2296403182191176,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.325928175787128,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.511023829904382,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0475266771109986,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3673161297430507,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.516311042305978,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.9056404641438123,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.796021448375
709,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.132422842226857,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2906931505556647,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.263225268918278,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3835133443502203,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1140644987025636,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.493021059835676,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.120645378701462,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1154990269263556,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.615409380358695,\"sequenceIndex\":61},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4954184464712847,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.078127836055196,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.015400895758009,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9417785868011939,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.470430459850424,\"seq
uenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2735315375042235,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3941090522785706,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.249559568815418,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4861818293297353,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.604079715543496,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0531723598862504,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1132054472607527,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43564362268112,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4371250457439498,\"sequenceIndex\":133},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4573606480781339,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.43871853184354903,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4761026856051829,\"sequenceIndex\":187},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501
083,0.5504370051176339],\"weight\":-0.47147597368085215,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48952711355892037,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45977557766947996,\"sequenceIndex\":447},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4795020155462564,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.516603477158614,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5246394060286124,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7514994644780828,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5007122458366595,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5158892644557309,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6114227677337317,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45980219242686515,\"sequenceIndex\":465},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46571522143434313,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5554616841789418,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.510595819153248,\"sequenceIndex\":155},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253
501083,0.5504370051176339],\"weight\":-0.5198492353822775,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.518509450153846,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5418909188306894,\"sequenceIndex\":183},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5288997604871732,\"sequenceIndex\":384},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7556414743810932,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7676259171490208,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5009910145911629,\"sequenceIndex\":397},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9274557184520615,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6209406808373502,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6660301995779561,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8435940339796425,\"sequenceIndex\":388},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6166393839335171,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6153898034674699,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7725763538419961,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.63741742535
01083,0.5504370051176339],\"weight\":-0.46594635952470953,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5091454464840527,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6589123055464675,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5585464007464205,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5764501186236005,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5810498642409427,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6347916491618363,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8450782420885469,\"sequenceIndex\":485},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5732235201078799,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6375580059381125,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5508276029297334,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8552904477417995,\"sequenceIndex\":90},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5340973726716208,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7804584364543581,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501
083,0.5504370051176339],\"weight\":-1.0794424987270652,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8783936412547414,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8457817327773992,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0012769896978195,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6701930212794512,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6074667961006694,\"sequenceIndex\":213},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3682946617956706,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.001766615882566,\"sequenceIndex\":474},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6316751250822574,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.006038451676484,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0759312328268462,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.686408016832157,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.104956523500903,\"sequenceIndex\":241},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9110490547967139,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083
,0.5504370051176339],\"weight\":-0.8258686099039253,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8023833423942451,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7830189214682873,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0952219781851587,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8575287779547586,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8575000047375269,\"sequenceIndex\":135},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5266251733832674,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6941989275503928,\"sequenceIndex\":300},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6869638387497212,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5725584732447536,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2419977818339887,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2366862654473483,\"sequenceIndex\":382},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6957841426671072,\"sequenceIndex\":148},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6870957212300259,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0
.5504370051176339],\"weight\":-1.1061313684847707,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6912600947417608,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9356294801402132,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7923888676662443,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.586567230488423,\"sequenceIndex\":418},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0618267006322137,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.859868872761881,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.281519509284326,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5752562833073775,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7952841138506135,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8910597111243356,\"sequenceIndex\":172},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.744488161428853,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0845001510267387,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9677462122054563,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-1.019511172171816,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2210628922482425,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9803702411925196,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9067446601949632,\"sequenceIndex\":288},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2455415296594428,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8293745282787615,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.107056182589819,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2528677741491028,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9162616906066365,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9800852246672552,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8498948654808381,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0583422725829406,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.10730693634504,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.723535872373937,\"sequenceIndex\":362},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-1.2953161725739883,\"sequenceIndex\":458},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7581128272350482,\"sequenceIndex\":410},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.310884110024715,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8767017378133748,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6803209634234184,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5291519410662289,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3551023123023525,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4707406319916156,\"sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.033655042555525,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3222474379688958,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5707571369570705,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5534755580835495,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6172538862033081,\"sequenceIndex\":324},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6640590654699507,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437005
1176339],\"weight\":-1.7653378783243954,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3005265634927412,\"sequenceIndex\":316},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2867924597781855,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4456875231195052,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0028166058141157,\"sequenceIndex\":12},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4368049759460284,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.079706548973351,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1046124118873955,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1909115443719125,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8401790685319908,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8209742044359856,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8800526770745007,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.212355900423898,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6975274221857046,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511
76339],\"weight\":-3.4005308843317588,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7318947438076018,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9289088092405697,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6363880794308827,\"sequenceIndex\":376},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2050562895442924,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7054571390173594,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.897999633637805,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.32185462067358,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6192556307139117,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.368527976352415,\"sequenceIndex\":129},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0615702836061474,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7079777726642171,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3251836045613137,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4869891566992832,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339]
,\"weight\":-5.729056661882351,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2443745129487724,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.253938397246215,\"sequenceIndex\":510},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.288169261847945,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0067008102078483,\"sequenceIndex\":444},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7638694236485958,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7096680304274297,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.79153595602323,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5984631499294055,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0963882181703934,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.682657810051333,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5114623561337919,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1238252138261657,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3337422709242683,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"wei
ght\":-3.333602281983634,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8883792968271256,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.820935041860034,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.304476775631747,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9967431044830874,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3809198857705396,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.352928534832491,\"sequenceIndex\":413},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3907507386096722,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9628916849537443,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6454435280245114,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.777638120222282,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.813311265927615,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.9469842008155345,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.111596378003309,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4
.105812177023676,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8340799034242632,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7886182248301956,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.975550345320658,\"sequenceIndex\":314},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6551198911355482,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4752724931810643,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0790118235072472,\"sequenceIndex\":371},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9166799103856191,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3257894792163318,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1015099585376262,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5889627318779227,\"sequenceIndex\":496},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5138178663435793,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2099152635938057,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1227736306554275,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2
.6172735665201303,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5679419449929286,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.835162710194383,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9239829055935393,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.893076843205652,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8484007631905348,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.239376312686396,\"sequenceIndex\":194},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1495954328433458,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.8679107077217996,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.965539150654346,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9122070988316024,\"sequenceIndex\":504},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5426728912675862,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.300437106256786,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.7042013783777508,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.108556
673645134,\"sequenceIndex\":299},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4716393263418484,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6161019459591197,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1911854019667187,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.846865418579773,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1846657145094346,\"sequenceIndex\":421},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.359065018435117,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2676667121454583,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.924667748726317,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8196099147217888,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4086262886863146,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3157130262140329,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1398813577357325,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4111810207234985,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.38925906354
1299,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7342290046658315,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9308904382664576,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9048890141537136,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.666633378944688,\"sequenceIndex\":55},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8731886451049824,\"sequenceIndex\":386},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8467974293097438,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.27297051187847,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0093044219719536,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3251211085451986,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5481764648320095,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.560372863809715,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.902899944984877,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7234512418227514,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2532644319936304,\"se
quenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0849920979654133,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.977103740684771,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.41147224837413,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9278605720815845,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9412501621921257,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1291084303226304,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9009703349809295,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.061128638650348,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6751543501815416,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8393129660859562,\"sequenceIndex\":60},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3239475963105274,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1165837099037477,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9424220107555188,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.869451989788618,\"sequence
Index\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1055962503551244,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.55010745294789,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.624334664972687,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0618772587549405,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3424967625093074,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7048174807477723,\"sequenceIndex\":125},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5976289496378198,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.511121532844419,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.606549529787768,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3824978655327256,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8664743065999159,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2368415807248208,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-0.45611357824685034,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49128962489658967,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4573128599831167,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4996234591571578,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.534785402827346,\"sequenceIndex\":160},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4953470748476184,\"sequenceIndex\":210},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46833240569362067,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5302928482085455,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5901619323612171,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6034381313942498,\"sequenceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6269190952751202,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49646227414073685,\"sequenceIndex\":97},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.515532410191545,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5333667276953433,\"sequenceIndex\":57},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,
0.5504370051176339],\"weight\":-0.5727717514629719,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5601356095019336,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9141779934407832,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7299170804496616,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7413383451351178,\"sequenceIndex\":434},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7996179000226884,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7121365703975346,\"sequenceIndex\":169},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6929228760733441,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8020832413784781,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.670815054416015,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.57830907949723,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5521435192240409,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6099229635039507,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7142994264894752,\"sequenceIndex\":259},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.
5504370051176339],\"weight\":-0.7507660741144664,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6455939887024992,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6189190355112103,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5666430336432758,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9717820164570948,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.160721696148165,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2455647381911374,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8832169696424109,\"sequenceIndex\":473},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.099362695570444,\"sequenceIndex\":486},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8307716372760733,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2057128832245352,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8992116515786022,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8292830312068595,\"sequenceIndex\":339},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.048432420485059,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550
4370051176339],\"weight\":-0.8227157303695898,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8361587778568739,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.706559472039991,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9476857457342514,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8724210575285891,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8245279467814756,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8558402330545576,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6248148707675507,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5997395451552381,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6098753144324016,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.848339056877536,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9220931774861819,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6808707565279613,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9345063697739612,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504
370051176339],\"weight\":-1.1294651303024759,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8018977898070954,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8196815360133274,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8124865104878325,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6539945144120517,\"sequenceIndex\":123},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7052177983011589,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0141003532165194,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6096932812323415,\"sequenceIndex\":500},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8448283256400593,\"sequenceIndex\":130},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.109146998393224,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4910600391969775,\"sequenceIndex\":67},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7560492092238487,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2920601323339445,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3995691047662981,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.550437
0051176339],\"weight\":-1.5866516575494247,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2570393386622265,\"sequenceIndex\":144},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9225040989547952,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4020800101459425,\"sequenceIndex\":394},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3357456652961226,\"sequenceIndex\":432},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0337398680181544,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9966987908127004,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.282272764487467,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3592596406588493,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.501063228868183,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9957840397485798,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8982876341127941,\"sequenceIndex\":165},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9597730020104714,\"sequenceIndex\":499},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.990453257934192,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043700511
76339],\"weight\":-1.8828021779013215,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8786440967561873,\"sequenceIndex\":439},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.915005464074949,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1119524295973928,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2167629093166623,\"sequenceIndex\":178},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8730802745224155,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.724008101299279,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9082730897409446,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5034265065005616,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1041604461761585,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.216090139181006,\"sequenceIndex\":398},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.920632832937143,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9927680956384509,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4597603951188127,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\
"weight\":-1.4874526324457578,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1407684389774209,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.750989992024597,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9115843794477438,\"sequenceIndex\":349},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8909490029379313,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6315267159749388,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7269497975952566,\"sequenceIndex\":475},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4975610757768787,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8895695602295556,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1096573678025532,\"sequenceIndex\":13},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.492404473352404,\"sequenceIndex\":219},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8495090086666771,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2715218321455708,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9988916436330578,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"we
ight\":-1.5083402790714346,\"sequenceIndex\":227},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2373681976637365,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2742930749704449,\"sequenceIndex\":230},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9341246508328656,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1265247574524708,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2665434491782637,\"sequenceIndex\":325},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8412353297238072,\"sequenceIndex\":436},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.335323852478306,\"sequenceIndex\":402},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.865125075351393,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9483954747457065,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6808536082498433,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8194669009627547,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1697836269269004,\"sequenceIndex\":31},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0490615059621153,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weigh
t\":-1.4698376604883723,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6692968047433246,\"sequenceIndex\":433},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.932684817527212,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1661875460550213,\"sequenceIndex\":355},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2726175401208146,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.509215157564269,\"sequenceIndex\":132},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6111678513188035,\"sequenceIndex\":268},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.871087754226711,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9885670526626178,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.178750164371709,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0652820980297677,\"sequenceIndex\":17},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.008178706367996,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8693579196039314,\"sequenceIndex\":340},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.15939637997846,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.517
631374489275,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0734670114092375,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8115545258856103,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6095822643497895,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6437419394542703,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.135912114164077,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.51387637331299,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5444699499699706,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.193155344667903,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9907070944088594,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5093881818808617,\"sequenceIndex\":279},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.976108134668352,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1621107301880778,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1857969191159459,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.689074575154
366,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3041272200057101,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9692942898638832,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.457014776465921,\"sequenceIndex\":79},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.679553440334005,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5603247556056337,\"sequenceIndex\":20},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.539004303565296,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.079837836658321,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8369509268515443,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7705169592937264,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3107400152426771,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6746893475536957,\"sequenceIndex\":82},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.221436640617024,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2058602389591617,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6789667589312027,\"seq
uenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9169689160011127,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1808221261509573,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8308623800709982,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0887369379194063,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.647452100881046,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0808117392776344,\"sequenceIndex\":368},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9214342967078624,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5029654790181595,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1661471435536384,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4679933734540336,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2599051586156524,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4526986753409943,\"sequenceIndex\":390},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5996914241585862,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1061927666438334,\"sequen
ceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.626362201186847,\"sequenceIndex\":46},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2783494626034666,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.874062031529778,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.179714749940214,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0664830501843285,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9794732617163917,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0965058955290345,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.584896600312593,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.407429584299727,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5238027159031,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1534093228805868,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1763537065107037,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.380618564747683,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3991085573885007,\"sequenceIndex\":
407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.760435360873271,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5366738166958334,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.328371326593296,\"sequenceIndex\":100},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.230874520161745,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.933099549263564,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6808818709283868,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7068559048634389,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9041336805058342,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.53731777403234,\"sequenceIndex\":102},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8721680183844855,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3789584465567601,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0180016816970545,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.7142533532404634,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5142820513139026,\"sequenceIndex\":382},{\"
point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.927454381432256,\"sequenceIndex\":335},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5549793620143961,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.50154294595571,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9920175787865267,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.655043609155131,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1915920569308374,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6754071592700654,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1098448045681466,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2630379300175396,\"sequenceIndex\":342},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.09113707621043,\"sequenceIndex\":448},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.92794354276137,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.8215288038929933,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3068177006004653,\"sequenceIndex\":112},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1292970683253765,\"sequenceIndex\":224},{\"point\
":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8795451551709412,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.116825618525983,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7187968793140398,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2702498418545884,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3389654511290723,\"sequenceIndex\":115},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.049741252710293,\"sequenceIndex\":311},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9922123309142719,\"sequenceIndex\":232},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.025775921138196,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.745595962484196,\"sequenceIndex\":357},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1824223248748695,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9855512657593426,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4035775793922374,\"sequenceIndex\":428},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.509108850112804,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4233409526463934,\"sequenceIndex\":388},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.505916443078246,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3770464926434418,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4343966413197866,\"sequenceIndex\":121},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.202868401592053,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.85833741408244,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5850013573695085,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.050911256610319,\"sequenceIndex\":246},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8569195246889327,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.428143587237047,\"sequenceIndex\":124},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.77281133774056,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.118106246182926,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.878781199371494,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.898294177561965,\"sequenceIndex\":63},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.510607739576585,\"sequenceIndex\":510},{\"point\":[0.7309677873766
57,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.321650526269306,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.13087707158632,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.062313906113535,\"sequenceIndex\":511}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4505467472266587,\"sequenceIndex\":257},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47060671875635995,\"sequenceIndex\":466},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4721397362321465,\"sequenceIndex\":238},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48992676011847147,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4805108381909878,\"sequenceIndex\":42},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5142074583969133,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4723709013545723,\"sequenceIndex\":59},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5016151745653069,\"sequenceIndex\":483},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5619495921785637,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.590943
1934729423,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5595301729963194,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5294133366516,\"sequenceIndex\":203},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5730883993101591,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49889292319461487,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5107394032680804,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6701234511195411,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6208012344616243,\"sequenceIndex\":71},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6496154684510249,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6403435897297929,\"sequenceIndex\":156},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5938097717468713,\"sequenceIndex\":272},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6912667033002402,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5811018232415707,\"sequenceIndex\":89},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6469744847668194,\"sequenceIndex\":190},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.565422956
1320572,\"sequenceIndex\":196},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5788532584576657,\"sequenceIndex\":438},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7235347093769163,\"sequenceIndex\":211},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.574621671681497,\"sequenceIndex\":309},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49973752713858594,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5236111216211713,\"sequenceIndex\":58},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5127779993323636,\"sequenceIndex\":15},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7467972952090134,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6851101494229829,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.120634802314606,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6228900388975498,\"sequenceIndex\":282},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6669986804745184,\"sequenceIndex\":68},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8907443018655017,\"sequenceIndex\":297},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8071849716106119,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.71403324494
19733,\"sequenceIndex\":154},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6823660792224674,\"sequenceIndex\":19},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.79247734795905,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8304714813808263,\"sequenceIndex\":40},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7652254667505656,\"sequenceIndex\":315},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8846652633027089,\"sequenceIndex\":351},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7137216342715588,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8415755192072673,\"sequenceIndex\":45},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8039347793914181,\"sequenceIndex\":92},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.783836057154483,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.662003041432246,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8023687238962685,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6768512776249329,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8023127087121188,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2366178626124689,\"
sequenceIndex\":307},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.738330492852392,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7737168760148693,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7652984647961438,\"sequenceIndex\":426},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5372280141458186,\"sequenceIndex\":113},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5748902486273043,\"sequenceIndex\":231},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.759187901577425,\"sequenceIndex\":497},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5473958387650285,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7958932237534162,\"sequenceIndex\":242},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5157409409011411,\"sequenceIndex\":247},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7943567475746286,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4209769343078165,\"sequenceIndex\":252},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.695273601519673,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9480112605586886,\"sequenceIndex\":284},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8434788077997846,\"s
equenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6660713921595303,\"sequenceIndex\":66},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0185100516918768,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3199000622625654,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9886538174654538,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9870426215226308,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6189523993964947,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.538953275463935,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8494420552771748,\"sequenceIndex\":430},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0388316458258195,\"sequenceIndex\":461},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8182605100950073,\"sequenceIndex\":77},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8659126970888256,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.912397002627945,\"sequenceIndex\":290},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3691126735627273,\"sequenceIndex\":159},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3338711674108266,\"sequen
ceIndex\":277},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9891893983118201,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8790695219957502,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.319762005495565,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3275731471283272,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.567528937922838,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0752670218268707,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.910193934181423,\"sequenceIndex\":280},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2030908978274808,\"sequenceIndex\":176},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9100503612087194,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9977468059836339,\"sequenceIndex\":455},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6214804394603193,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9924848094530941,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8745825971857577,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9092720670614962,\"sequence
Index\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9909094866313783,\"sequenceIndex\":494},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4377915853043677,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0545296160969482,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0029969650719128,\"sequenceIndex\":364},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3288074281669193,\"sequenceIndex\":199},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3969158776425736,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8619564256291283,\"sequenceIndex\":482},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0960354121584372,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8676619145114206,\"sequenceIndex\":327},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.453470669725685,\"sequenceIndex\":208},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5154955351522712,\"sequenceIndex\":104},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8958789318034466,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7762371375542728,\"sequenceIndex\":26},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.186193284885259,\"sequenceIn
dex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9249104687159854,\"sequenceIndex\":346},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1867688796625577,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3349352879446854,\"sequenceIndex\":303},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6066703708986159,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5756334816366735,\"sequenceIndex\":319},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0044322367594711,\"sequenceIndex\":56},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7247367269399204,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7477881749618436,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.278472789062652,\"sequenceIndex\":235},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.56519658128201,\"sequenceIndex\":236},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.717110518961455,\"sequenceIndex\":119},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0308457544933831,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1053536684871863,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6573574519957743,\"sequenceIndex
\":420},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8212049108060395,\"sequenceIndex\":454},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.666689496674353,\"sequenceIndex\":248},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3633398773985819,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6828467924203832,\"sequenceIndex\":489},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7511480646289297,\"sequenceIndex\":127},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0905474172766971,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.650450478053789,\"sequenceIndex\":275},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8147908194382496,\"sequenceIndex\":64},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4126173577671426,\"sequenceIndex\":478},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9902393552248117,\"sequenceIndex\":32},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.240154009287208,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9540995805917007,\"sequenceIndex\":134},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1626303132606375,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.617192550301928,\"sequenceIndex\":1
6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.813466545599038,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0348887378625133,\"sequenceIndex\":138},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.610974440632379,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5895065780482927,\"sequenceIndex\":35},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.104894266070398,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.07896022238319,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.423692401224896,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4006706599690384,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2708374284833623,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4860673932933746,\"sequenceIndex\":73},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1523902096484346,\"sequenceIndex\":147},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4350359726813466,\"sequenceIndex\":274},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2154271414826385,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.453540559332801,\"sequenceIndex\":468},{\"po
int\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.366186682681578,\"sequenceIndex\":151},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7968255678816778,\"sequenceIndex\":38},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0151069847771448,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.833599180111765,\"sequenceIndex\":445},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.189725210740324,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.579409472423491,\"sequenceIndex\":78},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.764648110344672,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.433873993031992,\"sequenceIndex\":158},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6508045771474953,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4087898182674006,\"sequenceIndex\":3},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.197130572515787,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3881726460667485,\"sequenceIndex\":81},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2891852173466962,\"sequenceIndex\":354},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.367977578025331,\"sequenceIndex\":82},{\"point\":[0.730
967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.359732237167334,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6460842916299474,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4461614636375792,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.477039589235158,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4923458331776287,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6628789298560835,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6013446935026727,\"sequenceIndex\":381},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3103466974213145,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6384984545261987,\"sequenceIndex\":173},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.92664422060941,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5936458292007634,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.631565889779794,\"sequenceIndex\":464},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.67146466061629,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.361973106772181,\"sequenceIndex\":449},{\"point\":[0.7309677873
76657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.180081692511917,\"sequenceIndex\":424},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1572976785115714,\"sequenceIndex\":360},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1318878763151246,\"sequenceIndex\":181},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.911513134311358,\"sequenceIndex\":91},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3349823054799868,\"sequenceIndex\":363},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8169231056669664,\"sequenceIndex\":389},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.247498989302157,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.327853523488458,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5607460517839904,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3247197094766237,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9588908842865445,\"sequenceIndex\":453},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.004540624824326,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.317497900733914,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.566778394706553,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0
.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8579727493788667,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5912843505290604,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0803277107028786,\"sequenceIndex\":292},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5446824018577703,\"sequenceIndex\":2},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.16317384507045,\"sequenceIndex\":380},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.989971108297856,\"sequenceIndex\":99},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.93217262049976,\"sequenceIndex\":391},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3166791493723053,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5484322723797188,\"sequenceIndex\":25},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7558393737000073,\"sequenceIndex\":369},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.386063688610757,\"sequenceIndex\":331},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.043473901492627,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.886799696009048,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5392205470046625,\"sequenceIndex\":206},{\"point\":[0.730967787376657,0.24053641567
148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.922473211898003,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0072635409362345,\"sequenceIndex\":52},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.27712082750346,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9866033255124296,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.609958599403672,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2549797833940053,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.088325532918351,\"sequenceIndex\":271},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.228869609428676,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.12542282444888,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6015206204710117,\"sequenceIndex\":108},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5974370628369803,\"sequenceIndex\":506},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.272530163234187,\"sequenceIndex\":54},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0194196384729737,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4938766894581144,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.
6374174253501083,0.5504370051176339],\"weight\":-1.629799590606496,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.128285758431476,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.368760977034978,\"sequenceIndex\":471},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.5348412319447924,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9531657670379778,\"sequenceIndex\":358},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8067466000195498,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5129484596739116,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.516637153433466,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.945918197066419,\"sequenceIndex\":322},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.914359466997721,\"sequenceIndex\":508},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0324620144162764,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2293178947126115,\"sequenceIndex\":116},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.225237798444808,\"sequenceIndex\":7},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.409868593108424,\"sequenceIndex\":117},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425
3501083,0.5504370051176339],\"weight\":-1.381038901497581,\"sequenceIndex\":234},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4167996801403588,\"sequenceIndex\":505},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0700465891987316,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.254596224141901,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9459068618165772,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4319626052837904,\"sequenceIndex\":240},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.42660494612085,\"sequenceIndex\":320},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0634576586560818,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.477263865770193,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3974381772031725,\"sequenceIndex\":244},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.166501190133886,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.104286698719499,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.302648970276084,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.84404489446913,\"sequenceIndex\":62},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5
504370051176339],\"weight\":-2.7199946136143978,\"sequenceIndex\":249},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0571104757620615,\"sequenceIndex\":326},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.6484684774409843,\"sequenceIndex\":323},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1755997770182236,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1811638061099012,\"sequenceIndex\":253},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3333252255134944,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.126215061068308,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5944602841747246,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}},{\"sampler\":{\"weightedSamples\":[{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4409256727741692,\"sequenceIndex\":204},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44141159340224634,\"sequenceIndex\":186},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4462064848647759,\"sequenceIndex\":269},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4494976417413684,\"sequenceIndex\":39},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44739839725491637,\"sequenceIndex\":309},{\"poin
t\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.45814980462923,\"sequenceIndex\":6},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4465049493259178,\"sequenceIndex\":409},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46110883832824157,\"sequenceIndex\":488},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.44993579465894684,\"sequenceIndex\":36},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46488857673788037,\"sequenceIndex\":4},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49074282052202073,\"sequenceIndex\":23},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47334317413554183,\"sequenceIndex\":353},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.539232476504889,\"sequenceIndex\":321},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4553384010672422,\"sequenceIndex\":114},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4523437016920186,\"sequenceIndex\":285},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5100957634833727,\"sequenceIndex\":472},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5068308677332861,\"sequenceIndex\":8},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.49668128340638473,\"sequenceIndex\":291},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6184197122942443,\"sequenceIndex\":154},{\"point\"
:[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6059434099730148,\"sequenceIndex\":370},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5836385148507931,\"sequenceIndex\":168},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5934252114362919,\"sequenceIndex\":179},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6245498040650852,\"sequenceIndex\":5},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.48538561360240506,\"sequenceIndex\":411},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.47404922020462015,\"sequenceIndex\":202},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5580616555274834,\"sequenceIndex\":106},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7248220228428799,\"sequenceIndex\":348},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.46947566428311316,\"sequenceIndex\":228},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.4909497640666451,\"sequenceIndex\":118},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.558455553090478,\"sequenceIndex\":417},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7825622633774046,\"sequenceIndex\":379},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5237379923075237,\"sequenceIndex\":442},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6340256601422767,\"sequenceIndex\":381},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5147255211926068,\"sequenceIndex\":139},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5344680089711398,\"sequenceIndex\":141},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6661379932912055,\"sequenceIndex\":467},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7078187583371991,\"sequenceIndex\":262},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6675708530451963,\"sequenceIndex\":367},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.644765775954502,\"sequenceIndex\":407},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7089613071686106,\"sequenceIndex\":163},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6243174925282583,\"sequenceIndex\":164},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7844655864551862,\"sequenceIndex\":21},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6863345508500148,\"sequenceIndex\":304},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1559087655796392,\"sequenceIndex\":22},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8321181968175589,\"sequenceIndex\":182},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1197061181292343,\"sequenceIndex\":479},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6812591257570456,\"sequenceIndex\":190},{\"point\
":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5764847181523284,\"sequenceIndex\":193},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5691231626853785,\"sequenceIndex\":459},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5054480810281154,\"sequenceIndex\":263},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6682314266174023,\"sequenceIndex\":207},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6757061446886466,\"sequenceIndex\":476},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8043912893742795,\"sequenceIndex\":107},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8304904327056604,\"sequenceIndex\":416},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8219473051333882,\"sequenceIndex\":111},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6314293026766652,\"sequenceIndex\":225},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9357620591398222,\"sequenceIndex\":29},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6006641107451602,\"sequenceIndex\":296},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6005996526905083,\"sequenceIndex\":239},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8861647109555236,\"sequenceIndex\":435},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6044710395679177,\"sequenceIndex\":123},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8089028679212585,\"sequenceIndex\":250},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2678963043689726,\"sequenceIndex\":334},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.5627006534361164,\"sequenceIndex\":301},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6809055183138144,\"sequenceIndex\":469},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7789987185355638,\"sequenceIndex\":365},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0432741666975052,\"sequenceIndex\":451},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5090016309402974,\"sequenceIndex\":137},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7870492078362121,\"sequenceIndex\":298},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6320518588543987,\"sequenceIndex\":460},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7951544941073423,\"sequenceIndex\":441},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7027284167497256,\"sequenceIndex\":383},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8419374897803427,\"sequenceIndex\":18},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4341226559827294,\"sequenceIndex\":149},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.317840329655498,\"sequenceIndex\":461},{\"point
\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.050167116369164,\"sequenceIndex\":152},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6873079448166308,\"sequenceIndex\":312},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1036813488615216,\"sequenceIndex\":414},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8207642530282121,\"sequenceIndex\":385},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9519308763909178,\"sequenceIndex\":261},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8100706482364007,\"sequenceIndex\":405},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7317757374064361,\"sequenceIndex\":41},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4209428299699403,\"sequenceIndex\":166},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1782432808452072,\"sequenceIndex\":293},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6042627822908844,\"sequenceIndex\":408},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.697394441565312,\"sequenceIndex\":43},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1379644695066542,\"sequenceIndex\":87},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4599925786737307,\"sequenceIndex\":313},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3386710900788985,\"sequenceIndex\":89},{\"point\":[
0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8402980850734034,\"sequenceIndex\":44},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3839482542021508,\"sequenceIndex\":294},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2940730925296122,\"sequenceIndex\":470},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1438774474585274,\"sequenceIndex\":437},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3979504713313229,\"sequenceIndex\":94},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8840891792369917,\"sequenceIndex\":188},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6404097147497101,\"sequenceIndex\":48},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6777803655396072,\"sequenceIndex\":195},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8508277529004109,\"sequenceIndex\":197},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6136425799020759,\"sequenceIndex\":198},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7089345725892877,\"sequenceIndex\":50},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.205497170805399,\"sequenceIndex\":399},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1693751387679903,\"sequenceIndex\":393},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9192059365876626,\"sequenceIndex\":434},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3134353186296306,\"sequenceIndex\":209},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0941908833242686,\"sequenceIndex\":308},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5200725584951893,\"sequenceIndex\":338},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1492767494364005,\"sequenceIndex\":215},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.792213851625103,\"sequenceIndex\":310},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0423831716948277,\"sequenceIndex\":109},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9100796792265119,\"sequenceIndex\":220},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1580443023740443,\"sequenceIndex\":258},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6424971479660729,\"sequenceIndex\":401},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4448882167463197,\"sequenceIndex\":328},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1014890559453052,\"sequenceIndex\":511},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.970043393520296,\"sequenceIndex\":480},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9781647798651729,\"sequenceIndex\":233},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6228838910653001,\"sequenceIndex\":234},{\"point\":[0.7
30967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.738634349410046,\"sequenceIndex\":237},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9511145657309665,\"sequenceIndex\":487},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3790053486442833,\"sequenceIndex\":395},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9887440649009485,\"sequenceIndex\":120},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6972641502995232,\"sequenceIndex\":245},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8274627235414078,\"sequenceIndex\":429},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.898193728074703,\"sequenceIndex\":404},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9229900117957947,\"sequenceIndex\":251},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4811959180664591,\"sequenceIndex\":289},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8110207132576235,\"sequenceIndex\":347},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8335254029865209,\"sequenceIndex\":507},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.777267842085972,\"sequenceIndex\":377},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.8738314517993993,\"sequenceIndex\":65},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.83381615620499,\"sequenceIndex\":130},{\"point\":[0.73096
7787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1050732500294858,\"sequenceIndex\":378},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9644785937600389,\"sequenceIndex\":423},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.797079780366956,\"sequenceIndex\":317},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.331501131106359,\"sequenceIndex\":306},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2950013469339243,\"sequenceIndex\":136},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9661232016594452,\"sequenceIndex\":34},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1610397172367035,\"sequenceIndex\":69},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2247714703704515,\"sequenceIndex\":468},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9781843655752824,\"sequenceIndex\":70},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.6655758204369446,\"sequenceIndex\":140},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5035018589526077,\"sequenceIndex\":142},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5386350820213814,\"sequenceIndex\":143},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3917310419676432,\"sequenceIndex\":72},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1142498958878257,\"sequenceIndex\":333},{\"point\":[0.730967787
376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.094541745920552,\"sequenceIndex\":146},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.842533329905653,\"sequenceIndex\":495},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.9095254161376185,\"sequenceIndex\":37},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.6167884802088333,\"sequenceIndex\":273},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3392379569365365,\"sequenceIndex\":75},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.398652663777298,\"sequenceIndex\":267},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9900246258684975,\"sequenceIndex\":76},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3297813335057211,\"sequenceIndex\":493},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7081063567410634,\"sequenceIndex\":9},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2629966000263675,\"sequenceIndex\":375},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.0474484838040876,\"sequenceIndex\":336},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0331201422463807,\"sequenceIndex\":157},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.387353413792321,\"sequenceIndex\":128},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3492735862051715,\"sequenceIndex\":356},{\"point\":[0.730967787376657,
0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5586565838972595,\"sequenceIndex\":10},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.480863072428841,\"sequenceIndex\":161},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.66145272828821,\"sequenceIndex\":162},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3762633901325767,\"sequenceIndex\":276},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9181901200844955,\"sequenceIndex\":477},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4445827663906983,\"sequenceIndex\":278},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8482420245419044,\"sequenceIndex\":83},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.001388974568315,\"sequenceIndex\":167},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.076261712087481,\"sequenceIndex\":84},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3236806271341166,\"sequenceIndex\":359},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6183992056548229,\"sequenceIndex\":170},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.393734939718761,\"sequenceIndex\":171},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3406839333207512,\"sequenceIndex\":374},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.370664665841993,\"sequenceIndex\":361},{\"point\":[0.730967787376657,0.240536
41567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.573225819902174,\"sequenceIndex\":174},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7383053830156,\"sequenceIndex\":175},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8975481532352427,\"sequenceIndex\":502},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5823943107932703,\"sequenceIndex\":177},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0235675975636696,\"sequenceIndex\":456},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.496867960498022,\"sequenceIndex\":305},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-6.899071933277866,\"sequenceIndex\":180},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.207203876142821,\"sequenceIndex\":1},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.437116334634678,\"sequenceIndex\":373},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2960109973168406,\"sequenceIndex\":412},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.135937793123457,\"sequenceIndex\":287},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9453875210660276,\"sequenceIndex\":185},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.397697716806775,\"sequenceIndex\":463},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3966591362774687,\"sequenceIndex\":446},{\"point\":[0.730967787376657,0.2405364156714858
7,0.6374174253501083,0.5504370051176339],\"weight\":-3.8178728315568486,\"sequenceIndex\":47},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.7138334885928703,\"sequenceIndex\":189},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.5507724712621347,\"sequenceIndex\":95},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.762226206111389,\"sequenceIndex\":191},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.590290755756962,\"sequenceIndex\":96},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.463145270623116,\"sequenceIndex\":192},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9823469755698535,\"sequenceIndex\":24},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1856297022772333,\"sequenceIndex\":509},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4495460890835015,\"sequenceIndex\":98},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.333440939297787,\"sequenceIndex\":372},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.364960396037589,\"sequenceIndex\":49},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7798808950015981,\"sequenceIndex\":422},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4005553374664401,\"sequenceIndex\":200},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9515409187718223,\"sequenceIndex\":201},{\"point\":[0.730967787376657,0.24053641567148587,0.63741
74253501083,0.5504370051176339],\"weight\":-1.3120918996747681,\"sequenceIndex\":492},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0423812636701464,\"sequenceIndex\":503},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.764070196842062,\"sequenceIndex\":51},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.30552539118187,\"sequenceIndex\":205},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.5664657046343455,\"sequenceIndex\":103},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.955790261598473,\"sequenceIndex\":341},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-4.408358767274582,\"sequenceIndex\":270},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.1936819649930515,\"sequenceIndex\":415},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.318154029509137,\"sequenceIndex\":105},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4539073837438405,\"sequenceIndex\":425},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2331850579769528,\"sequenceIndex\":53},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.3671750740373843,\"sequenceIndex\":260},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.2845845385468546,\"sequenceIndex\":214},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.891213103008755,\"sequenceIndex\":498},{\"point\":[0.730967787376657,0.24053641567148587,0.637417425350
1083,0.5504370051176339],\"weight\":-3.8227392066347954,\"sequenceIndex\":216},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.765478924296321,\"sequenceIndex\":295},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.301943270144005,\"sequenceIndex\":218},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.2043120138803984,\"sequenceIndex\":302},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.4751096289359236,\"sequenceIndex\":387},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.3628855424999498,\"sequenceIndex\":344},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4700014460498712,\"sequenceIndex\":481},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.593064322037851,\"sequenceIndex\":223},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-5.404662823182272,\"sequenceIndex\":14},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.7663634712594724,\"sequenceIndex\":224},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6594589085081752,\"sequenceIndex\":350},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6123649380325624,\"sequenceIndex\":281},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.950892528522452,\"sequenceIndex\":501},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.6681707182328864,\"sequenceIndex\":229},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083
,0.5504370051176339],\"weight\":-1.963066337379958,\"sequenceIndex\":431},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.8447313904311504,\"sequenceIndex\":318},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.4485056751485947,\"sequenceIndex\":28},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.032888062432252,\"sequenceIndex\":406},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3868588278321516,\"sequenceIndex\":450},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.71605398814051,\"sequenceIndex\":283},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0243077887713796,\"sequenceIndex\":345},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.28203973270908,\"sequenceIndex\":400},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.1990204575703178,\"sequenceIndex\":266},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.9993902861412431,\"sequenceIndex\":352},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.596678725235261,\"sequenceIndex\":30},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.903086056325348,\"sequenceIndex\":427},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.0952910328081917,\"sequenceIndex\":449},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.1175046523190058,\"sequenceIndex\":330},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.55043
70051176339],\"weight\":-1.9232312661216866,\"sequenceIndex\":122},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-0.9176542924546596,\"sequenceIndex\":462},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.273726560826445,\"sequenceIndex\":452},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.021034675004877,\"sequenceIndex\":490},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.0787283022619043,\"sequenceIndex\":440},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.3453793957739955,\"sequenceIndex\":329},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.4734205658454713,\"sequenceIndex\":392},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.376469898617029,\"sequenceIndex\":457},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-1.492587210947598,\"sequenceIndex\":126},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.581588668660513,\"sequenceIndex\":265},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-2.006071877755358,\"sequenceIndex\":254},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2027951304914786,\"sequenceIndex\":255},{\"point\":[0.730967787376657,0.24053641567148587,0.6374174253501083,0.5504370051176339],\"weight\":-3.2294894102291583,\"sequenceIndex\":512}],\"sampleSize\":256,\"lambda\":3.90625E-4,\"random\":{},\"entriesSeen\":512},\"tree\":{\"storeSequenceIndexesEnabled\":false,\"centerOfMassEnabled\":false,\"random\":{}}}],\"totalUpdates\":512}}}"
  },
  {
    "path": "Java/spotless-eclipse.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<profiles version=\"18\">\n    <profile kind=\"CodeFormatterProfile\" name=\"spotless\" version=\"18\">\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_ellipsis\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_for_statment\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_logical_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_method_invocation\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_after_imports\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags\" value=\"insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.format_javadoc_comments\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indentation.size\" value=\"8\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_default\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.align_with_spaces\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.disabling_tag\" value=\"@formatter:off\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.continuation_indentation\" value=\"2\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_before_code_block\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_switch_case_expressions\" value=\"do not insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.alignment_for_enum_constants\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_imports\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_method_body\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_after_package\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_if_while_statement\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.indent_root_tags\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.enabling_tag\" value=\"@formatter:on\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.count_line_length_from_starting_position\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_case\" value=\"insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.wrap_before_multiplicative_operator\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_parameterized_type_references\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_logical_operator\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression\" value=\"do not insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.keep_annotation_declaration_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_enum_constant\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_multiplicative_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_statements_compare_to_block\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.align_tags_descriptions_grouped\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.line_length\" value=\"80\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.use_on_off_tags\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_method_body_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression\" value=\"do not insert\"/>\n     
   <setting id=\"org.eclipse.jdt.core.formatter.keep_loop_body_block_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_method_declaration\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_abstract_method\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_enum_constant_declaration_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.align_variable_declarations_on_columns\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_type_declaration_on_one_line\" value=\"one_line_never\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_catch_clause\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_additive_operator\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_relational_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_multiplicative_operator\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_anonymous_type_declaration_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_switch_case_expressions\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_shift_operator\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header\" value=\"true\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_block\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_lambda_body\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_at_end_of_code_block\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.compact_else_if\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_bitwise_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_type_parameters\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation\" value=\"16\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_compact_loops\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_simple_for_body_on_same_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_relational_operator\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_unary_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_annotation\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_ellipsis\" value=\"do not insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_after_additive_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_string_concatenation\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.format_line_comments\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.text_block_indentation\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.align_type_members_on_columns\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_assignment\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_module_statements\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_after_code_block\" value=\"0\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.comment.align_tags_names_descriptions\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_if_then_body_block_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_conditional_expression\" value=\"80\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.align_assignment_statements_on_columns\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_block_in_case\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_arrow_in_switch_default\" value=\"insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.insert_new_line_between_different_tags\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_conditional_expression_chain\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.format_header\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_additive_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_method_declaration\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.join_wrapped_lines\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_conditional_operator\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_shift_operator\" 
value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.align_fields_grouping_blank_lines\" value=\"2147483647\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_bitwise_operator\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_resources_in_try\" value=\"80\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_try_clause\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_code_block_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.tabulation.size\" value=\"4\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_bitwise_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference\" value=\"do not insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.format_source_code\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_field\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer\" value=\"2\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_method\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_assignment_operator\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_not_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_switch\" value=\"end_of_line\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_type_annotation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.format_html\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_method_delcaration\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_compact_if\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_lambda_body_block_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_empty_lines\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_type_arguments\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_unary_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant\" value=\"do not insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_label\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments\" 
value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_arrow_in_switch_case\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_member_type\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_logical_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_bitwise_operator\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_semicolon\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_relational_operator\" value=\"true\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.format_block_comments\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_lambda_arrow\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.indent_tag_description\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_string_concatenation\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws\" value=\"insert\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_after_last_class_body_declaration\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_statements_compare_to_body\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_multiple_fields\" value=\"16\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_simple_while_body_on_same_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_array_initializer\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_logical_operator\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_shift_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_between_statement_group_in_switch\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments\" 
value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.parentheses_positions_in_lambda_declaration\" value=\"common_lines\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_shift_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_simple_do_while_body_on_same_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_enum_declaration_on_one_line\" value=\"one_line_never\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_enum_constant\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.brace_position_for_type_declaration\" value=\"end_of_line\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_multiplicative_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_before_package\" value=\"0\"/>\n        <setting 
id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.alignment_for_expressions_in_for_loop_header\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_additive_operator\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.keep_simple_getter_setter_on_one_line\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_string_concatenation\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_lambda_arrow\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration\" value=\"insert\"/>\n   
     <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.join_lines_in_comments\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.comment.indent_parameter_description\" value=\"false\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_code_block\" value=\"0\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.tabulation.char\" value=\"space\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_relational_operator\" value=\"insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.wrap_before_string_concatenation\" value=\"true\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.blank_lines_between_import_groups\" value=\"1\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.lineSplit\" value=\"120\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation\" value=\"do not insert\"/>\n        <setting id=\"org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch\" value=\"insert\"/>\n    </profile>\n</profiles>\n"
  },
  {
    "path": "Java/testutils/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n  <modelVersion>4.0.0</modelVersion>\n  <parent>\n    <artifactId>randomcutforest-parent</artifactId>\n    <groupId>software.amazon.randomcutforest</groupId>\n    <version>4.4.0</version>\n  </parent>\n\n  <artifactId>randomcutforest-testutils</artifactId>\n  <packaging>jar</packaging>\n</project>\n"
  },
  {
    "path": "Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/ExampleDataSets.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.testutils;\n\nimport static java.lang.Math.PI;\nimport static java.lang.Math.cos;\nimport static java.lang.Math.sin;\n\nimport java.util.Random;\n\n/**\n * This class samples point from a mixture of 2 multi-variate normal\n * distribution with covariance matrices of the form sigma * I. One of the\n * normal distributions is considered the base distribution, the second is\n * considered the anomaly distribution, and there are random transitions between\n * the two.\n */\npublic class ExampleDataSets {\n\n    public static double[][] generateFan(int numberPerBlade, int numberOfBlades) {\n        if ((numberOfBlades > 12) || (numberPerBlade <= 0))\n            return null;\n        int newDimensions = 2;\n        int dataSize = numberOfBlades * numberPerBlade;\n\n        Random prg = new Random(0);\n        NormalMixtureTestData generator = new NormalMixtureTestData(0.0, 1.0, 0.0, 1.0, 0, 1);\n        double[][] data = generator.generateTestData(dataSize, newDimensions, 100);\n\n        double[][] transformedData = new double[data.length][newDimensions];\n        for (int j = 0; j < data.length; j++) {\n\n            // shrink\n\n            transformedData[j][0] = 0.05 * data[j][0];\n            transformedData[j][1] = 0.2 * data[j][1];\n            double toss = prg.nextDouble();\n\n            // rotate\n       
     int i = 0;\n            while (i < numberOfBlades + 1) {\n                if (toss < i * 1.0 / numberOfBlades) {\n                    double[] vec = rotateClockWise(transformedData[j], 2 * PI * i / numberOfBlades);\n                    transformedData[j][0] = vec[0] + 0.6 * sin(2 * PI * i / numberOfBlades);\n                    transformedData[j][1] = vec[1] + 0.6 * cos(2 * PI * i / numberOfBlades);\n                    break;\n                } else\n                    ++i;\n            }\n        }\n        return transformedData;\n\n    }\n\n    public static double[] rotateClockWise(double[] point, double theta) {\n        double[] result = new double[2];\n        result[0] = cos(theta) * point[0] + sin(theta) * point[1];\n        result[1] = -sin(theta) * point[0] + cos(theta) * point[1];\n        return result;\n    }\n\n    public static double[][] generate(int size) {\n        Random prg = new Random();\n        double[][] data = new double[size][2];\n\n        for (int i = 0; i < size; i++) {\n            boolean test = false;\n            while (!test) {\n                double x = 2 * prg.nextDouble() - 1;\n                double y = 2 * prg.nextDouble() - 1;\n                if (x * x + y * y <= 1) {\n                    if (y > 0) {\n                        if (x > 0 && ((x - 0.5) * (x - 0.5) + y * y) <= 0.25) {\n                            test = ((x - 0.5) * (x - 0.5) + y * y > 1.0 / 32) && (prg.nextDouble() < 0.6);\n                        }\n                    } else {\n                        if (x > 0) {\n                            if ((x - 0.5) * (x - 0.5) + y * y > 1.0 / 32) {\n                                test = ((x - 0.5) * (x - 0.5) + y * y < 0.25) || (prg.nextDouble() < 0.4);\n                            }\n                        } else {\n                            test = ((x + 0.5) * (x + 0.5) + y * y > 0.25) && (prg.nextDouble() < 0.2);\n                        }\n                    }\n                }\n                if 
(test) {\n                    data[i][0] = x;\n                    data[i][1] = y;\n                }\n            }\n        }\n        return data;\n    }\n}\n"
  },
  {
    "path": "Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/MultiDimDataWithKey.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.testutils;\n\npublic class MultiDimDataWithKey {\n    public double[][] data;\n    public int[] changeIndices;\n    public double[][] changes;\n\n    public MultiDimDataWithKey(double[][] data, int[] changeIndices, double[][] changes) {\n        this.data = data;\n        this.changeIndices = changeIndices;\n        this.changes = changes;\n    }\n}\n"
  },
  {
    "path": "Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/NormalMixtureTestData.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.testutils;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\n/**\n * This class samples point from a mixture of 2 multi-variate normal\n * distribution with covariance matrices of the form sigma * I. One of the\n * normal distributions is considered the base distribution, the second is\n * considered the anomaly distribution, and there are random transitions between\n * the two.\n */\npublic class NormalMixtureTestData {\n\n    private final double baseMu;\n    private final double baseSigma;\n    private final double anomalyMu;\n    private final double anomalySigma;\n    private final double transitionToAnomalyProbability;\n    private final double transitionToBaseProbability;\n\n    public NormalMixtureTestData(double baseMu, double baseSigma, double anomalyMu, double anomalySigma,\n            double transitionToAnomalyProbability, double transitionToBaseProbability) {\n        this.baseMu = baseMu;\n        this.baseSigma = baseSigma;\n        this.anomalyMu = anomalyMu;\n        this.anomalySigma = anomalySigma;\n        this.transitionToAnomalyProbability = transitionToAnomalyProbability;\n        this.transitionToBaseProbability = transitionToBaseProbability;\n    }\n\n    public NormalMixtureTestData() {\n        this(0.0, 1.0, 4.0, 2.0, 0.01, 0.3);\n    }\n\n    public 
NormalMixtureTestData(double baseMu, double anomalyMu) {\n        this(baseMu, 1.0, anomalyMu, 2.0, 0.01, 0.3);\n    }\n\n    public double[][] generateTestData(int numberOfRows, int numberOfColumns) {\n        return generateTestData(numberOfRows, numberOfColumns, 0);\n    }\n\n    public double[][] generateTestData(int numberOfRows, int numberOfColumns, int seed) {\n        double[][] result = new double[numberOfRows][numberOfColumns];\n        boolean anomaly = false;\n\n        NormalDistribution dist;\n        if (seed != 0)\n            dist = new NormalDistribution(new Random(seed));\n        else\n            dist = new NormalDistribution(new Random());\n\n        for (int i = 0; i < numberOfRows; i++) {\n            if (!anomaly) {\n                fillRow(result[i], dist, baseMu, baseSigma);\n                if (Math.random() < transitionToAnomalyProbability) {\n                    anomaly = true;\n                }\n            } else {\n                fillRow(result[i], dist, anomalyMu, anomalySigma);\n                if (Math.random() < transitionToBaseProbability) {\n                    anomaly = false;\n                }\n            }\n        }\n\n        return result;\n    }\n\n    public MultiDimDataWithKey generateTestDataWithKey(int numberOfRows, int numberOfColumns, int seed) {\n        double[][] resultData = new double[numberOfRows][numberOfColumns];\n        int[] change = new int[numberOfRows];\n        int numberOfChanges = 0;\n        boolean anomaly = false;\n\n        NormalDistribution dist;\n        if (seed != 0)\n            dist = new NormalDistribution(new Random(seed));\n        else\n            dist = new NormalDistribution(new Random());\n\n        for (int i = 0; i < numberOfRows; i++) {\n            if (!anomaly) {\n                fillRow(resultData[i], dist, baseMu, baseSigma);\n                if (Math.random() < transitionToAnomalyProbability) {\n                    change[numberOfChanges++] = i + 1; // next item is 
different\n                    anomaly = true;\n                }\n            } else {\n                fillRow(resultData[i], dist, anomalyMu, anomalySigma);\n                if (Math.random() < transitionToBaseProbability) {\n                    anomaly = false;\n                    change[numberOfChanges++] = i + 1; // next item is different\n                }\n            }\n        }\n\n        return new MultiDimDataWithKey(resultData, Arrays.copyOf(change, numberOfChanges), null);\n    }\n\n    private void fillRow(double[] row, NormalDistribution dist, double mu, double sigma) {\n        for (int j = 0; j < row.length; j++) {\n            row[j] = dist.nextDouble(mu, sigma);\n        }\n    }\n\n    public static class NormalDistribution {\n        private final Random rng;\n        private final double[] buffer;\n        private int index;\n\n        public NormalDistribution(Random rng) {\n            this.rng = rng;\n            buffer = new double[2];\n            index = 0;\n        }\n\n        public double nextDouble() {\n            if (index == 0) {\n                // apply the Box-Muller transform to produce Normal variates\n                double u = rng.nextDouble();\n                double v = rng.nextDouble();\n                double r = Math.sqrt(-2 * Math.log(u));\n                buffer[0] = r * Math.cos(2 * Math.PI * v);\n                buffer[1] = r * Math.sin(2 * Math.PI * v);\n            }\n\n            double result = buffer[index];\n            index = (index + 1) % 2;\n\n            return result;\n        }\n\n        public double nextDouble(double mu, double sigma) {\n            return mu + sigma * nextDouble();\n        }\n    }\n}\n"
  },
  {
    "path": "Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/ShingledData.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.testutils;\n\nimport static java.lang.Math.PI;\n\nimport java.util.Random;\n\npublic class ShingledData {\n\n    public static double[][] generateShingledData(int size, int period, int dimensions, long seed) {\n        double[][] answer = new double[size][];\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[] history = new double[dimensions];\n        int count = 0;\n        double[] data = getData(size + dimensions - 1, period, 100, 5, seed);\n        for (int j = 0; j < size + dimensions - 1; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n            entryIndex = (entryIndex + 1) % dimensions;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n                answer[count++] = getShinglePoint(history, entryIndex, dimensions);\n            }\n        }\n        return answer;\n    }\n\n    private static double[] getShinglePoint(double[] recentPointsSeen, int indexOfOldestPoint, int shingleLength) {\n        double[] shingledPoint = new double[shingleLength];\n        int i = 0;\n        for (int j = 0; j < shingleLength; ++j) {\n            double point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];\n            
shingledPoint[i++] = point;\n\n        }\n        return shingledPoint;\n    }\n\n    private static double[] getData(int num, int period, double amplitude, double noise, long seed) {\n\n        double[] data = new double[num];\n        Random noiseprg = new Random(seed);\n        for (int i = 0; i < num; i++) {\n            data[i] = amplitude * Math.cos(2 * PI * (i + 50) / period) + noise * noiseprg.nextDouble();\n            if (noiseprg.nextDouble() < 0.01) {\n                double change = noiseprg.nextDouble() < 0.5 ? 10 * noise : -10 * noise;\n                data[i] += change;\n                System.out.println(\" timestamp \" + i + \" changing by \" + change);\n            }\n\n        }\n\n        return data;\n    }\n}\n"
  },
  {
    "path": "Java/testutils/src/main/java/com/amazon/randomcutforest/testutils/ShingledMultiDimDataWithKeys.java",
    "content": "/*\n * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\").\n * You may not use this file except in compliance with the License.\n * A copy of the License is located at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * or in the \"license\" file accompanying this file. This file is distributed\n * on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either\n * express or implied. See the License for the specific language governing\n * permissions and limitations under the License.\n */\n\npackage com.amazon.randomcutforest.testutils;\n\nimport static java.lang.Math.PI;\n\nimport java.util.Arrays;\nimport java.util.Random;\n\npublic class ShingledMultiDimDataWithKeys {\n\n    public static MultiDimDataWithKey generateShingledDataWithKey(int size, int period, int shingleSize,\n            int baseDimension, long seed) {\n\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[][] history = new double[shingleSize][];\n        int count = 0;\n        MultiDimDataWithKey dataWithKeys = getMultiDimData(size + shingleSize - 1, period, 100, 5, seed, baseDimension);\n        double[][] answer = generateShingledData(dataWithKeys.data, shingleSize, baseDimension, false);\n        return new MultiDimDataWithKey(answer, dataWithKeys.changeIndices, dataWithKeys.changes);\n    }\n\n    public static double[][] generateShingledData(double[][] data, int shingleSize, int baseDimension,\n            boolean rotation) {\n        int size = data.length - shingleSize + 1;\n        double[][] answer = new double[size][];\n        int entryIndex = 0;\n        boolean filledShingleAtleastOnce = false;\n        double[][] history = new double[shingleSize][];\n        int count = 0;\n\n        for (int j = 0; j < size + shingleSize - 1; ++j) { // we stream here ....\n            history[entryIndex] = data[j];\n      
      entryIndex = (entryIndex + 1) % shingleSize;\n            if (entryIndex == 0) {\n                filledShingleAtleastOnce = true;\n            }\n            if (filledShingleAtleastOnce) {\n                int position = (rotation) ? 0 : entryIndex;\n                answer[count++] = getShinglePoint(history, position, shingleSize, baseDimension);\n            }\n        }\n        return answer;\n    }\n\n    private static double[] getShinglePoint(double[][] recentPointsSeen, int indexOfOldestPoint, int shingleLength,\n            int baseDimension) {\n        double[] shingledPoint = new double[shingleLength * baseDimension];\n        int count = 0;\n        for (int j = 0; j < shingleLength; ++j) {\n            double[] point = recentPointsSeen[(j + indexOfOldestPoint) % shingleLength];\n            for (int i = 0; i < baseDimension; i++) {\n                shingledPoint[count++] = point[i];\n            }\n        }\n        return shingledPoint;\n    }\n\n    public static MultiDimDataWithKey getMultiDimData(int num, int period, double amplitude, double noise, long seed,\n            int baseDimension) {\n        return getMultiDimData(num, period, amplitude, noise, seed, baseDimension, false);\n    }\n\n    public static MultiDimDataWithKey getMultiDimData(int num, int period, double amplitude, double noise, long seed,\n            int baseDimension, boolean useSlope) {\n        return getMultiDimData(num, period, amplitude, noise, seed, baseDimension, 5.0, useSlope);\n    }\n\n    public static MultiDimDataWithKey getMultiDimData(int num, int period, double amplitude, double noise, long seed,\n            int baseDimension, double anomalyFactor, boolean useSlope) {\n        double[][] data = new double[num][];\n        double[][] changes = new double[num][];\n        int[] changedIndices = new int[num];\n        int counter = 0;\n        Random prg = new Random(seed);\n        Random noiseprg = new Random(prg.nextLong());\n        double[] phase = 
new double[baseDimension];\n        double[] amp = new double[baseDimension];\n        double[] slope = new double[baseDimension];\n        double[] shift = new double[baseDimension];\n\n        for (int i = 0; i < baseDimension; i++) {\n            phase[i] = prg.nextInt(period);\n            if (useSlope) {\n                shift[i] = (4 * prg.nextDouble() - 1) * amplitude;\n            }\n            amp[i] = (1 + 0.2 * prg.nextDouble()) * amplitude;\n            if (useSlope) {\n                slope[i] = (0.25 - prg.nextDouble() * 0.5) * amplitude / period;\n            }\n        }\n\n        for (int i = 0; i < num; i++) {\n            data[i] = new double[baseDimension];\n            boolean flag = (noiseprg.nextDouble() < 0.01);\n            double[] newChange = new double[baseDimension];\n            boolean used = false;\n            for (int j = 0; j < baseDimension; j++) {\n                data[i][j] = amp[j] * Math.cos(2 * PI * (i + phase[j]) / period) + slope[j] * i + shift[j];\n                // ensures that the noise does not cancel the anomaly or change it's magnitude\n                if (flag && noiseprg.nextDouble() < 0.3) {\n                    double factor = anomalyFactor * (1 + noiseprg.nextDouble());\n                    double change = noiseprg.nextDouble() < 0.5 ? factor * noise : -factor * noise;\n                    data[i][j] += newChange[j] = change;\n                    used = true;\n                } else {\n                    data[i][j] += noise * (2 * noiseprg.nextDouble() - 1);\n                }\n            }\n            if (used) {\n                changedIndices[counter] = i;\n                changes[counter++] = newChange;\n            }\n        }\n\n        return new MultiDimDataWithKey(data, Arrays.copyOf(changedIndices, counter), Arrays.copyOf(changes, counter));\n    }\n}\n"
  },
  {
    "path": "LICENSE",
    "content": "\n                                 Apache License\n                           Version 2.0, January 2004\n                        http://www.apache.org/licenses/\n\n   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction,\n      and distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by\n      the copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all\n      other entities that control, are controlled by, or are under common\n      control with that entity. For the purposes of this definition,\n      \"control\" means (i) the power, direct or indirect, to cause the\n      direction or management of such entity, whether by contract or\n      otherwise, or (ii) ownership of fifty percent (50%) or more of the\n      outstanding shares, or (iii) beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity\n      exercising permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation\n      source, and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but\n      not limited to compiled object code, generated documentation,\n      and conversions to other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or\n      Object form, made available under the License, as indicated by a\n      copyright notice that is included in or attached to the work\n      (an example is provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object\n      
form, that is based on (or derived from) the Work and for which the\n      editorial revisions, annotations, elaborations, or other modifications\n      represent, as a whole, an original work of authorship. For the purposes\n      of this License, Derivative Works shall not include works that remain\n      separable from, or merely link (or bind by name) to the interfaces of,\n      the Work and Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including\n      the original version of the Work and any modifications or additions\n      to that Work or Derivative Works thereof, that is intentionally\n      submitted to Licensor for inclusion in the Work by the copyright owner\n      or by an individual or Legal Entity authorized to submit on behalf of\n      the copyright owner. For the purposes of this definition, \"submitted\"\n      means any form of electronic, verbal, or written communication sent\n      to the Licensor or its representatives, including but not limited to\n      communication on electronic mailing lists, source code control systems,\n      and issue tracking systems that are managed by, or on behalf of, the\n      Licensor for the purpose of discussing and improving the Work, but\n      excluding communication that is conspicuously marked or otherwise\n      designated in writing by the copyright owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity\n      on behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      copyright license to reproduce, prepare Derivative Works of,\n      publicly display, publicly perform, sublicense, and distribute the\n      Work and such Derivative Works in Source or Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of\n      this License, each Contributor hereby grants to You a perpetual,\n      worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n      (except as stated in this section) patent license to make, have made,\n      use, offer to sell, sell, import, and otherwise transfer the Work,\n      where such license applies only to those patent claims licensable\n      by such Contributor that are necessarily infringed by their\n      Contribution(s) alone or by combination of their Contribution(s)\n      with the Work to which such Contribution(s) was submitted. If You\n      institute patent litigation against any entity (including a\n      cross-claim or counterclaim in a lawsuit) alleging that the Work\n      or a Contribution incorporated within the Work constitutes direct\n      or contributory patent infringement, then any patent licenses\n      granted to You under this License for that Work shall terminate\n      as of the date such litigation is filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the\n      Work or Derivative Works thereof in any medium, with or without\n      modifications, and in Source or Object form, provided that You\n      meet the following conditions:\n\n      (a) You must give any other recipients of the Work or\n          Derivative Works a copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices\n          stating that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works\n          that You distribute, all copyright, patent, trademark, and\n          attribution notices from the Source form of the Work,\n          excluding those notices that do not pertain to any part of\n          the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n          distribution, then any Derivative Works that You distribute must\n          include a readable copy of the attribution notices contained\n          within such NOTICE file, excluding those notices that do not\n          pertain to any part of the Derivative Works, in at least one\n          of the following places: within a NOTICE text file distributed\n          as part of the Derivative Works; within the Source form or\n          documentation, if provided along with the Derivative Works; or,\n          within a display generated by the Derivative Works, if and\n          wherever such third-party notices normally appear. The contents\n          of the NOTICE file are for informational purposes only and\n          do not modify the License. 
You may add Your own attribution\n          notices within Derivative Works that You distribute, alongside\n          or as an addendum to the NOTICE text from the Work, provided\n          that such additional attribution notices cannot be construed\n          as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and\n      may provide additional or different license terms and conditions\n      for use, reproduction, or distribution of Your modifications, or\n      for any such Derivative Works as a whole, provided Your use,\n      reproduction, and distribution of the Work otherwise complies with\n      the conditions stated in this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise,\n      any Contribution intentionally submitted for inclusion in the Work\n      by You to the Licensor shall be under the terms and conditions of\n      this License, without any additional terms or conditions.\n      Notwithstanding the above, nothing herein shall supersede or modify\n      the terms of any separate license agreement you may have executed\n      with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n      names, trademarks, service marks, or product names of the Licensor,\n      except as required for reasonable and customary use in describing the\n      origin of the Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or\n      agreed to in writing, Licensor provides the Work (and each\n      Contributor provides its Contributions) on an \"AS IS\" BASIS,\n      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n      implied, including, without limitation, any warranties or conditions\n      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n      PARTICULAR PURPOSE. 
You are solely responsible for determining the\n      appropriateness of using or redistributing the Work and assume any\n      risks associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory,\n      whether in tort (including negligence), contract, or otherwise,\n      unless required by applicable law (such as deliberate and grossly\n      negligent acts) or agreed to in writing, shall any Contributor be\n      liable to You for damages, including any direct, indirect, special,\n      incidental, or consequential damages of any character arising as a\n      result of this License or out of the use or inability to use the\n      Work (including but not limited to damages for loss of goodwill,\n      work stoppage, computer failure or malfunction, or any and all\n      other commercial damages or losses), even if such Contributor\n      has been advised of the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing\n      the Work or Derivative Works thereof, You may choose to offer,\n      and charge a fee for, acceptance of support, warranty, indemnity,\n      or other liability obligations and/or rights consistent with this\n      License. However, in accepting such obligations, You may act only\n      on Your own behalf and on Your sole responsibility, not on behalf\n      of any other Contributor, and only if You agree to indemnify,\n      defend, and hold each Contributor harmless for any liability\n      incurred by, or claims asserted against, such Contributor by reason\n      of your accepting any such warranty or additional liability.\n\n   END OF TERMS AND CONDITIONS\n\n   APPENDIX: How to apply the Apache License to your work.\n\n      To apply the Apache License to your work, attach the following\n      boilerplate notice, with the fields enclosed by brackets \"[]\"\n      replaced with your own identifying information. 
(Don't include\n      the brackets!)  The text should be enclosed in the appropriate\n      comment syntax for the file format. We also recommend that a\n      file or class name and description of purpose be included on the\n      same \"printed page\" as the copyright notice for easier\n      identification within third-party archives.\n\n   Copyright [yyyy] [name of copyright owner]\n\n   Licensed under the Apache License, Version 2.0 (the \"License\");\n   you may not use this file except in compliance with the License.\n   You may obtain a copy of the License at\n\n       http://www.apache.org/licenses/LICENSE-2.0\n\n   Unless required by applicable law or agreed to in writing, software\n   distributed under the License is distributed on an \"AS IS\" BASIS,\n   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n   See the License for the specific language governing permissions and\n   limitations under the License.\n"
  },
  {
    "path": "NOTICE",
    "content": "RandomCutForest\nCopyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n"
  },
  {
    "path": "README.md",
    "content": "# Random Cut Forest by AWS\n\nThis repository contains implementations of the Random Cut Forest (RCF) probabilistic data structure.\nRCFs were originally developed at Amazon to use in a nonparametric anomaly detection algorithm for\nstreaming data. Later new algorithms based on RCFs were developed for density estimation, imputation,\nand forecasting.\n\nThe different directories correspond to equivalent implementations in different languages, and bindings to\nto those base implementations, using language specific features for greater flexibility of use. \n\nRandomCutForest in the randomcutforest-core package provides an estimation (say anomaly score, or extrapolation over a forecast horizon)\nand using that raw estimation can be challenging. The randomcutforest-parkservices package provides\nseveral capabilities (ThresholdedRandomCutForest, RCFCaster, respectively) for distilling the scores to a determination of\na potential anomaly or calibrated forecast respectively.\nThe package randomcutforest-examples showcases several example scenarios for using the repository. \nThey also provide examples for some of the parameter settings. Many of these examples are built in tests.\n\n## Documentation\n\n* Guha, S., Mishra, N., Roy, G., & Schrijvers, O. (2016, June). Robust random cut forest based anomaly detection on streams. In *International conference on machine learning* (pp. 2712-2721).\n\n## Code of Conduct\n\nThis project has adopted an [Open Source Code of Conduct](https://aws.github.io/code-of-conduct).\n\n\n## Security issue notifications\n\nIf you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public GitHub issue.\n\n\n## Licensing\n\nSee the [LICENSE](./LICENSE) file for our project's licensing. 
We will ask you to confirm the licensing of your contribution.\n\n\n## Copyright\n\nCopyright 2019-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n"
  },
  {
    "path": "Rust/.gitignore",
    "content": "################################################################################\n# Additional Ignores\n################################################################################\n*~\n.vscode/\n\n################################################################################\n# GitHub Rust GitIgnore\n################################################################################\n# Generated by Cargo\n# will have compiled files and executables\ndebug/\ntarget/\n\n# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries\n# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html\nCargo.lock\n\n# These are backup files generated by rustfmt\n**/*.rs.bk"
  },
  {
    "path": "Rust/Cargo.toml",
    "content": "[package]\nname = \"rcf\"\nversion = \"4.0.0\"\nedition = \"2021\"\nlicense = \"Apache-2.0\"\n\n[profile.test]\nopt-level = 3\n\n[lib]\nname = \"rcflib\"\npath = \"src/lib.rs\"\n\n[[bin]]\nname = \"example\"\npath = \"src/example.rs\"\n# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n\n\n[dependencies]\nnum = \"0.4.1\"\nrayon = \"1.7\"\nrand = \"*\" \nrand_chacha = \"0.3.1\"\nrand_core = \"0.6.2\"\n\n[dev-dependencies]\nparameterized_test = \"0.1.0\"\n"
  },
  {
    "path": "Rust/README.md",
    "content": "# Random Cut Forest\n\nThis directory contains a Rust implementation, that mirrors the Java implementation \nof the Random Cut Forest (RCF).\n\nThe compact trees in the Java version of RCF2.0 was designed with memory \nsafety in mind. This rust implementation skips over that version and mirrors Java RCF3.0.\n\nRust provides memory safety and the parallel implementation of the same algorithm in different \nlanguages allows us to get a (qualified) verification of safety. At the same time, verifying the \nrandomness of randomized data structure is non-trivial, and the existing tests of the Java version \nprovide a qualified verification. We expect the different \nimplementations to remain in sync. \n\n\n\n"
  },
  {
    "path": "Rust/rustfmt.toml",
    "content": "# This configuration uses unstable features from rustfmt nightly to format imports. Follow these steps to run `cargo fmt`:\n# 1. Install the nightly toolchain\n#     rustup toolchains install nightly\n# 2. Install or update the nightly build of rustfmt\n#     rustup component add rustfmt --toolchain nightly\n# 3. Run `cargo fmt` with the `+nightly` flag\n#     `cargo +nightly fmt`\nunstable_features = true\n\ngroup_imports = \"StdExternalCrate\"\nimports_granularity = \"Crate\"\nreorder_imports = true"
  },
  {
    "path": "Rust/src/common/cluster.rs",
    "content": "use std::cmp::{max, min};\nuse std::ops::{Deref, Index};\nuse std::slice;\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\nuse rayon::iter::{IntoParallelRefMutIterator, ParallelIterator};\nuse crate::util::check_argument;\nuse crate::types::Result;\n\nconst PHASE2_THRESHOLD: usize = 2;\nconst SEPARATION_RATIO_FOR_MERGE: f64 = 0.8;\nconst WEIGHT_THRESHOLD: f64 = 1.25;\nconst LENGTH_BOUND: usize = 5000;\n/**\n* In the following, the goal is to cluster objects of type T, given a distance function over a pair\n* of references. The clustering need not create any new object of type T, but would find representative\n* points that express the cluster. The dictionary of points is expressed as pairs of reference and\n* corresponding weights of objects. However for vectors/slives over f32 and single representative scenario\n* the clustering does allow computation of an approximation median to create new \"central\" points.\n* Q is the struct that corresponds to a representative of a cluster. 
Note that\n* a cluster can have multiple representatives (see for example https://en.wikipedia.org/wiki/CURE_algorithm)\n* thus the entire information is a vector of tuples corresponding to (representative, weight of representative)\n*\n**/\n\npub trait IntermediateCluster<Z, Q, T: ?Sized> {\n    // aclsuter should provide a measure of the point set convered by the cluster\n    fn weight(&self) -> f64;\n    // A cluster is an extended object of a type different from the base type of a\n    // point indicated by T (and only the reference is used in the distance function\n    // every clustering algorithm implicitly/explicitly defines this function\n    // the return value is (distance, the integer identifier of the cluster representative)\n    // if the cluster is represented by a single value then that identifier would be 0\n    fn distance_to_point<'a>(&self, dictionary: &'a [Z], get_point: fn (usize, &'a [Z]) -> &'a T, point: &T, distance: fn(&T, &T) -> f64) -> (f64,usize);\n    // Likewise, the distance function needs to be extended (implicitly/explicitly)\n    // to a distance function between clusters\n    // the return value is the distance and the pair of identifiers in the corresponding clusters which\n    // define that closest distance\n    fn distance_to_cluster<'a>(&self, dictionary: &'a [Z], get_point: fn (usize, &'a [Z]) -> &'a T, other: &dyn IntermediateCluster<Z, Q, T>, distance: fn(&T, &T) -> f64) -> (f64, usize, usize);\n    // a function that assigns a point indexed by usize from a list of samples to\n    // the cluster; note the weight used in the function need not be the entire weight of the\n    // sampled point (for example in case of soft assignments)\n    fn add_point(&mut self, index: usize, weight: f32, dist: f64, representative: usize) -> Result<()>;\n    // given a set of previous assignments, recomputes the optimal set of representatives\n    // this is the classic optimization step for k-Means; but the analogue exists for every\n    
// clustering; note that it is possible that recompute does nothing\n    fn recompute<'a>(&mut self, points: &'a [Z], get_point: fn (usize,&'a [Z]) -> &'a T, distance: fn(&T, &T) -> f64) -> f64;\n    // resets  the statistics of a clusters; preparing for a sequence of add_point followed\n    // by recompute\n    fn reset(&mut self);\n    // a function that allows a cluster to absorb another cluster in an agglomerative \\\n    // clustering algorithm\n    fn extent_measure(&self) -> f64;\n    // a function that indicates cluster quality\n    fn average_radius(&self) -> f64;\n    // a function that absorbs another cluster\n    fn absorb<'a>(&mut self, dictionary: &'a [Z], get_point: fn (usize,&'a [Z]) -> &'a T, another: &dyn IntermediateCluster<Z, Q, T>, distance: fn(&T, &T) -> f64) -> Result<()>;\n    // a function to return a list of representatives corresponding to pairs (Q,weight)\n    fn representatives(&self) -> Vec<(Q, f32)>;\n    // a function that helps scale (by multiplication) the cluster weight\n    fn scale_weight(&mut self, factor: f64) -> Result<()>;\n}\n\n\nfn pick<T>(points: &[(T, f32)], wt: f32) -> usize {\n    let mut position = 0;\n    let mut running = wt;\n    for i in 0..points.len() {\n        position = i;\n        if running - points[i].1 <= 0.0 {\n            break;\n        } else {\n            running -= points[i].1;\n        }\n    }\n    position\n}\n\n\n\nfn median<'a, Z,Q:?Sized>(dimensions:usize,points: &'a [Z], get_point: fn(usize,&'a [Z]) -> &'a Q,list: &mut [(usize, f32)]) -> Vec<f32>\nwhere Q: Index<usize, Output = f32>\n{\n    let mut answer = vec![0.0f32;dimensions];\n    let total: f64 = list.iter().map(|a| a.1 as f64).sum();\n    for i in 0..dimensions {\n        list.sort_by(|a, b| get_point(a.0,&points)[i].partial_cmp(&get_point(b.0,points)[i]).unwrap());\n        let position = pick(list, (total / 2.0) as f32);\n        answer[i] = get_point(list[position].0,&points)[i];\n    }\n    answer\n}\n\n#[repr(C)]\npub struct 
Center {\n    representative: Vec<f32>,\n    weight: f64,\n    points: Vec<(usize, f32)>,\n    sum_of_radii: f64,\n}\n\nimpl Center {\n    pub fn new(_representative: usize, point:&[f32], weight: f32, _params:usize) -> Result<Self> {\n        Ok(Center {\n            representative: Vec::from(point),\n            weight: weight as f64,\n            points: Vec::new(),\n            sum_of_radii: 0.0,\n        })\n    }\n\n    pub fn new_as_vec(_representative: usize, point:&Vec<f32>, weight: f32,_params:usize) -> Result<Self> {\n        Ok(Center {\n            representative: point.clone(),\n            weight: weight as f64,\n            points: Vec::new(),\n            sum_of_radii: 0.0,\n        })\n    }\n\n    pub fn average_radius(&self) -> f64 {\n        if self.weight == 0.0 {\n            0.0\n        } else {\n            self.sum_of_radii / self.weight\n        }\n    }\n\n    pub fn distance(&self, point: &[f32], dist: fn(&[f32],&[f32]) -> f64) -> f64 {\n        (dist)(&self.representative,point)\n    }\n\n    pub fn representative(&self) -> Vec<f32> {\n        self.representative.clone()\n    }\n\n    pub fn weight(&self) -> f64 {\n        self.weight\n    }\n\n    fn re_optimize<'a, Z, Q:?Sized>(&mut self, points:&'a [Z],\n                      get_point: fn(usize,&'a [Z]) ->&'a Q,\n                      picker: fn(usize,&'a [Z],fn(usize,&'a [Z]) -> &'a Q, a: &mut [(usize,f32)]) -> Vec<f32>)\n    {\n        if self.weight == 0.0 {\n            let dimensions = self.representative.len();\n            // the following computes an approximate median\n            if self.points.len() < 500 {\n                self.representative = picker(dimensions, points, get_point, &mut self.points);\n            } else {\n                let mut samples = Vec::new();\n                let mut rng = ChaCha20Rng::seed_from_u64(0);\n                for i in 0..self.points.len() {\n                    if rng.gen::<f64>() < (200.0 * self.points[i].1 as f64) / self.weight {\n 
                       samples.push((self.points[i].0, 1.0));\n                    }\n                }\n                self.representative = picker(dimensions, points, get_point, &mut samples);\n            };\n        }\n    }\n\n    fn recompute_rad<'a,Z>(&mut self, points: &'a [Z], get_point: fn(usize,&'a [Z]) -> &'a [f32], dist: fn(&[f32],&[f32]) -> f64)\n        -> f64\n    {\n        let old_value = self.sum_of_radii;\n        self.sum_of_radii = 0.0;\n        for j in 0..self.points.len() {\n            self.sum_of_radii += self.points[j].1 as f64\n                * dist(&self.representative, get_point(self.points[j].0,&points)) as f64;\n        }\n        old_value - self.sum_of_radii\n    }\n\n    fn recompute_rad_vec<'a,Z>(&mut self, points: &'a [Z], get_point: fn(usize,&'a [Z]) -> &'a Vec<f32>, dist: fn(&Vec<f32>,&Vec<f32>) -> f64)\n                           -> f64\n    {\n        let old_value = self.sum_of_radii;\n        self.sum_of_radii = 0.0;\n        for j in 0..self.points.len() {\n            self.sum_of_radii += self.points[j].1 as f64\n                * dist(&self.representative, get_point(self.points[j].0,&points)) as f64;\n        }\n        old_value - self.sum_of_radii\n    }\n\n    fn add_point(&mut self, index: usize, weight: f32, dist: f64) {\n        self.points.push((index, weight));\n        self.weight += weight as f64;\n        self.sum_of_radii += weight as f64 * dist;\n    }\n\n    fn reset(&mut self) {\n        self.points.clear();\n        self.weight = 0.0;\n        self.sum_of_radii = 0.0;\n    }\n\n    fn absorb_list(&mut self, other_weight: f64, other_list: &Vec<(Vec<f32>,f32)>, closest: (f64,usize)){\n        let t = f64::exp(2.0 * (self.weight - other_weight) / (self.weight + other_weight));\n        let factor = t / (1.0 + t);\n        let dimensions = self.representative.len();\n        for i in 0..dimensions {\n            self.representative[i] = (factor * (self.representative[i] as f64)\n                + (1.0 - 
factor) * (other_list[closest.1].0[i] as f64)) as f32;\n        }\n\n        self.sum_of_radii += (self.weight * (1.0 - factor) + factor * other_weight) * closest.0;\n    }\n}\n\n\nimpl<Z> IntermediateCluster<Z,Vec<f32>, [f32]> for Center {\n    fn weight(&self) -> f64 {\n        self.weight()\n    }\n\n    fn scale_weight(&mut self, factor: f64) -> Result<()>{\n        check_argument(!factor.is_nan() && factor>0.0,\" has to be positive\")?;\n        self.weight = self.weight as f64 * factor;\n        Ok(())\n    }\n\n    fn distance_to_point<'a>(&self, _points:&'a [Z],_get_point: fn(usize,&'a [Z]) ->&'a [f32],point: &[f32], distance: fn(&[f32], &[f32]) -> f64) -> (f64,usize) {\n        ((distance)(&self.representative, point),0)\n    }\n\n    fn distance_to_cluster<'a>(\n        &self,\n        _points:&'a [Z],\n        _get_point: fn(usize,&'a [Z]) ->&'a [f32],\n        other: &dyn IntermediateCluster<Z,Vec<f32>, [f32]>,\n        distance: fn(&[f32], &[f32]) -> f64,\n    ) -> (f64,usize,usize) {\n        let tuple = other.distance_to_point(_points,_get_point,&self.representative, distance);\n        (tuple.0,0,tuple.1)\n    }\n\n    fn add_point(&mut self, index: usize, weight: f32, dist: f64, representative:usize) -> Result<()> {\n        check_argument(representative==0,\"can have only one representative\")?;\n        check_argument(!weight.is_nan() && weight >= 0.0f32, \"non-negative weight\")?;\n        self.add_point(index,weight,dist);\n        Ok(())\n    }\n\n    fn recompute<'a>(&mut self, points:&'a [Z],get_point: fn(usize,&'a [Z]) ->&'a [f32], distance: fn(&[f32], &[f32]) -> f64) -> f64 {\n        self.re_optimize(&points,get_point, median);\n        self.recompute_rad(&points,get_point, distance)\n    }\n\n    fn reset(&mut self) {\n        self.reset();\n    }\n\n    fn extent_measure(&self) -> f64 {\n        self.average_radius()\n    }\n\n    fn average_radius(&self) -> f64 {\n        self.average_radius()\n    }\n\n    fn absorb<'a>(\n        &mut 
self,\n        points:&'a [Z],\n        get_point: fn(usize,&'a [Z]) ->&'a [f32],\n        another: &dyn IntermediateCluster<Z, Vec<f32>, [f32]>,\n        distance: fn(&[f32], &[f32]) -> f64,\n    ) ->Result<()> {\n        let closest = another.distance_to_point(points, get_point, &self.representative, distance);\n        self.absorb_list(another.weight(), &another.representatives(), closest);\n        Ok(())\n    }\n\n    fn representatives(&self) -> Vec<(Vec<f32>, f32)> {\n        vec![(self.representative.clone(), self.weight as f32); 1]\n    }\n}\n\nimpl<Z> IntermediateCluster<Z,Vec<f32>, Vec<f32>> for Center {\n    fn weight(&self) -> f64 {\n        self.weight()\n    }\n\n    fn scale_weight(&mut self, factor: f64) -> Result<()>{\n        check_argument(!factor.is_nan() && factor>0.0,\" has to be positive\")?;\n        self.weight = self.weight as f64 * factor;\n        Ok(())\n    }\n\n    fn distance_to_point<'a>(&self, _points:&'a [Z],_get_point: fn(usize,&'a [Z]) ->&'a Vec<f32>,point: &Vec<f32>, distance: fn(&Vec<f32>, &Vec<f32>) -> f64) -> (f64,usize) {\n        ((distance)(&self.representative, point),0)\n    }\n\n    fn distance_to_cluster<'a>(\n        &self,\n        _points:&'a [Z],\n        _get_point: fn(usize,&'a [Z]) ->&'a Vec<f32>,\n        other: &dyn IntermediateCluster<Z,Vec<f32>, Vec<f32>>,\n        distance: fn(&Vec<f32>, &Vec<f32>) -> f64,\n    ) -> (f64,usize,usize) {\n        let tuple = other.distance_to_point(_points,_get_point,&self.representative, distance);\n        (tuple.0,0,tuple.1)\n    }\n\n    fn add_point(&mut self, index: usize, weight: f32, dist: f64, representative:usize) ->Result<()>{\n        check_argument(representative==0,\"can have only one representative\")?;\n        check_argument(!weight.is_nan() && weight >= 0.0f32, \"non-negative weight\")?;\n        self.add_point(index,weight,dist);\n        Ok(())\n    }\n\n    fn recompute<'a>(&mut self, points:&'a [Z],get_point: fn(usize,&'a [Z]) ->&'a Vec<f32>, distance: 
fn(&Vec<f32>, &Vec<f32>) -> f64) -> f64 {\n        self.re_optimize(&points,get_point, median);\n        self.recompute_rad_vec(&points,get_point, distance)\n    }\n\n    fn reset(&mut self) {\n        self.reset();\n    }\n\n    fn extent_measure(&self) -> f64 {\n        self.average_radius()\n    }\n\n    fn average_radius(&self) -> f64 {\n        self.average_radius()\n    }\n\n    fn absorb<'a>(\n        &mut self,\n        points:&'a [Z],\n        get_point: fn(usize,&'a [Z]) ->&'a Vec<f32>,\n        another: &dyn IntermediateCluster<Z, Vec<f32>, Vec<f32>>,\n        distance: fn(&Vec<f32>, &Vec<f32>) -> f64,\n    ) -> Result<()>{\n        let closest = another.distance_to_point(points,get_point, &self.representative,distance);\n        self.absorb_list(another.weight(),&another.representatives(),closest);\n        Ok(())\n    }\n\n    fn representatives(&self) -> Vec<(Vec<f32>, f32)> {\n        vec![(self.representative.clone(), self.weight as f32); 1]\n    }\n}\n\n\n\nfn process_point<'a,Z,U,Q,T :?Sized>(dictionary: &'a [Z], get_point: fn(usize,&'a [Z])->&'a T, index: usize, centers: &mut [U], weight : f32, distance: fn(&T, &T) -> f64) -> Result<()>\n    where\n        U: IntermediateCluster<Z,Q, T> + Send,\n        T: Sync,\n{\n    let mut dist = vec![(0.0, 1); centers.len()];\n    let mut min_distance = (f64::MAX, 1);\n    for j in 0..centers.len() {\n        dist[j] = centers[j].distance_to_point(dictionary, get_point,get_point(index,dictionary), distance);\n        check_argument(dist[j].0>=0.0,\" distances cannot be negative\")?;\n        if min_distance.0 > dist[j].0 {\n            min_distance = dist[j];\n        }\n    };\n    check_argument(min_distance.0>=0.0,\" distances cannot be negative\")?;\n    if min_distance.0 == 0.0 {\n        for j in 0..centers.len() {\n            if dist[j].0 == 0.0 {\n                centers[j].add_point(index, weight, 0.0, dist[j].1)?;\n            }\n        }\n    } else {\n        let mut sum = 0.0;\n        for j 
in 0..centers.len() {\n            if dist[j].0 <= WEIGHT_THRESHOLD * min_distance.0 {\n                sum += min_distance.0 / dist[j].0;\n            }\n        }\n        for j in 0..centers.len() {\n            if dist[j].0 <= WEIGHT_THRESHOLD * min_distance.0 {\n                centers[j].add_point(\n                    index,\n                    (weight as f64 * min_distance.0 / (sum * dist[j].0)) as f32,\n                    dist[j].0, dist[j].1\n                )?;\n            }\n        }\n    }\n    Ok(())\n}\n\n\nfn assign_and_recompute<'a, Z, Q, U, T: ?Sized>(\n    dictionary: &'a [Z],\n    weights: &'a [f32],\n    get_point: fn(usize,&'a [Z]) -> &'a T,\n    get_weight: fn(usize,&'a [Z],&'a [f32]) -> f32,\n    samples: &[(usize,f32)],\n    centers: &mut [U],\n    distance: fn(&T, &T) -> f64,\n    parallel_enabled: bool,\n) -> Result<f64>\n    where\n        U: IntermediateCluster<Z,Q, T> + Send,\n        T: Sync,\n        Z: Sync,\n{\n    for j in 0..centers.len() {\n        centers[j].reset();\n    }\n\n    if samples.len() == 0{\n        for i in 0..dictionary.len() {\n            process_point(dictionary,get_point,i,centers,get_weight(i,dictionary,weights),distance)?;\n        }\n    } else {\n        for i in 0..samples.len() {\n            process_point(dictionary,get_point,i,centers,samples[i].1,distance)?;\n        }\n    }\n\n    let gain: f64 = if parallel_enabled {\n        centers\n            .par_iter_mut()\n            .map(|x| x.recompute(dictionary, get_point,distance))\n            .sum()\n    } else {\n        centers\n            .iter_mut()\n            .map(|x| x.recompute(dictionary, get_point,distance))\n            .sum()\n    };\n    Ok(gain)\n}\n\n\nfn down_sample<'a,Z>(points: &'a [Z], weights:&'a [f32], get_weight: fn(usize,&'a [Z],&'a [f32]) -> f32, seed:u64, approximate_bound: usize) ->  Vec<(usize, f32)> {\n    let mut total_weight: f64 = 0.0;\n    for j in 0..points.len() {\n        total_weight += get_weight(j, 
&points, &weights) as f64;\n    };\n\n    let mut rng = ChaCha20Rng::seed_from_u64(seed as u64);\n    let mut sampled_points = Vec::new();\n    let mut remainder = 0.0f64;\n    for j in 0..points.len() {\n        let point_weight = get_weight(j,&points,&weights);\n        if point_weight > (0.005 * total_weight) as f32 {\n            sampled_points.push((j, point_weight));\n        } else {\n            remainder += point_weight as f64;\n        }\n    }\n    for j in 0..points.len() {\n        let point_weight = get_weight(j,&points,&weights);\n        if point_weight <= (0.005 * total_weight) as f32\n            && rng.gen::<f64>() < approximate_bound as f64 / (points.len() as f64)\n        {\n            let t = point_weight as f64\n                * (points.len() as f64 / approximate_bound as f64)\n                * (remainder / total_weight);\n            sampled_points.push((j, t as f32));\n        }\n    }\n    sampled_points\n}\n\n\nfn pick_from<'a,Z>(points: &'a [Z], weights: &'a [f32], get_weight: fn(usize,&'a [Z],&'a [f32]) -> f32, wt: f32) -> (usize,f32) {\n    let mut position = 0;\n    let mut weight = get_weight(position,points,weights);\n    let mut running = wt;\n    for i in 0..points.len() {\n        position = i;\n        weight = get_weight(position,points,weights);\n        if running - weight <= 0.0 {\n            break;\n        } else {\n            running -= weight;\n        }\n    }\n    (position,weight)\n}\n\n\n\npub fn general_iterative_clustering<'a, U, V, Q, Z, T: ?Sized>(\n    max_allowed: usize,\n    dictionary: &'a [Z],\n    weights: &'a [f32],\n    get_point: fn(usize,&'a [Z]) -> &'a T,\n    get_weight: fn(usize,&'a [Z],&'a [f32]) -> f32,\n    approximate_bound: usize,\n    seed: u64,\n    parallel_enabled: bool,\n    create: fn(usize, &'a T, f32, V) -> Result<U>,\n    create_params: V,\n    distance: fn(&T, &T) -> f64,\n    phase_2_reassign: bool,\n    enable_phase_3: bool,\n    overlap_parameter: f64,\n) -> 
Result<Vec<U>>\nwhere\n    U: IntermediateCluster<Z,Q, T> + Send,\n    T: Sync,\n    Z: Sync,\n    V: Copy,\n{\n    check_argument(max_allowed < 51, \" for large number of clusters, other methods may be better, consider recursively removing clusters\")?;\n    check_argument(max_allowed > 0, \" number of clusters has to be greater or equal to 1\")?;\n    let mut rng = ChaCha20Rng::seed_from_u64(seed);\n\n    let mut centers: Vec<U> = Vec::new();\n\n    let samples : Vec<(usize,f32)> = if dictionary.len() > approximate_bound {\n        down_sample(dictionary,weights,get_weight,rng.next_u64(),approximate_bound)\n    } else {\n        Vec::new()\n    };\n    //\n    // we now peform an initialization; the sampling corresponds a denoising\n    // note that if we are look at 2k random points, we are likely hitting every group of points\n    // with weight 1/k whp\n    let sampled_sum: f32 = if dictionary.len() > approximate_bound {\n        samples.iter().map(|x| x.1).sum()\n    } else {\n        (0..dictionary.len()).into_iter().map(|x| get_weight(x,dictionary,weights)).sum()\n    };\n\n    for _k in 0..10 * max_allowed {\n        let wt = (rng.gen::<f64>() * sampled_sum as f64) as f32;\n        let mut min_dist = f64::MAX;\n        let (index,weight) = if dictionary.len() > approximate_bound {\n            let i = pick(&samples, wt);\n            (i,samples[i].1)\n        } else {\n            pick_from(dictionary,weights,get_weight,wt)\n        };\n        for i in 0..centers.len() {\n            let t = centers[i].distance_to_point(dictionary,get_point,get_point(index,&dictionary), distance);\n            if t.0 < min_dist {\n                min_dist = t.0;\n            };\n        }\n        if min_dist > 0.0 {\n            centers.push(create(index,get_point(index,dictionary), weight,create_params)?);\n        }\n    }\n\n    assign_and_recompute(&dictionary,weights, get_point, get_weight,&samples,&mut centers, distance, parallel_enabled)?;\n\n    // sort in 
increasing order of weight\n    centers.sort_by(|o1, o2| o1.weight().partial_cmp(&o2.weight()).unwrap());\n    while centers.len() > 0 && centers[0].weight() == 0.0 {\n        centers.remove(0);\n    }\n\n    let mut phase_3_distance = 0.0f64;\n    let mut keep_reducing_centers = centers.len() > max(max_allowed, 1);\n\n    while keep_reducing_centers {\n        let mut measure = 0.0f64;\n        let mut measure_dist = f64::MAX;\n        let mut lower = 0;\n        let mut first = lower;\n        let mut second = lower + 1;\n        let mut found_merge = false;\n        while lower < centers.len() - 1 && !found_merge {\n            let mut min_dist = f64::MAX;\n            let mut min_nbr = usize::MAX;\n            for j in lower + 1..centers.len() {\n                let dist = centers[lower].distance_to_cluster(&dictionary,get_point,&centers[j], distance);\n                if min_dist > dist.0 {\n                    min_nbr = j;\n                    min_dist = dist.0;\n                }\n                let numerator = centers[lower].extent_measure()\n                    + centers[j].extent_measure()\n                    + phase_3_distance;\n                if numerator >= overlap_parameter * dist.0 {\n                    if measure * dist.0 < numerator {\n                        first = lower;\n                        second = j;\n                        if dist.0 == 0.0f64 {\n                            found_merge = true;\n                        } else {\n                            measure = numerator / dist.0;\n                        }\n                        measure_dist = dist.0;\n                    }\n                }\n            }\n            if lower == 0 && !found_merge {\n                measure_dist = min_dist;\n                second = min_nbr;\n            }\n            lower += 1;\n        }\n\n        let inital = centers.len();\n        if inital > max_allowed || found_merge || (enable_phase_3 && measure > overlap_parameter) {\n            
let (small, large) = centers.split_at_mut(second);\n            large.first_mut().unwrap().absorb(&dictionary,get_point, &small[first], distance)?;\n            centers.swap_remove(first);\n            if phase_2_reassign && centers.len() <= PHASE2_THRESHOLD * max_allowed + 1{\n                assign_and_recompute(&dictionary, weights,get_point,get_weight, &samples, &mut centers, distance, parallel_enabled)?;\n            }\n\n            centers.sort_by(|o1, o2| o1.weight().partial_cmp(&o2.weight()).unwrap());\n            while centers.len() > 0 && centers[0].weight() == 0.0 {\n                centers.remove(0);\n            }\n\n            if inital > max_allowed && centers.len() <= max_allowed {\n                // phase 3 kicks in; but this will execute at most once\n                // note that measureDist can be 0 as well\n                phase_3_distance = measure_dist;\n            }\n        } else {\n            keep_reducing_centers = false;\n        }\n    }\n\n    centers.sort_by(|o1, o2| o2.weight().partial_cmp(&o1.weight()).unwrap()); // decreasing order\n    let center_sum: f64 = centers.iter().map(|x| x.weight() as f64).sum();\n    for i in 0..centers.len() {\n        centers[i].scale_weight(1.0/center_sum)?;\n    }\n    Ok(centers)\n}\n\n\n\nfn pick_slice_to_slice<'a>(index: usize, entry:&'a [&[f32]]) -> &'a [f32]{\n    &entry[index]\n}\n\nfn pick_first_slice_to_slice<'a>(index: usize, entry:&'a [(&[f32],f32)]) -> &'a [f32]{\n    &entry[index].0\n}\n\nfn pick_first_to_slice<'a>(index: usize, entry:&'a [(Vec<f32>,f32)]) -> &'a [f32]{\n    &entry[index].0\n}\n\nfn pick_to_slice<'a>(index: usize, entry:&'a [Vec<f32>]) -> &'a [f32]{\n    &entry[index]\n}\n\n\nfn pick_tuple_weight<T>(index:usize, entry:&[(T,f32)], _weights: &[f32]) -> f32{\n    entry[index].1\n}\n\nfn pick_weight<T>(index:usize, _entry:&[T], weights: &[f32]) -> f32{\n    weights[index]\n}\n\nfn one<'a,Z>(_i:usize,_points : &'a[Z], _weight : &'a [f32]) -> f32{\n    1.0\n}\n\npub fn 
single_centroid_cluster_weighted_vec_with_distance_over_slices(\n    dictionary: &[(Vec<f32>, f32)],\n    distance: fn(&[f32], &[f32]) -> f64,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<Center>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_first_to_slice,\n        pick_tuple_weight,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        Center::new,\n        0,\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\nconst empty_weights:&Vec<f32> = &Vec::new();\n\npub fn single_centroid_unweighted_cluster_vec_as_slice(\n    dictionary: &[Vec<f32>],\n    distance: fn(&[f32], &[f32]) -> f64,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<Center>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_to_slice,\n        one,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        Center::new,\n        0,\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\npub fn single_centroid_unweighted_cluster_slice(\n    dictionary: &[&[f32]],\n    distance: fn(&[f32], &[f32]) -> f64,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<Center>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_slice_to_slice,\n        one,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        Center::new,\n        0,\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\n\npub fn single_centroid_cluster_slice_with_weight_arrays(\n    dictionary: &[&[f32]],\n    weights : &[f32],\n    distance: fn(&[f32], &[f32]) -> f64,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> 
Result<Vec<Center>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        &weights,\n        pick_slice_to_slice,\n        pick_weight,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        Center::new,\n        0,\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\n\npub fn single_centroid_cluster_weighted_vec(\n    dictionary: &[(Vec<f32>, f32)],\n    distance: fn(&Vec<f32>, &Vec<f32>) -> f64,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<Center>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_first_to_ref,\n        pick_tuple_weight,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        Center::new_as_vec,\n        0,\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\nfn pick_ref<'a,T :?Sized>(index: usize, entry:&[&'a T]) -> &'a T{\n    entry[index]\n}\n\nfn pick_ref_tuple_first<'a,T :?Sized>(index: usize, entry:&[(&'a T,f32)]) -> &'a T{\n    entry[index].0\n}\n\nfn pick_first_to_ref<'a,T>(index: usize, entry:&'a [(T,f32)]) -> &'a T{\n    &entry[index].0\n}\n\nfn pick_to_ref<'a,T>(index: usize, entry:&'a [T]) -> &'a T{\n    &entry[index]\n}\n\npub fn single_centroid_cluster_vec(\n    dictionary: &[Vec<f32>],\n    distance: fn(&Vec<f32>, &Vec<f32>) -> f64,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<Center>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_to_ref,\n        one,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        Center::new_as_vec,\n        0,\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\n#[repr(C)]\npub struct MultiCenterRef<'b, T :?Sized> {\n    representatives: Vec<(&'b 
T,f32)>,\n    number_of_representatives: usize,\n    is_compact: bool,\n    shrinkage: f32,\n    weight: f64,\n    sum_of_radii: f64,\n}\n\nimpl<'b,T :?Sized> MultiCenterRef<'b,T>{\n\n    pub fn representatives(& self) -> Vec<(&'b T,f32)>{\n        self.representatives.clone()\n    }\n\n    pub fn new(_representative: usize, point: &'b T, weight: f32, params : (usize,f32,bool)) -> Result<Self> {\n        let (number_of_representatives, shrinkage,is_compact) = params;\n        check_argument(number_of_representatives>0,\"has to be positive\")?;\n        check_argument(shrinkage>=0.0 && shrinkage<= 1.0,\" has to between [0,1]\")?;\n        Ok(MultiCenterRef {\n            representatives: vec![(point, weight as f32);1],\n            number_of_representatives,\n            shrinkage,\n            is_compact,\n            weight: weight as f64,\n            sum_of_radii: 0.0,\n        })\n    }\n\n    pub fn average_radius(&self) -> f64 {\n        if self.weight == 0.0 {\n            0.0\n        } else {\n            self.sum_of_radii / self.weight\n        }\n    }\n\n    pub fn weight(&self) -> f64 {\n        self.weight\n    }\n}\n\nimpl<'b, Z, T:?Sized> IntermediateCluster<Z, &'b T,T> for MultiCenterRef<'b,T> {\n    fn weight(&self) -> f64 {\n        self.weight()\n    }\n\n    fn distance_to_point<'a>(&self, _points:&'a [Z],_get_point: fn(usize,&'a [Z]) ->&'a T,point: &T, distance: fn(&T, &T) -> f64) -> (f64,usize) {\n        let original = ((distance)(point, self.representatives[0].0), 0);\n        let mut closest = original;\n        for i in 1..self.representatives.len() {\n            let t = ((distance)(point, self.representatives[i].0), i);\n            if closest.0 > t.0 {\n                closest = t;\n            }\n        }\n        ((closest.0 * (1.0 - self.shrinkage as f64) + self.shrinkage as f64 * original.0), closest.1)\n    }\n\n    fn distance_to_cluster<'a>(\n        &self,\n        _points:&'a [Z],\n        _get_point: fn(usize,&'a [Z]) ->&'a 
T,\n        other: &dyn IntermediateCluster<Z,&'b T, T>,\n        distance: fn(&T, &T) -> f64,\n    ) -> (f64,usize,usize) {\n        let list = other.representatives();\n        let original = ((distance)(list[0].0, self.representatives[0].0), 0, 0);\n        let mut closest = original;\n        for i in 1..self.representatives.len() {\n            for j in 1..list.len() {\n                let t = ((distance)(list[j].0, self.representatives[i].0), i, j);\n                if closest.0 > t.0 {\n                    closest = t;\n                }\n            }\n        }\n        ((closest.0 * (1.0 - self.shrinkage as f64) + self.shrinkage as f64 * original.0), closest.1, closest.2)\n    }\n\n    fn add_point(&mut self, _index: usize, weight: f32, dist: f64, representative: usize) -> Result<()>{\n        self.representatives[representative].1 += weight;\n        self.sum_of_radii += weight as f64 * dist;\n        self.weight += weight as f64;\n        Ok(())\n    }\n\n    fn recompute<'a>(&mut self, _points:&'a [Z],_get_point: fn(usize,&'a [Z]) ->&'a T, _distance: fn(&T, &T) -> f64) -> f64 {\n        self.representatives.sort_by(|a,b| a.1.partial_cmp(&b.1).unwrap());\n        0.0\n    }\n\n    fn reset(&mut self) {\n        self.sum_of_radii = 0.0;\n        self.weight = 0.0;\n        for i in 0..self.representatives.len() {\n            self.representatives[i].1 = 0.0;\n        }\n    }\n\n    fn extent_measure(&self) -> f64 {\n        0.5 * self.average_radius() / self.number_of_representatives as f64\n    }\n\n    fn average_radius(&self) -> f64 {\n        self.average_radius()\n    }\n\n    fn absorb<'a>(\n        &mut self,\n        _points:&'a [Z],\n        _get_point: fn(usize,&'a [Z]) ->&'a T,\n        another: &dyn IntermediateCluster<Z, &'b T, T>,\n        distance: fn(&T, &T) -> f64,\n    )  -> Result<()> {\n        self.sum_of_radii += if self.is_compact {\n            another.average_radius()*another.weight()\n        } else {\n            
another.extent_measure()*another.weight()\n        };\n        self.weight += another.weight();\n        let mut representatives = Vec::new();\n        representatives.append(&mut self.representatives);\n        representatives.append(&mut another.representatives());\n        self.representatives = Vec::with_capacity(self.number_of_representatives);\n\n        let mut max_index: usize = 0;\n        let mut weight = representatives[0].1;\n        for i in 1..representatives.len() {\n            if representatives[i].1 > weight {\n                weight = representatives[i].1;\n                max_index = i;\n            }\n        }\n        self.representatives.push(representatives[max_index]);\n        representatives.swap_remove(max_index);\n\n\n        /**\n         * create a list of representatives based on the farthest point method, which\n         * correspond to a well scattered set. See\n         * https://en.wikipedia.org/wiki/CURE_algorithm\n         */\n        while representatives.len() > 0 && self.representatives.len() < self.number_of_representatives {\n            let mut farthest_weighted_distance = 0.0;\n            let mut farthest_index: usize = usize::MAX;\n            for j in 0..representatives.len() {\n                if representatives[j].1 as f64 > (weight as f64) / (2.0 * self.number_of_representatives as f64) {\n                    let mut new_weighted_distance = (distance)(self.representatives[0].0,\n                                                               representatives[j].0) * representatives[j].1 as f64;\n                    check_argument(new_weighted_distance >= 0.0, \" weights or distances cannot be negative\")?;\n                    for i in 1..self.representatives.len() {\n                        let t = (distance)(self.representatives[i].0,\n                                           representatives[j].0) * representatives[j].1 as f64;\n                        check_argument(t >= 0.0, \" weights or distances cannot be 
negative\")?;\n                        if t < new_weighted_distance {\n                            new_weighted_distance = t;\n                        }\n                    }\n                    if new_weighted_distance > farthest_weighted_distance {\n                        farthest_weighted_distance = new_weighted_distance;\n                        farthest_index = j;\n                    }\n                }\n            }\n            if farthest_weighted_distance == 0.0 {\n                break;\n            }\n            self.representatives.push(representatives[farthest_index]);\n            representatives.swap_remove(farthest_index);\n        }\n\n        // absorb the remainder into existing representatives\n        for j in 0..representatives.len() {\n            let dist = (distance)(representatives[0].0, self.representatives[0].0);\n            check_argument(dist >= 0.0, \"distance cannot be negative\")?;\n            let mut min_dist = dist;\n            let mut min_index: usize = 0;\n            for i in 1..self.representatives.len() {\n                let new_dist = (distance)(self.representatives[i].0, representatives[j].0);\n                check_argument(new_dist >= 0.0, \"distance cannot be negative\")?;\n                if new_dist < min_dist {\n                    min_dist = new_dist;\n                    min_index = i;\n                }\n            }\n            self.representatives[min_index].1 += representatives[j].1;\n            self.sum_of_radii += representatives[j].1 as f64 * min_dist;\n        }\n        self.representatives.sort_by(|a,b| b.1.partial_cmp(&a.1).unwrap());\n        Ok(())\n    }\n\n    fn representatives(&self) -> Vec<(&'b T, f32)> {\n        self.representatives()\n    }\n\n    fn scale_weight(&mut self, factor: f64) -> Result<()>{\n        check_argument(!factor.is_nan() && factor>0.0,\" has to be positive\")?;\n        for i in 0..self.representatives.len() {\n            self.representatives[i].1 = 
(self.representatives[i].1 as f64 * factor) as f32;\n        }\n        Ok(())\n    }\n\n}\n\npub fn multi_cluster_obj<'a,T:Sync>(\n    dictionary: &'a [T],\n    distance: fn(&T, &T) -> f64,\n    number_of_representatives: usize,\n    shrinkage: f32,\n    is_compact: bool,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<MultiCenterRef<'a, T>>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_to_ref,\n        one,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        MultiCenterRef::new,\n        (number_of_representatives,shrinkage,is_compact),\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\n\npub fn multi_cluster_as_ref<'a,T:Sync>(\n    dictionary: &'a [&T],\n    distance: fn(&T, &T) -> f64,\n    number_of_representatives: usize,\n    shrinkage: f32,\n    is_compact: bool,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<MultiCenterRef<'a, T>>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_ref,\n        one,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        MultiCenterRef::new,\n        (number_of_representatives,shrinkage,is_compact),\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\npub fn multi_cluster_as_weighted_obj<'a,T:Sync>(\n    dictionary: &'a [(T,f32)],\n    distance: fn(&T, &T) -> f64,\n    number_of_representatives: usize,\n    shrinkage: f32,\n    is_compact: bool,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<MultiCenterRef<'a, T>>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_first_to_ref,\n        pick_tuple_weight,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n    
    MultiCenterRef::new,\n        (number_of_representatives,shrinkage,is_compact),\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\npub fn multi_cluster_as_weighted_ref<'a,T:Sync + ?Sized>(\n    dictionary: &'a [(&T,f32)],\n    distance: fn(&T, &T) -> f64,\n    number_of_representatives: usize,\n    shrinkage: f32,\n    is_compact: bool,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<MultiCenterRef<'a, T>>> {\n\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        empty_weights,\n        pick_ref_tuple_first,\n        pick_tuple_weight,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        MultiCenterRef::new,\n        (number_of_representatives,shrinkage,is_compact),\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n}\n\npub fn multi_cluster_as_object_with_weight_array<'a,T :Sync>(\n    dictionary: &'a [T],\n    weights: &'a [f32],\n    distance: fn(&T, &T) -> f64,\n    number_of_representatives: usize,\n    shrinkage: f32,\n    is_compact: bool,\n    max_allowed: usize,\n    parallel_enabled: bool,\n) -> Result<Vec<MultiCenterRef<'a, T>>> {\n    general_iterative_clustering(\n        max_allowed,\n        dictionary,\n        weights,\n        pick_to_ref,\n        pick_weight,\n        LENGTH_BOUND,\n        max_allowed as u64,\n        parallel_enabled,\n        MultiCenterRef::new,\n        (number_of_representatives,shrinkage,is_compact),\n        distance,\n        false,\n        true,\n        SEPARATION_RATIO_FOR_MERGE,\n    )\n\n}\n\n#[repr(C)]\n#[derive(Clone)]\npub struct MultiCenter<T : Clone> {\n    representatives: Vec<(T,f32)>,\n    shrinkage: f32,\n    weight: f64,\n    sum_of_radii: f64,\n}\n\nimpl<T : Clone> MultiCenter<T>{\n\n    pub fn create<'a>( refc : &MultiCenterRef<'a,T> ) -> Self {\n        let mut rep_list = Vec::new();\n        for j in 
refc.representatives() {\n            rep_list.push((j.0.clone(),j.1));\n        }\n        MultiCenter {\n            representatives: rep_list,\n            weight: refc.weight,\n            shrinkage : refc.shrinkage,\n            sum_of_radii: refc.sum_of_radii,\n        }\n    }\n\n    pub fn representatives(& self) -> Vec<(T,f32)>{\n        self.representatives.clone()\n    }\n\n    pub fn representative(& self, number: usize) -> T {\n        self.representatives[number].0.clone()\n    }\n\n    pub fn average_radius(&self) -> f64 {\n        if self.weight == 0.0 {\n            0.0\n        } else {\n            self.sum_of_radii / self.weight\n        }\n    }\n\n    pub fn weight(&self) -> f64 {\n        self.weight\n    }\n\n    pub fn distance_to_point(&self, point: &T, ignore: f32, distance: fn(&T, &T) -> f64) -> Result<(f64,usize)> {\n        let original = ((distance)(point, &self.representatives[0].0), 0);\n        check_argument(original.0>=0.0,\"distances cannot be negative\")?;\n        let mut closest = original;\n        for i in 1..self.representatives.len() {\n            if self.representatives[i].1 > ignore {\n                let t = ((distance)(point, &self.representatives[i].0), i);\n                check_argument(t.0 >= 0.0, \"distances cannot be negative\")?;\n                if closest.0 > t.0 {\n                    closest = t;\n                }\n            }\n        }\n        Ok(((closest.0 * (1.0 - self.shrinkage as f64) + self.shrinkage as f64 * original.0), closest.1))\n    }\n\n    pub fn distance_to_point_and_ref<'a>(&'a self, point: &T, ignore: f32, distance: fn(&T, &T) -> f64) -> Result<(f64,&'a T)> {\n        let original = ((distance)(point, &self.representatives[0].0), 0);\n        check_argument(original.0>=0.0,\"distances cannot be negative\")?;\n        let mut closest = original;\n        for i in 1..self.representatives.len() {\n            if self.representatives[i].1 > ignore {\n                let t = 
((distance)(point, &self.representatives[i].0), i);\n                check_argument(t.0 >= 0.0, \"distances cannot be negative\")?;\n                if closest.0 > t.0 {\n                    closest = t;\n                }\n            }\n        }\n        Ok(((closest.0 * (1.0 - self.shrinkage as f64) + self.shrinkage as f64 * original.0), &self.representatives[closest.1].0))\n    }\n}\n\npub fn persist<'a,T:Clone>(list:&Vec<MultiCenterRef<'a,T>>) -> Vec<MultiCenter<T>> {\n    let mut answer = Vec::new();\n    for item in list {\n       answer.push(MultiCenter::create(item));\n    }\n    answer\n}"
  },
  {
    "path": "Rust/src/common/conditionalfieldsummarizer.rs",
    "content": "use std::hash::Hash;\nuse crate::{\n    common::samplesummary::{summarize, SampleSummary},\n    pointstore::PointStore,\n    types::Result\n};\n\nfn project_missing(point: &Vec<f32>, position: &[usize]) -> Vec<f32> {\n    position.iter().map(|i| point[*i]).collect()\n}\n\nconst CONDITIONAL_UPPER_FRACTION : f64 = 0.9;\n\nconst CONDITIONAL_LOWER_FRACTION : f64 = 0.1;\n\n/// the following function is a conduit that summarizes the conditional samples derived from the trees\n/// The samples are denoted by (PointIndex, f32) where the PointIndex(usize) corresponds to the point identifier\n/// in the point store and the f32 associated with a scalar value (corresponding to weight)\n/// the field missing corresponds to the list of missing fields in the space of the full (potentially shingled) points\n/// centrality corresponds to the parameter which was used to derive the samples, and thus provides a mechanism for\n/// refined interpretation in summarization\n/// project corresponds to a boolean flag, determining whether we wish to focus on the missing fields only (project = true)\n/// or we focus on the entire space of (potentially shingled) points (in case of project = false) which have different\n/// and complementary uses.\n/// max_number corresponds to a parameter that controls the summarization -- in the current version this corresponds to\n/// an upper bound on the number of summary points in the SampleSummary construct\n///\n/// Note that the global, mean and median do not perform any weighting/pruning; whereas the summarize() performs on\n/// somewhat denoised data to provide a list of summary. Note further that summarize() is redundant (and skipped)\n/// when max_number = 0\n/// The combination appears to provide the best of all worlds with little performance overhead and can be\n/// used and reconfigured easily. 
In the fullness of time, it is possible to leverage a dynamic Kernel, since\n/// the entire PointStore is present and the PointStore is dynamic.\n#[repr(C)]\npub struct FieldSummarizer {\n    centrality: f64,\n    project: bool,\n    max_number: usize,\n    distance: fn(&[f32], &[f32]) -> f64,\n}\n\nimpl FieldSummarizer {\n    pub fn new(\n        centrality: f64,\n        project: bool,\n        max_number: usize,\n        distance: fn(&[f32], &[f32]) -> f64,\n    ) -> Self {\n        FieldSummarizer {\n            centrality,\n            project,\n            max_number,\n            distance,\n        }\n    }\n\n    pub fn summarize_list<Label:Copy + Sync + Send, Attributes : Copy + Sync + Hash + Eq + Send>(\n        &self,\n        pointstore: &dyn PointStore<Label,Attributes>,\n        point_list_with_distance: &[(f64, usize, f64)],\n        missing: &[usize]\n    ) -> Result<SampleSummary> {\n        let mut distance_list: Vec<f64> = point_list_with_distance.iter().map(|a| a.2).collect();\n        distance_list.sort_by(|a, b| a.partial_cmp(&b).unwrap());\n        let mut threshold = 0.0;\n        if self.centrality > 0.0 {\n            let mut always_include = 0;\n            while always_include < point_list_with_distance.len() - 1\n                && distance_list[always_include] == 0.0\n            {\n                always_include += 1;\n            }\n            threshold = self.centrality\n                * (distance_list[always_include + (distance_list.len() - always_include) / 3]\n                    + distance_list[always_include + (distance_list.len() - always_include) / 2])\n                    as f64;\n        }\n        threshold +=\n            (1.0 - self.centrality) * distance_list[point_list_with_distance.len() - 1] as f64;\n\n        let total_weight = point_list_with_distance.len() as f64;\n        let dimensions = if !self.project || missing.len() == 0 {\n            pointstore.copy(point_list_with_distance[0].1)?.len()\n        } else 
{\n            missing.len()\n        };\n        let mut mean = vec![0.0f32; dimensions];\n        let mut deviation = vec![0.0f32; dimensions];\n        let mut sum_values_sq = vec![0.0f64; dimensions];\n        let mut sum_values = vec![0.0f64; dimensions];\n        let mut vec = Vec::new();\n        for i in 0..point_list_with_distance.len() {\n            let point = if !self.project || missing.len() == 0 {\n                pointstore.copy(point_list_with_distance[i].1)?\n            } else {\n                project_missing(\n                    &pointstore.copy(point_list_with_distance[i].1)?,\n                    &missing,\n                )\n            };\n            for j in 0..dimensions {\n                sum_values[j] += point[j] as f64;\n                sum_values_sq[j] += point[j] as f64 * point[j] as f64;\n            }\n            // the else can be filtered further\n            let weight: f32 = if point_list_with_distance[i].2 <= threshold {\n                1.0\n            } else {\n                (threshold / point_list_with_distance[i].2) as f32\n            };\n\n            vec.push((point, weight));\n        }\n\n        for j in 0..dimensions {\n            mean[j] = (sum_values[j] / total_weight as f64) as f32;\n            let t: f64 = sum_values_sq[j] / total_weight as f64\n                - sum_values[j] * sum_values[j] / (total_weight as f64 * total_weight as f64);\n            deviation[j] = f64::sqrt(if t > 0.0 { t } else { 0.0 }) as f32;\n        }\n        let vec_weight : f64 = vec.iter().map(|x| x.1 as f64).sum();\n        let num = vec_weight/2.0;\n        let lower_fraction = vec_weight *CONDITIONAL_LOWER_FRACTION;\n        let upper_fraction = vec_weight*CONDITIONAL_UPPER_FRACTION;\n        let mut median = vec![0.0f32; dimensions];\n        let mut upper = vec![0.0f32; dimensions];\n        let mut lower = vec![0.0f32; dimensions];\n\n        for j in 0..dimensions {\n            let mut y: Vec<(f32,f32)> = 
vec.iter().map(|x| (x.0[j],x.1)).collect();\n            y.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n            let first = SampleSummary::pick(&y,lower_fraction,0,0.0);\n            lower[j] = y[first.0].0;\n            let second = SampleSummary::pick(&y,num,first.0,first.1);\n            median[j] = y[second.0].0;\n            let third = SampleSummary::pick(&y,upper_fraction,second.0,second.1);\n            upper[j] = y[third.0].0;\n        }\n\n        let summary = summarize(&vec, self.distance, self.max_number, false).unwrap();\n        Ok(SampleSummary {\n            summary_points: summary.summary_points.clone(),\n            relative_weight: summary.relative_weight.clone(),\n            total_weight: summary.total_weight,\n            mean,\n            median,\n            upper,\n            lower,\n            deviation,\n        })\n    }\n}\n"
  },
  {
    "path": "Rust/src/common/descriptor.rs",
    "content": "use crate::common::divector::DiVector;\nuse crate::common::rangevector::RangeVector;\nuse crate::trcf::types::{CorrectionMode, ImputationMethod, ScoringStrategy, TransformMethod};\nuse crate::trcf::types::ImputationMethod::USE_RCF;\nuse crate::trcf::types::ScoringStrategy::EXPECTED_INVERSE_HEIGHT;\n\n/**\n * This class maintains a simple discounted statistics. Setters are avoided\n * except for discount rate which is useful as initialization from raw scores\n */\n#[repr(C)]\n#[derive(Clone)]\npub struct Descriptor {\n    pub id: u64,\n    pub current_input: Vec<f32>,\n    pub current_timestamp: u64,\n    pub missing_values: Option<Vec<usize>>,\n    pub rcf_point: Option<Vec<f32>>,\n    pub score: f32,\n    pub correction_mode: CorrectionMode,\n    pub values_seen: usize,\n    pub transform_method : TransformMethod,\n    pub threshold: f32,\n    pub anomaly_grade: f32,\n    pub data_confidence: f32,\n    pub attribution: Option<DiVector>,\n    pub relative_index : i32,\n    pub scale : Option<Vec<f32>>,\n    pub shift : Option<Vec<f32>>,\n    pub difference_deviations: Option<Vec<f32>>,\n    pub deviations_post : Option<Vec<f32>>,\n    pub time_augmented : bool,\n    pub expected_rcf_point: Option<Vec<f32>>,\n    pub last_anomaly : Option<AnomalyInformation>,\n    pub forecast : Option<RangeVector<f32>>,\n    pub error_information : Option<ErrorInformation>,\n    pub scoring_strategy : ScoringStrategy,\n    pub imputation_method : ImputationMethod,\n}\n\n#[repr(C)]\n#[derive(Clone)]\npub struct AnomalyInformation {\n    // we do not explicitly provide a default so that each of these entires are\n    // considered carefully before declaring an anomaly\n    pub expected_rcf_point: Vec<f32>,\n    pub anomalous_rcf_point: Vec<f32>,\n    pub relative_index: i32,\n    pub values_seen: usize,\n    pub attribution: Option<DiVector>,\n    pub score: f32,\n    pub grade: f32,\n    pub expected_timestamp: u64,\n    pub relevant_attribution: Option<Vec<f32>>,\n  
  pub time_attribution: f32,\n    pub past_values: Vec<f32>,\n    pub past_timestamp: u64,\n    pub expected_values_list: Vec<Vec<f32>>,\n    pub likelihood_of_values: Vec<f32>\n}\n\n#[repr(C)]\n#[derive(Clone)]\npub struct ErrorInformation {\n    pub interval_precision: Vec<f32>,\n    pub error_distribution : RangeVector<f32>,\n    pub error_rmse : DiVector,\n    pub error_mean : Vec<f32>\n}\n\nimpl Default for Descriptor {\n    fn default() -> Self {\n        Descriptor{\n            id: 0,\n            current_input: vec![],\n            current_timestamp: 0,\n            missing_values: None,\n            rcf_point: None,\n            score: 0.0,\n            correction_mode: CorrectionMode::NONE,\n            values_seen: 0,\n            transform_method: TransformMethod::NONE,\n            threshold: 0.0,\n            anomaly_grade: 0.0,\n            data_confidence: 0.0,\n            attribution: None,\n            relative_index: 0,\n            scale: None,\n            shift: None,\n            difference_deviations: None,\n            deviations_post: None,\n            time_augmented: false,\n            expected_rcf_point: None,\n            last_anomaly: None,\n            forecast: None,\n            error_information: None,\n            scoring_strategy: EXPECTED_INVERSE_HEIGHT,\n            imputation_method: USE_RCF\n        }\n    }\n}\n\nimpl Descriptor {\n    pub fn new(id: u64, point: &[f32], current_timestamp: u64,time_augmented: bool, missing_values: Option<Vec<usize>>) -> Self {\n        if missing_values.as_ref().is_some(){\n            for i in missing_values.as_ref().unwrap() {\n                assert!( *i < point.len(), \"incorrect input\")\n            }\n        }\n        Descriptor { id, current_input: Vec::from(point), current_timestamp, time_augmented, missing_values, ..Default::default()}\n    }\n}\n\n"
  },
  {
    "path": "Rust/src/common/deviation.rs",
    "content": "use crate::util::check_argument;\nuse crate::types::Result;\n\n/**\n * This class maintains a simple discounted statistics. Setters are avoided\n * except for discount rate which is useful as initialization from raw scores\n */\n#[repr(C)]\n#[derive(Clone)]\npub struct Deviation {\n    pub discount: f64,\n    pub weight: f64,\n    pub sum_squared:f64,\n    pub sum: f64,\n    pub count: i32\n}\n\nimpl Deviation {\n    pub fn new(discount: f64) -> Result<Self> {\n        check_argument(discount>=0.0 && discount < 1.0, \"incorrect discount value\")?;\n        Ok(Deviation {\n            discount,\n            weight: 0.0,\n            sum:0.0,\n            sum_squared:0.0,\n            count:0\n        })\n    }\n\n    pub fn default() -> Self {\n        Deviation {\n            discount: 0.0,\n            weight: 0.0,\n            sum:0.0,\n            sum_squared:0.0,\n            count:0\n        }\n    }\n\n    pub fn create(discount:f64,weight:f64,sum:f64,sum_squared:f64,count:i32) -> Self{\n        Deviation{\n            discount,\n            weight,\n            sum,\n            sum_squared,\n            count\n        }\n    }\n\n    pub fn reset(&mut self) {\n        self.weight = 0.0;\n        self.count = 0;\n        self.sum = 0.0;\n        self.sum_squared = 0.0;\n    }\n\n    pub fn mean(&self) -> f64 {\n        if self.is_empty() {\n            0.0\n        } else {\n            self.sum / self.weight\n        }\n    }\n\n    pub fn update(&mut self, score: f64) {\n        let factor = if self.discount == 0.0 {1.0} else {\n            let a = 1.0 - self.discount;\n            let b= 1.0 - 1.0 / (self.count + 2) as f64;\n            if a<b {a} else {b}\n        };\n        self.sum = self.sum * factor + score;\n        self.sum_squared = self.sum_squared * factor + score * score;\n        self.weight = self.weight * factor + 1.0;\n        self.count += 1;\n    }\n\n    pub fn deviation(&self) -> f64{\n        if self.is_empty() {\n     
       return 0.0;\n        }\n        let temp = self.sum / self.weight;\n        let answer = self.sum_squared / self.weight - temp * temp;\n        if answer > 0.0 {\n            f64::sqrt(answer)\n        } else {\n            0.0\n        }\n    }\n\n    pub fn is_empty(&self) -> bool{\n        self.weight <= 0.0\n    }\n\n    pub fn discount(&self) -> f64 {\n        self.discount\n    }\n\n    pub fn set_discount(&mut self, discount:f64) {\n        self.discount = discount;\n    }\n\n    pub fn sum(&self) -> f64 {\n        self.sum\n    }\n\n    pub fn sum_squared(&self) -> f64{\n        self.sum_squared\n    }\n\n    pub fn weight(&self) -> f64{\n        self.weight\n    }\n\n    pub fn count(&self) -> i32{\n        self.count\n    }\n\n    pub fn set_count(&mut self,count:i32) {\n        self.count = count;\n    }\n}\n"
  },
  {
    "path": "Rust/src/common/directionaldensity.rs",
    "content": "use crate::{common::divector::DiVector, samplerplustree::boundingbox::BoundingBox};\nuse crate::types::Result;\nuse crate::util::check_argument;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct InterpolationMeasure {\n    pub measure: DiVector,\n    pub distance: DiVector,\n    pub probability_mass: DiVector,\n    pub sample_size: f32,\n}\n\nimpl InterpolationMeasure {\n    pub fn empty(dimension: usize, sample_size: f32) -> Self {\n        InterpolationMeasure {\n            measure: DiVector::empty(dimension),\n            distance: DiVector::empty(dimension),\n            probability_mass: DiVector::empty(dimension),\n            sample_size,\n        }\n    }\n\n    pub fn new(\n        measure: DiVector,\n        distance: DiVector,\n        prob_mass: DiVector,\n        sample_size: f32,\n    ) -> Result<Self> {\n        check_argument(\n            measure.dimensions() == distance.dimensions(),\n            \" incorrect lengths\"\n        )?;\n        check_argument(\n            measure.dimensions() == prob_mass.dimensions(),\n            \" incorrect lengths\"\n        )?;\n        Ok(InterpolationMeasure {\n            measure: measure,\n            distance: distance,\n            probability_mass: prob_mass,\n            sample_size,\n        })\n    }\n\n    pub fn add_to(&self, other: &mut InterpolationMeasure) {\n        self.probability_mass.add_to(&mut other.probability_mass);\n        self.distance.add_to(&mut other.distance);\n        self.measure.add_to(&mut other.measure);\n        other.sample_size += self.sample_size;\n    }\n\n    pub fn divide(&mut self, num: usize) {\n        self.scale(1.0 / num as f64);\n        self.scale_samples(1.0 / num as f64);\n    }\n\n    pub fn scale(&mut self, factor: f64) {\n        self.distance.scale(factor);\n        self.probability_mass.scale(factor);\n        self.measure.scale(factor);\n    }\n\n    pub fn scale_samples(&mut self, factor: f64) {\n        self.sample_size = (self.sample_size 
as f64 * factor) as f32;\n    }\n\n    pub fn update(&mut self, point: &[f32], bounding_box: &BoundingBox, measure: f64) -> f64 {\n        let min_values = bounding_box.get_min_values();\n        let max_values = bounding_box.get_max_values();\n        let minsum: f32 = min_values\n            .iter()\n            .zip(point)\n            .map(|(&x, &y)| if x - y > 0.0 { x - y } else { 0.0 })\n            .sum();\n        let maxsum: f32 = point\n            .iter()\n            .zip(max_values)\n            .map(|(&x, &y)| if x - y > 0.0 { x - y } else { 0.0 })\n            .sum();\n        let sum = maxsum + minsum;\n        let new_range = sum as f64 + bounding_box.get_range_sum();\n        let prob = sum as f64 / (new_range);\n        if prob > 0.0 {\n            self.scale(1.0 - prob);\n            for i in 0..point.len() {\n                if point[i] > max_values[i] {\n                    let t = (point[i] - max_values[i]) as f64 / new_range;\n                    self.distance.high[i] += t * (point[i] - min_values[i]) as f64;\n                    self.probability_mass.high[i] += t;\n                    self.measure.high[i] += measure * t;\n                } else if point[i] < min_values[i] {\n                    let t = (min_values[i] - point[i]) as f64 / new_range;\n                    self.distance.low[i] += t * (max_values[i] - point[i]) as f64;\n                    self.probability_mass.low[i] += t;\n                    self.measure.low[i] += measure * t;\n                }\n            }\n        }\n        prob\n    }\n\n    pub fn directional_measure(&self, threshold: f64, manifold_dimension: f64) -> Result<DiVector> {\n        check_argument(\n            self.sample_size >= 0.0 && self.measure.total() >= 0.0,\n            \" cannot have negative samples or measure\"\n        )?;\n        if self.sample_size == 0.0f32 || self.measure.total() == 0.0 {\n            return Ok(DiVector::empty(self.measure.dimensions()));\n        }\n\n        let mut 
sum_of_factors = 0.0;\n\n        for i in 0..self.measure.dimensions() {\n            let mut t = if self.probability_mass.high_low_sum(i) > 0.0 {\n                self.distance.high_low_sum(i) / self.probability_mass.high_low_sum(i)\n            } else {\n                0.0\n            };\n\n            if t > 0.0 {\n                t = f64::exp(f64::ln(t) * manifold_dimension)\n                    * self.probability_mass.high_low_sum(i);\n            }\n            sum_of_factors += t;\n        }\n\n        let density_factor = 1.0 / (threshold + sum_of_factors);\n        let mut answer = self.measure.clone();\n        answer.scale(density_factor);\n        Ok(answer)\n    }\n\n    pub fn directional_density(&self) -> Result<DiVector> {\n        self.directional_measure(1e-3, self.measure.dimensions() as f64)\n    }\n\n    pub fn density(&self) -> Result<f64> {\n        Ok(self.directional_density()?.total())\n    }\n}\n"
  },
  {
    "path": "Rust/src/common/divector.rs",
    "content": "use crate::samplerplustree::boundingbox::BoundingBox;\nuse crate::util::check_argument;\nuse crate::types::Result;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct DiVector {\n    pub high: Vec<f64>,\n    pub low: Vec<f64>,\n}\n\nimpl DiVector {\n    pub fn empty(dimension: usize) -> Self {\n        DiVector {\n            high: vec![0.0; dimension],\n            low: vec![0.0; dimension],\n        }\n    }\n\n    pub fn new(high: &[f64], low: &[f64]) -> Result<Self> {\n        check_argument(high.len() == low.len(), \" incorrect lengths\")?;\n        Ok(DiVector {\n            high: Vec::from(high),\n            low: Vec::from(low),\n        })\n    }\n\n    pub fn assign_as_probability_of_cut(&mut self, bounding_box: &BoundingBox, point: &[f32]) {\n        let minsum: f64 = self\n            .low\n            .iter_mut()\n            .zip(bounding_box.get_min_values())\n            .zip(point)\n            .map(|((x, &y), &z)| {\n                if y - z > 0.0 {\n                    *x = (y - z) as f64;\n                    *x\n                } else {\n                    *x = 0.0;\n                    *x\n                }\n            })\n            .sum();\n        let maxsum: f64 = self\n            .high\n            .iter_mut()\n            .zip(point)\n            .zip(bounding_box.get_max_values())\n            .map(|((x, &y), &z)| {\n                if y - z > 0.0 {\n                    *x = (y - z) as f64;\n                    *x\n                } else {\n                    *x = 0.0;\n                    *x\n                }\n            })\n            .sum();\n\n        let sum = minsum + maxsum;\n        if sum != 0.0 {\n            self.scale(1.0 / (bounding_box.get_range_sum() + sum));\n        }\n    }\n\n    pub fn assign_as_probability_of_cut_with_missing_coordinates(\n        &mut self,\n        bounding_box: &BoundingBox,\n        point: &[f32],\n        missing_coordinates: &[bool],\n    ) {\n        let minsum: f64 = self\n   
         .low\n            .iter_mut()\n            .zip(bounding_box.get_min_values())\n            .zip(point)\n            .zip(missing_coordinates)\n            .map(|(((x, &y), &z), &b)| {\n                if !b && y - z > 0.0 {\n                    *x = (y - z) as f64;\n                    *x\n                } else {\n                    *x = 0.0;\n                    *x\n                }\n            })\n            .sum();\n        let maxsum: f64 = self\n            .high\n            .iter_mut()\n            .zip(point)\n            .zip(bounding_box.get_max_values())\n            .zip(missing_coordinates)\n            .map(|(((x, &y), &z), &b)| {\n                if !b && y - z > 0.0 {\n                    *x = (y - z) as f64;\n                    *x\n                } else {\n                    *x = 0.0;\n                    *x\n                }\n            })\n            .sum();\n\n        let sum = minsum + maxsum;\n        if sum != 0.0 {\n            self.scale(1.0 / (bounding_box.get_range_sum() + sum));\n        }\n    }\n\n    pub fn assign(&mut self, other: &DiVector) {\n        for (x, &y) in self.high.iter_mut().zip(&other.high) {\n            *x = y;\n        }\n        for (x, &y) in self.low.iter_mut().zip(&other.low) {\n            *x = y;\n        }\n    }\n\n    pub fn add_from(&mut self, other: &DiVector, factor: f64) {\n        other.add_to_scaled(self, factor);\n    }\n\n    pub fn add_to(&self, other: &mut DiVector) {\n        for (x, &y) in other.high.iter_mut().zip(&self.high) {\n            *x += y;\n        }\n        for (x, &y) in other.low.iter_mut().zip(&self.low) {\n            *x += y;\n        }\n    }\n\n    pub fn add_to_scaled(&self, other: &mut DiVector, factor: f64) {\n        for (x, &y) in other.high.iter_mut().zip(&self.high) {\n            *x += y * factor;\n        }\n        for (x, &y) in other.low.iter_mut().zip(&self.low) {\n            *x += y * factor;\n        }\n    }\n\n    pub fn divide(&mut self, 
num: usize) {\n        self.scale(1.0 / num as f64)\n    }\n\n    pub fn scale(&mut self, factor: f64) {\n        for x in self.high.iter_mut() {\n            *x *= factor;\n        }\n        for x in self.low.iter_mut() {\n            *x *= factor;\n        }\n    }\n\n    pub fn total(&self) -> f64 {\n        self.high.iter().sum::<f64>() + self.low.iter().sum::<f64>()\n    }\n\n    pub fn normalize(&mut self, value: f64) {\n        let current = self.total();\n        if current <= 0.0 {\n            let v = value / (2.0 * self.high.len() as f64);\n            for x in self.high.iter_mut() {\n                *x = v;\n            }\n            for x in self.low.iter_mut() {\n                *x = v;\n            }\n        } else {\n            self.scale(value / current);\n        }\n    }\n\n    pub fn dimensions(&self) -> usize {\n        self.high.len()\n    }\n\n    pub fn high_low_sum(&self, index: usize) -> f64 {\n        self.high[index] + self.low[index]\n    }\n\n    pub fn max_contribution(&self, base_dimension: usize) -> Result<usize> {\n        self.max_gap_contribution(base_dimension,self.dimensions())\n    }\n\n    pub fn max_gap_contribution(&self, base_dimension: usize, gap: usize) -> Result<usize> {\n        check_argument(gap>0, \"incorrect input\")?;\n        check_argument(base_dimension>0, \"incorrect input\")?;\n        check_argument(self.dimensions()%base_dimension == 0, \"incorrect input\")?;\n        let mut val = 0.0;\n        let mut index = if  gap * base_dimension > self.dimensions() {0} else { self.dimensions()/base_dimension - gap};\n        for i in 0..base_dimension {\n            val += self.high_low_sum(index*base_dimension + i);\n        }\n        for j in (index+1)..(self.dimensions() / base_dimension) {\n            let mut sum = 0.0;\n            for i in 0..base_dimension {\n                sum += self.high_low_sum(j * base_dimension + i);\n            }\n            if sum > val {\n                val = sum;\n          
      index = j;\n            }\n        }\n        Ok(index)\n    }\n}\n"
  },
  {
    "path": "Rust/src/common/intervalstoremanager.rs",
    "content": "use std::fmt::Debug;\nuse crate::types::Result;\nuse crate::util::check_argument;\n\n#[repr(C)]\npub struct IntervalStoreManager<T> {\n    capacity: usize,\n    last_in_use: usize,\n    free_indices_start: Vec<T>,\n    free_indices_end: Vec<T>,\n}\n\nimpl<T: Copy + std::convert::TryFrom<usize>> IntervalStoreManager<T>\nwhere\n    T: std::fmt::Display + std::cmp::PartialEq,\n    usize: From<T>,\n{\n    pub fn new(size: usize) -> Self\n    where\n        <T as TryFrom<usize>>::Error: Debug,\n    {\n        IntervalStoreManager {\n            capacity: size,\n            last_in_use: 1,\n            free_indices_start: vec![0.try_into().unwrap()],\n            free_indices_end: vec![(size - 1).try_into().unwrap()],\n        }\n    }\n\n    pub fn get_capacity(&self) -> usize {\n        self.capacity\n    }\n\n    pub fn change_capacity(&mut self, new_capacity: usize)\n    where\n        <T as TryFrom<usize>>::Error: Debug,\n    {\n        if new_capacity > self.capacity {\n            let start: T = self.capacity.try_into().unwrap();\n            let end: T = (new_capacity - 1).try_into().unwrap();\n            if self.free_indices_start.len() == self.last_in_use {\n                self.free_indices_start.resize(self.last_in_use + 1, start);\n                self.free_indices_end.resize(self.last_in_use + 1, end);\n            } else {\n                self.free_indices_start[self.last_in_use] = start;\n                self.free_indices_end[self.last_in_use] = end;\n            }\n            self.last_in_use += 1;\n            self.capacity = new_capacity;\n        }\n    }\n\n    pub fn is_empty(&self) -> bool {\n        self.last_in_use == 0\n    }\n\n    pub fn get(&mut self) -> Result<usize>\n    where\n        <T as TryFrom<usize>>::Error: Debug,\n    {\n        check_argument(!self.is_empty(),\"no more indices left in interval manager\")?;\n        let answer = self.free_indices_start[self.last_in_use - 1];\n        let new_value: usize = 
answer.into();\n        if answer == self.free_indices_end[self.last_in_use - 1] {\n            self.last_in_use -= 1;\n        } else {\n            self.free_indices_start[self.last_in_use - 1] = (new_value + 1).try_into().unwrap();\n        }\n        Ok(new_value)\n    }\n\n    pub fn release(&mut self, index: usize) -> Result<()>\n    where\n        <T as TryFrom<usize>>::Error: Debug,\n    {\n        let val: T = TryFrom::try_from(index).unwrap();\n        if self.last_in_use != 0 {\n            let start: usize = self.free_indices_start[self.last_in_use - 1].into();\n            let end: usize = self.free_indices_end[self.last_in_use - 1].into();\n            if start == index + 1 {\n                self.free_indices_start[self.last_in_use - 1] = val;\n                return Ok(());\n            } else if end + 1 == index {\n                self.free_indices_end[self.last_in_use - 1] = val;\n                return Ok(());\n            }\n        }\n        if self.last_in_use < self.free_indices_start.len() {\n            self.free_indices_start[self.last_in_use] = val;\n            self.free_indices_end[self.last_in_use] = val;\n        } else {\n            self.free_indices_start.resize(self.last_in_use + 1, val);\n            self.free_indices_end.resize(self.last_in_use + 1, val);\n        }\n        self.last_in_use += 1;\n        Ok(())\n    }\n\n    pub fn used(&self) -> usize {\n        let mut answer = 0;\n        for i in 0..self.last_in_use {\n            let start: usize = self.free_indices_start[i].into();\n            let end: usize = self.free_indices_end[i].into();\n            answer += end - start + 1;\n        }\n        self.capacity - answer\n    }\n\n    pub fn get_size(&self) -> usize {\n        self.free_indices_start.len() * 2 * std::mem::size_of::<T>()\n            + std::mem::size_of::<IntervalStoreManager<T>>()\n    }\n}\n"
  },
  {
    "path": "Rust/src/common/mod.rs",
    "content": "pub mod cluster;\npub mod conditionalfieldsummarizer;\npub mod directionaldensity;\npub mod divector;\npub mod intervalstoremanager;\npub mod multidimdatawithkey;\npub mod samplesummary;\npub mod deviation;\npub mod rangevector;\npub mod descriptor;\n"
  },
  {
    "path": "Rust/src/common/multidimdatawithkey.rs",
    "content": "extern crate rand;\n\nextern crate rand_chacha;\nuse std::f32::consts::PI;\n\nuse rand::SeedableRng;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\n\nuse crate::rand::Rng;\nuse crate::util::check_argument;\nuse crate::types::Result;\n\npub struct MultiDimDataWithKey {\n    pub data: Vec<Vec<f32>>,\n    pub change_indices: Vec<usize>,\n    pub labels: Vec<usize>,\n    pub changes: Vec<Vec<f32>>,\n}\n\nimpl MultiDimDataWithKey {\n    pub fn multi_cosine(\n        num: usize,\n        period: &[usize],\n        amplitude: &[f32],\n        noise: f32,\n        seed: u64,\n        base_dimension: usize,\n    ) -> Result<Self> {\n        check_argument(\n            period.len() == base_dimension,\n            \" need a period for each dimension \"\n        )?;\n        check_argument(\n            amplitude.len() == base_dimension,\n            \" need an amplitude for each dimension\"\n        )?;\n        let mut rng = ChaCha20Rng::seed_from_u64(seed);\n        let mut noiserng = ChaCha20Rng::seed_from_u64(seed + 1);\n        let mut phase: Vec<usize> = Vec::new();\n\n        for i in 0..base_dimension {\n            phase.push(rng.next_u64() as usize % period[i]);\n        }\n\n        let mut data: Vec<Vec<f32>> = Vec::new();\n        let mut change_indices: Vec<usize> = Vec::new();\n        let mut changes: Vec<Vec<f32>> = Vec::new();\n\n        for i in 0..num {\n            let mut elem = vec![0.0; base_dimension];\n            let flag = noiserng.gen::<f32>() < 0.01;\n            let mut new_change = vec![0.0; base_dimension];\n            let mut used: bool = false;\n            for j in 0..base_dimension {\n                elem[j] = amplitude[j]\n                    * (2.0 * PI * (i + phase[j]) as f32 / period[j] as f32).cos()\n                    + noise * noiserng.gen::<f32>();\n                if flag && noiserng.gen::<f64>() < 0.3 {\n                    let factor: f32 = 5.0 * (1.0 + noiserng.gen::<f32>());\n                    
let mut change: f32 = factor * noise;\n                    if noiserng.gen::<f32>() < 0.5 {\n                        change = -change;\n                    }\n                    elem[j] += change;\n                    new_change[j] = change;\n                    used = true;\n                }\n            }\n            data.push(elem);\n            if used {\n                change_indices.push(i);\n                changes.push(new_change);\n            }\n        }\n        Ok(MultiDimDataWithKey {\n            data,\n            change_indices,\n            labels: Vec::new(),\n            changes,\n        })\n    }\n\n    pub fn mixture(\n        num: usize,\n        mean: &[Vec<f32>],\n        scale: &[Vec<f32>],\n        weight: &[f32],\n        seed: u64,\n    ) -> Result<Self> {\n        let mut rng = ChaCha20Rng::seed_from_u64(seed);\n        check_argument(num > 0, \" number of elements cannot be 0\")?;\n        check_argument(mean.len() > 0, \" cannot be null\")?;\n        let base_dimension = mean[0].len();\n        check_argument(\n            mean.len() == scale.len(),\n            \" need scales and means to be 1-1\"\n        )?;\n        check_argument(\n            weight.len() == mean.len(),\n            \" need weights and means to be 1-1\"\n        )?;\n        for i in 0..mean.len() {\n            check_argument(\n                mean[i].len() == base_dimension,\n                \" must have the same dimensions\"\n            )?;\n            check_argument(\n                scale[i].len() == base_dimension,\n                \"sclaes must have the same dimension as the mean\"\n            )?;\n            check_argument(weight[i] >= 0.0, \" weights cannot be negative\")?;\n        }\n        let sum: f32 = weight.iter().sum();\n\n        let mut data = Vec::new();\n        let mut labels = Vec::new();\n        for _j in 0..num {\n            let mut i = 0;\n            let mut wt: f32 = sum * rng.gen::<f32>();\n            while wt > 
weight[i] {\n                wt -= weight[i];\n                i += 1;\n            }\n            data.push(new_vec(&mean[i], &scale[i], &mut rng));\n            labels.push(i);\n        }\n\n        Ok(MultiDimDataWithKey {\n            data,\n            labels,\n            change_indices: vec![],\n            changes: vec![],\n        })\n    }\n}\n\nfn next_element(mean: f32, scale: f32, rng: &mut ChaCha20Rng) -> f32 {\n    let mut r: f32 = f64::sqrt(-2.0f64 * f64::ln(rng.gen::<f64>())) as f32;\n    // the following is to discard inf being returned from ln()\n    while r.is_infinite() {\n        r = f64::sqrt(-2.0f64 * f64::ln(rng.gen::<f64>())) as f32;\n    }\n\n    let switch: f32 = rng.gen();\n    if 0.5 < switch {\n        mean + scale * r * f32::cos(2.0 * PI * rng.gen::<f32>())\n    } else {\n        mean + scale * r * f32::sin(2.0 * PI * rng.gen::<f32>())\n    }\n}\n\npub fn new_vec(mean: &[f32], scale: &[f32], rng: &mut ChaCha20Rng) -> Vec<f32> {\n    let dimensions = mean.len();\n    let mut answer = Vec::new();\n    for i in 0..dimensions {\n        answer.push(next_element(mean[i], scale[i], rng));\n    }\n    answer\n}\n"
  },
  {
    "path": "Rust/src/common/rangevector.rs",
    "content": "use crate::util::check_argument;\nuse crate::types::Result;\n\n/**\n * A RangeVector is used when we want to track a quantity and its upper and\n * lower bounds\n */\n#[repr(C)]\n#[derive(Clone)]\npub struct RangeVector<T> {\n    pub values: Vec<T>,\n    pub upper: Vec<T>,\n    pub lower: Vec<T>\n}\n\nimpl RangeVector<f32> {\n    pub fn new(dimensions: usize) -> Self {\n        RangeVector {\n            values: vec![0.0; dimensions],\n            upper: vec![0.0; dimensions],\n            lower: vec![0.0; dimensions]\n        }\n    }\n}\n\nimpl RangeVector<f64> {\n    pub fn new(dimensions: usize) -> Self {\n        RangeVector {\n            values: vec![0.0; dimensions],\n            upper: vec![0.0; dimensions],\n            lower: vec![0.0; dimensions]\n        }\n    }\n}\n\n\nimpl<T: PartialOrd + Clone + Copy  + std::ops::AddAssign + std::ops::MulAssign> RangeVector<T> {\n    pub fn from(values : Vec<T>) -> Self {\n        RangeVector{\n            values : values.clone(),\n            upper : values.clone(),\n            lower : values.clone()\n        }\n    }\n\n    pub fn create(values: &[T], upper: &[T], lower:&[T]) -> Result<Self> {\n        check_argument(values.len() == upper.len() && upper.len() == lower.len(), \" incorrect lengths\")?;\n        for i in 0..values.len() {\n            check_argument(values[i] <= upper[i], \" incorrect upper bound\")?;\n            check_argument(lower[i] <= values [i], \"incorrect lower bounds\")?;\n        }\n        Ok(RangeVector{\n            values :Vec::from(values),\n            upper : Vec::from(upper),\n            lower : Vec::from(lower)\n        })\n    }\n\n    pub fn shift(&mut self, i:usize, shift: T) {\n        self.values[i] += shift;\n        self.upper[i] += shift;\n        self.lower[i] += shift;\n        // managing precision explicitly\n        if self.upper[i] < self.values[i] {\n            self.upper[i] = self.values[i];\n        }\n        if self.lower[i] > self.values[i] 
{\n            self.lower[i] = self.values[i];\n        }\n    }\n\n    pub fn cascaded_add(&mut self, base: &[T]) -> Result<()>{\n        check_argument(base.len() >0 , \"must be of positive length\")?;\n        let horizon = self.values.len()/base.len();\n        check_argument(horizon * base.len() == self.values.len(), \" incorrect function call\")?;\n        for j in 0..base.len() {\n            self.shift(j,base[j]);\n        }\n        for i in 1..horizon {\n            for j in 0..base.len() {\n                self.shift(i * base.len() + j, self.values[(i-1)*base.len() + j]);\n            }\n        }\n        Ok(())\n    }\n\n    pub fn scale(&mut self, i:usize, scale: T) {\n        self.values[i] *= scale;\n        self.upper[i] *= scale;\n        self.lower[i] *= scale;\n        // managing precision explicitly\n        if self.upper[i] < self.values[i] {\n            self.upper[i] = self.values[i];\n        }\n        if self.lower[i] > self.values[i] {\n            self.lower[i] = self.values[i];\n        }\n    }\n}\n"
  },
  {
    "path": "Rust/src/common/samplesummary.rs",
    "content": "use std::cmp::min;\nuse std::ops::Index;\nuse crate::types::Result;\nuse crate::util::check_argument;\nuse rayon::range;\n\nuse crate::common::cluster::{Center, multi_cluster_as_weighted_obj, multi_cluster_as_weighted_ref, single_centroid_cluster_weighted_vec_with_distance_over_slices};\n\n///\n/// The goal of the summarization below is as follows: on being provided a collection of sampled weighted points\n/// represented by a slice &[(Vec<f32>,f32)] where each of the Vec<f32> has the same length/dimension\n/// and the f32 in the pair is the corresponding weight.\n/// The algorithm uses the philosophy of RCFs, in repeatedly using randomization. It proceeds as follows:\n/// 1. It uses an initial sampling which serves as a basis of efficiency as well as denoising, borrowing from\n/// https://en.wikipedia.org/wiki/CURE_algorithm, in that algorithm's robustness to outliers.\n/// 2. It uses a sampling mechanism to initialize some clusters based on https://en.wikipedia.org/wiki/Data_stream_clustering\n/// where the radom sampling achieves half of the the same effects as hierarchical compression.\n///3.  It repeatedly merges the most overlapping clusters, failing that, eliminates the least weighted cluster to achieve\n/// the same effect as hieararchical compression.\n///\n/// The algorithm takes a distance function as an input, and tends to produce spherical (measured in the input\n/// distance function) clusters. 
These types of algorithms are unlikely to be useful for large number of output clusters.\n/// The output is the SampleSummary, which provides basic statistics of mean, median and deviation\n/// in addition it performs a grouping/clustering, assuming that the maximum number of clusters are not large\n/// the routine below bounds the number to be max_number_per_dimension times the dimension of Vec<f32>\n/// and a smaller number can also be provided in the summarize() function\n///\n///\n\nconst MAX_NUMBER_PER_DIMENSION: usize = 5;\n\nconst PHASE2_THRESHOLD: usize = 2;\n\nconst LENGTH_BOUND: usize = 1000;\n\nconst UPPER_FRACTION : f64 = 0.9;\n\nconst LOWER_FRACTION : f64 = 0.1;\n\n#[repr(C)]\npub struct SampleSummary {\n    pub summary_points: Vec<Vec<f32>>,\n\n    // a measure of comparison among the typical points;\n    pub relative_weight: Vec<f32>,\n\n    // number of samples, often the number of summary, but can handle weighted points\n    // (possibly indicating confidence or othe measure) in the future\n    pub total_weight: f32,\n\n    // the global mean, median\n    pub mean: Vec<f32>,\n    pub median: Vec<f32>,\n\n    // percentiles and bounds\n    pub upper: Vec<f32>,\n    pub lower : Vec<f32>,\n\n    // This is the global deviation,\n    pub deviation: Vec<f32>,\n}\n\nimpl SampleSummary {\n    pub fn new(\n        total_weight: f32,\n        summary_points: Vec<Vec<f32>>,\n        relative_weight: Vec<f32>,\n        median: Vec<f32>,\n        mean: Vec<f32>,\n        upper: Vec<f32>,\n        lower: Vec<f32>,\n        deviation: Vec<f32>,\n    ) -> Self {\n        SampleSummary {\n            total_weight,\n            summary_points: summary_points.clone(),\n            relative_weight: relative_weight.clone(),\n            median: median.clone(),\n            mean: mean.clone(),\n            upper: upper.clone(),\n            lower: lower.clone(),\n            deviation: deviation.clone(),\n        }\n    }\n\n    pub fn add_typical(&mut self, 
summary_points: Vec<Vec<f32>>, relative_weight: Vec<f32>) {\n        self.summary_points = summary_points.clone();\n        self.relative_weight = relative_weight.clone();\n    }\n\n    pub fn pick(weighted_points : &[(f32,f32)], weight: f64, start: usize, initial_weight : f64) -> (usize,f64) {\n        let mut running = initial_weight;\n        let mut index = start;\n        while index + 1 < weighted_points.len() && weighted_points[index].1 as f64 + running < weight {\n            running += weighted_points[index].1 as f64;\n            index += 1;\n        }\n        (index, running)\n    }\n\n\n    pub fn from_points<Q>(dimensions: usize,points: &[(Q, f32)], lower_fraction: f64, upper_fraction:f64) -> Result<Self>\n        where Q: Index<usize, Output = f32>\n    {\n        check_argument(points.len() > 0, \"cannot be empty list\")?;\n        check_argument(lower_fraction < 0.5, \" has to be less than half\")?;\n        check_argument(upper_fraction > 0.5, \"has to be larger than half\")?;\n        check_argument(dimensions > 0, \" cannot have 0 dimensions\")?;\n        let total_weight: f64 = points.iter().map(|x| x.1 as f64).sum();\n        check_argument(total_weight > 0.0, \"weights cannot be all zero\")?;\n        check_argument(total_weight.is_finite(), \" cannot have infinite weights\")?;\n        let mut mean = vec![0.0f32; dimensions];\n        let mut deviation = vec![0.0f32; dimensions];\n        let mut sum_values_sq = vec![0.0f64; dimensions];\n        let mut sum_values = vec![0.0f64; dimensions];\n        for i in 0..points.len() {\n            check_argument(points[i].1 >= 0.0, \"point weights have to be non-negative\")?;\n            for j in 0..dimensions {\n                check_argument(\n                    points[i].0[j].is_finite() && !points[i].0[j].is_nan(),\n                    \" cannot have NaN or infinite values\"\n                )?;\n                sum_values[j] += points[i].1 as f64 * points[i].0[j] as f64;\n                
sum_values_sq[j] +=\n                    points[i].1 as f64 * points[i].0[j] as f64 * points[i].0[j] as f64;\n            }\n        }\n        for j in 0..dimensions {\n            mean[j] = (sum_values[j] / total_weight) as f32;\n            let t: f64 = sum_values_sq[j] / total_weight\n                - sum_values[j] * sum_values[j] / (total_weight * total_weight);\n            deviation[j] = f64::sqrt(if t > 0.0 { t } else { 0.0 }) as f32;\n        }\n        let mut median = vec![0.0f32; dimensions];\n        let mut upper_vec = vec![0.0f32;dimensions];\n        let mut lower_vec = vec![0.0f32;dimensions];\n        let num = total_weight/2.0;\n        let lower = total_weight * lower_fraction;\n        let upper = total_weight * upper_fraction;\n        for j in 0..dimensions {\n            let mut y: Vec<(f32,f32)> = points.iter().map(|x| (x.0[j],x.1)).collect();\n            y.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n            let first = Self::pick(&y,lower,0,0.0);\n            lower_vec[j] = y[first.0].0;\n            let second = Self::pick(&y,num,first.0,first.1);\n            median[j] = y[second.0].0;\n            let third = Self::pick(&y,upper,second.0,second.1);\n            upper_vec[j] = y[third.0].0;\n        }\n\n        Ok(SampleSummary {\n            summary_points: Vec::new(),\n            relative_weight: Vec::new(),\n            total_weight: total_weight as f32,\n            mean,\n            upper: upper_vec,\n            lower: lower_vec,\n            median,\n            deviation,\n        })\n    }\n\n    pub fn from_references<Q>(dimensions: usize, points: &[(&Q, f32)], lower_fraction: f64, upper_fraction:f64) -> Result<Self>\n    where Q:?Sized + Index<usize, Output = f32>\n    {\n        check_argument(points.len() > 0, \"cannot be empty list\")?;\n        check_argument(lower_fraction < 0.5, \" has to be less than half\")?;\n        check_argument(upper_fraction > 0.5, \"has to be larger than half\")?;\n        
check_argument(dimensions > 0, \" cannot have 0 dimensions\")?;\n        let total_weight: f64 = points.iter().map(|x| x.1 as f64).sum();\n        check_argument(total_weight > 0.0, \"weights cannot be all zero\")?;\n        check_argument(total_weight.is_finite(), \" cannot have infinite weights\")?;\n        let mut mean = vec![0.0f32; dimensions];\n        let mut deviation = vec![0.0f32; dimensions];\n        let mut sum_values_sq = vec![0.0f64; dimensions];\n        let mut sum_values = vec![0.0f64; dimensions];\n        for i in 0..points.len() {\n            check_argument(points[i].1 >= 0.0, \"point weights have to be non-negative\")?;\n            for j in 0..dimensions {\n                check_argument(\n                    points[i].0[j].is_finite() && !points[i].0[j].is_nan(),\n                    \" cannot have NaN or infinite values\"\n                )?;\n                sum_values[j] += points[i].1 as f64 * points[i].0[j] as f64;\n                sum_values_sq[j] +=\n                    points[i].1 as f64 * points[i].0[j] as f64 * points[i].0[j] as f64;\n            }\n        }\n        for j in 0..dimensions {\n            mean[j] = (sum_values[j] / total_weight) as f32;\n            let t: f64 = sum_values_sq[j] / total_weight\n                - sum_values[j] * sum_values[j] / (total_weight * total_weight);\n            deviation[j] = f64::sqrt(if t > 0.0 { t } else { 0.0 }) as f32;\n        }\n        let mut median = vec![0.0f32; dimensions];\n        let mut upper_vec = vec![0.0f32;dimensions];\n        let mut lower_vec = vec![0.0f32;dimensions];\n        let num = total_weight/2.0;\n        let lower = total_weight * lower_fraction;\n        let upper = total_weight * upper_fraction;\n        for j in 0..dimensions {\n            let mut y: Vec<(f32,f32)> = points.iter().map(|x| (x.0[j],x.1)).collect();\n            y.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n            let first = Self::pick(&y,lower,0,0.0);\n            
lower_vec[j] = y[first.0].0;\n            let second = Self::pick(&y,num,first.0,first.1);\n            median[j] = y[second.0].0;\n            let third = Self::pick(&y,upper,second.0,second.1);\n            upper_vec[j] = y[third.0].0;\n        }\n\n        Ok(SampleSummary {\n            summary_points: Vec::new(),\n            relative_weight: Vec::new(),\n            total_weight: total_weight as f32,\n            mean,\n            upper: upper_vec,\n            lower: lower_vec,\n            median,\n            deviation,\n        })\n    }\n}\n\npub fn summarize(\n    points: &[(Vec<f32>, f32)],\n    distance: fn(&[f32], &[f32]) -> f64,\n    max_number: usize,\n    parallel_enabled: bool,\n) -> Result<SampleSummary> {\n    let dimensions = points[0].0.len();\n    let mut summary = SampleSummary::from_points(dimensions,&points,LOWER_FRACTION,UPPER_FRACTION)?;\n\n    if max_number > 0 {\n        let max_allowed = min(dimensions * MAX_NUMBER_PER_DIMENSION, max_number);\n\n        let mut list: Vec<Center> = single_centroid_cluster_weighted_vec_with_distance_over_slices(\n            &points,\n            distance,\n            max_allowed,\n            parallel_enabled,\n        )?;\n        list.sort_by(|o1, o2| o2.weight().partial_cmp(&o1.weight()).unwrap()); // decreasing order\n        let mut summary_points: Vec<Vec<f32>> = Vec::new();\n        let mut relative_weight: Vec<f32> = Vec::new();\n        let center_sum: f64 = list.iter().map(|x| x.weight()).sum();\n        for i in 0..list.len() {\n            summary_points.push(list[i].representative().clone());\n            relative_weight.push((list[i].weight() / center_sum) as f32);\n        }\n        summary.add_typical(summary_points, relative_weight);\n    }\n    return Ok(summary);\n}\n\n\npub fn multi_summarize_ref(\n    points: &[(&[f32], f32)],\n    distance: fn(&[f32], &[f32]) -> f64,\n    number_of_representatives: usize,\n    shrinkage : f32,\n    max_number: usize,\n    parallel_enabled: 
bool,\n) -> Result<SampleSummary> {\n    let dimensions = points[0].0.len();\n    let mut summary = SampleSummary::from_references(dimensions,points,LOWER_FRACTION,UPPER_FRACTION)?;\n\n    if max_number > 0 {\n        let max_allowed = min(dimensions * MAX_NUMBER_PER_DIMENSION, max_number);\n\n        let mut list= multi_cluster_as_weighted_ref(\n            &points,\n            distance,\n            number_of_representatives,\n            shrinkage,\n            false,\n            max_allowed,\n            parallel_enabled,\n        )?;\n        list.sort_by(|o1, o2| o2.weight().partial_cmp(&o1.weight()).unwrap()); // decreasing order\n        let mut summary_points: Vec<Vec<f32>> = Vec::new();\n        let mut relative_weight: Vec<f32> = Vec::new();\n        let center_sum: f64 = list.iter().map(|x| x.weight()).sum();\n        for i in 0..list.len() {\n            summary_points.push(Vec::from(list[i].representatives()[0].0));\n            relative_weight.push((list[i].weight() / center_sum) as f32);\n        }\n        summary.add_typical(summary_points, relative_weight);\n    }\n\n    return Ok(summary);\n}\n"
  },
  {
    "path": "Rust/src/errors.rs",
    "content": "use std::error;\nuse std::fmt;\n\n/// Errors that can be returned by RCF operations.\n#[derive(Debug, PartialEq)]\npub enum RCFError {\n    InvalidArgument {\n        msg: &'static str,\n    },\n}\n\nimpl error::Error for RCFError {}\n\nimpl fmt::Display for RCFError {\n    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n        match *self {\n            RCFError::InvalidArgument { msg } => write!(f, \"{}\", msg),\n        }\n    }\n}\n"
  },
  {
    "path": "Rust/src/example.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey,\n    rcf::{RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\nfn main() {\n    let shingle_size = 8;\n    let base_dimension = 5;\n    let data_size = 100000;\n    let number_of_trees = 30;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = false;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = true;\n    let internal_rotation = false;\n    let noise = 5.0;\n\n    let mut forest = RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n        .tree_capacity(capacity).number_of_trees(number_of_trees).random_seed(random_seed)\n        .store_attributes(store_attributes).parallel_enabled(parallel_enabled).internal_shingling(internal_shingling)\n        .time_decay(time_decay).initial_accept_fraction(initial_accept_fraction)\n        .internal_rotation(internal_rotation)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build_default().unwrap();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 60.0);\n    }\n\n    let data_with_key = multidimdatawithkey::MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        0,\n        base_dimension.into(),\n    ).unwrap();\n\n    let mut score: f64 = 0.0;\n    let _next_index = 0;\n    let mut error = 0.0;\n    let mut count = 0;\n\n    for i in 0..data_with_key.data.len() {\n        if i > 200 {\n            let next_values = 
forest.extrapolate(1).unwrap().values;\n            assert_eq!(next_values.len(), base_dimension);\n            error += next_values\n                .iter()\n                .zip(&data_with_key.data[i])\n                .map(|(x, y)| ((x - y) as f64 * (x - y) as f64))\n                .sum::<f64>();\n            count += base_dimension;\n        }\n\n        let new_score = forest.score(&data_with_key.data[i]).unwrap();\n        //println!(\"{} {} score {}\",y,i,new_score);\n        /*\n        if next_index < data_with_key.change_indices.len() && data_with_key.change_indices[next_index] == i {\n            println!(\" score at change {} position {} \", new_score, i);\n            next_index += 1;\n        }\n        */\n\n        score += new_score;\n        forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    println!(\n        \"Average score {} \",\n        (score / data_with_key.data.len() as f64)\n    );\n    println!(\"Success! {}\", forest.entries_seen());\n    println!(\"PointStore Size {} \", forest.point_store_size());\n    println!(\"Total size {} bytes (approx)\", forest.size());\n    println!(\n        \" RMSE {},  noise {} \",\n        f64::sqrt(error / count as f64),\n        noise\n    );\n}\n"
  },
  {
    "path": "Rust/src/glad.rs",
    "content": "\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\nuse crate::common::cluster::{multi_cluster_as_weighted_obj, MultiCenter, persist};\nuse crate::util::check_argument;\nuse crate::trcf::basicthresholder::BasicThresholder;\nuse crate::common::intervalstoremanager::IntervalStoreManager;\nuse crate::types::Result;\n\npub const DEFAULT_MAX_CLUSTERS :usize = 10;\npub const SCORE_MAX : f32 = 10.0;\npub const DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE : f32 = 0.005;\n//ignore clusters that are 10 or more times away from the closest\npub const CLUSTER_COMPARISON_THRESHOLD : f64 = 10.0;\n\n#[repr(C)]\npub struct GenericAnomalyDescriptor<T> {\n    pub representative_list: Vec<(T, f32)>,\n    pub score: f64,\n    pub threshold: f32,\n    pub grade: f32,\n}\n\n#[repr(C)]\npub struct GlobalLocalAnomalyDetector<T:Clone + Sync> {\n    capacity: usize,\n    current_size: usize,\n    random_seed: u64,\n    heap: Vec<(f64,usize)>,\n    object_list : Vec<(T,f32)>,\n    time_decay: f64,\n    most_recent_time_decay_update: u64,\n    accumulated_decay: f64,\n    interval_manager: IntervalStoreManager<usize>,\n    basic_thresholder: BasicThresholder,\n    last_cluster: u64,\n    do_not_recluster_within : u64,\n    entries_seen: u64,\n    sequence_number: u64,\n    last_mean: f32,\n    evicted : Option<(T,f32)>,\n    clusters: Vec<MultiCenter<T>>,\n    max_allowed: usize,\n    shrinkage: f32,\n    is_compact: bool,\n    number_of_representatives: usize,\n    ignore_below: f32,\n    initial_accept_fraction: f64,\n    //global_distance : fn(&T,&T) -> f64\n}\n\nimpl<T:Clone + Sync> GlobalLocalAnomalyDetector<T> {\n    pub fn new(capacity: usize, random_seed: u64, time_decay: f64, number_of_representatives: usize, shrinkage: f32, is_compact: bool) -> Result<Self>{\n        let mut basic_thresholder = BasicThresholder::new_adjustible(time_decay,false)?;\n        basic_thresholder.set_absolute_threshold(1.2);\n        if !is_compact {\n     
       basic_thresholder.set_z_factor(2.5);\n        }\n        Ok(GlobalLocalAnomalyDetector{\n            capacity,\n            current_size: 0,\n            random_seed,\n            heap: vec![],\n            object_list: vec![],\n            time_decay,\n            most_recent_time_decay_update: 0,\n            accumulated_decay: 0.0,\n            interval_manager: IntervalStoreManager::new(capacity),\n            basic_thresholder,\n            last_cluster: 0,\n            do_not_recluster_within: (capacity / 2) as u64,\n            entries_seen: 0,\n            sequence_number: 0,\n            last_mean: -1.0, // forcing the first clustering\n            evicted: Option::None,\n            clusters: Vec::new(),\n            max_allowed: 10,\n            shrinkage,\n            is_compact,\n            number_of_representatives,\n            ignore_below: DEFAULT_IGNORE_SMALL_CLUSTER_REPRESENTATIVE,\n            initial_accept_fraction: 0.125,\n            //global_distance: ()\n        })\n    }\n\n    fn initial_accept_probability(&self, fill_fraction: f64) -> f64 {\n        return if fill_fraction < self.initial_accept_fraction {\n            1.0\n        } else if self.initial_accept_fraction >= 1.0 {\n            0.0\n        } else {\n            1.0 - (fill_fraction - self.initial_accept_fraction)\n                / (1.0 - self.initial_accept_fraction)\n        };\n    }\n\n    fn fill_fraction(&self) -> f64 {\n        if self.current_size == self.capacity {\n            return 1.0;\n        };\n        self.current_size as f64 / self.capacity as f64\n    }\n\n    fn compute_weight(&self, random_number: f64, weight: f32) -> f64 {\n        f64::ln(-f64::ln(random_number) / weight as f64) -\n            ((self.entries_seen - self.most_recent_time_decay_update) as f64\n                * self.time_decay - self.accumulated_decay)\n    }\n\n    fn swap_down(&mut self, start_index: usize) {\n        let mut current: usize = start_index;\n        while 2 * 
current + 1 < self.current_size {\n            let mut max_index: usize = 2 * current + 1;\n            if 2 * current + 2 < self.current_size\n                && self.heap[2 * current + 2].0 > self.heap[max_index].0\n            {\n                max_index = 2 * current + 2;\n            }\n            if self.heap[max_index].0 > self.heap[current].0 {\n                self.swap_weights(current, max_index);\n                current = max_index;\n            } else {\n                break;\n            }\n        }\n    }\n\n    fn swap_weights(&mut self, a: usize, b: usize) {\n        let tmp = self.heap[a];\n        self.heap[a] = self.heap[b];\n        self.heap[b] = tmp;\n    }\n\n    fn evict_max(&mut self) -> (f64, usize) {\n        let evicted_point = self.heap[0];\n        self.current_size -= 1;\n        let current: usize = self.current_size.into();\n        self.heap[0] = self.heap[current];\n        self.heap[0] = self.heap[current];\n        self.swap_down(0);\n        evicted_point\n    }\n\n    fn sample(&mut self, object: &T, weight: f32) -> Result<bool> {\n        self.sequence_number += 1;\n        self.entries_seen += 1;\n        let mut initial = false;\n        let mut rng = ChaCha20Rng::seed_from_u64(self.random_seed);\n        self.random_seed = rng.next_u64();\n        let random_number: f64 = rng.gen();\n        let heap_weight = self.compute_weight(random_number, weight);\n        if self.current_size < self.capacity {\n            let other_random: f64 = rng.gen();\n            initial = other_random < self.initial_accept_probability(self.fill_fraction());\n        }\n        if initial || (heap_weight < self.heap[0].0) {\n            if !initial {\n                let old_index = self.evict_max().1;\n                self.evicted = Some(self.object_list[old_index].clone());\n                self.interval_manager.release(old_index)?;\n            }\n            let index = self.interval_manager.get()?;\n            if index < 
self.object_list.len() {\n                self.object_list[index] = (object.clone(), weight);\n            } else {\n                self.object_list.push((object.clone(), weight));\n            }\n            if self.heap.len() == self.current_size {\n                self.heap.push((heap_weight, index));\n            } else {\n                self.heap[self.current_size] = (heap_weight, index);\n            }\n            let mut current = self.current_size;\n            self.current_size += 1;\n\n            while current > 0 {\n                let tmp = (current - 1) / 2;\n                if self.heap[tmp].0 < self.heap[current].0 {\n                    self.swap_weights(current, tmp);\n                    current = tmp;\n                } else {\n                    break;\n                }\n            }\n            return Ok(true);\n        };\n        Ok(false)\n    }\n\n    pub fn set_z_factor(&mut self, z_factor : f32){\n        self.basic_thresholder.set_z_factor(z_factor);\n    }\n\n    pub fn score(&self, current: &T, local_distance: fn(&T, &T) -> f64, consider_occlusion: bool) -> Result<Vec<(T, f32)>> {\n        if self.clusters.len() == 0 {\n            return Ok(Vec::new());\n        } else {\n            let mut candidate_list: Vec<(usize, (f64, &T), f64)> = Vec::new();\n            for j in 0..self.clusters.len() {\n                let rad = self.clusters[j].average_radius();\n                let close = self.clusters[j].distance_to_point_and_ref(current, self.ignore_below, local_distance)?;\n                candidate_list.push((j, close, rad));\n            }\n            candidate_list.sort_by(|a, b| a.1.0.partial_cmp(&b.1.0)\n                .expect(\"should not have NaN/Infinities\"));\n\n            if candidate_list[0].1.0 == 0.0 {\n                return Ok(vec![(candidate_list[0].1.1.clone(), 0.0)]);\n            }\n            let mut index = 0;\n            while index < candidate_list.len() {\n                let head = 
candidate_list[index];\n                if consider_occlusion {\n                    for j in index + 1..candidate_list.len() {\n                        let occlude = (local_distance)(head.1.1, candidate_list[j].1.1);\n                        check_argument(occlude>=0.0, \"distances cannot be negative\")?;\n                        if candidate_list[j].2 > f64::sqrt(occlude * occlude + head.2 * head.2) {\n                            candidate_list.remove(j);\n                        }\n                    }\n                }\n                index += 1;\n            }\n            let mut answer = Vec::new();\n            let distance_threshold = candidate_list[0].1.0 * CLUSTER_COMPARISON_THRESHOLD;\n            for head in &candidate_list {\n                if head.1.0 < distance_threshold {\n                    let temp_measure = if head.2 > 0.0 && head.1.0 < SCORE_MAX as f64 * head.2 {\n                        (head.1.0 / head.2) as f32\n                    } else {\n                        SCORE_MAX\n                    };\n                    answer.push((head.1.1.clone(), temp_measure));\n                }\n            }\n            Ok(answer)\n        }\n    }\n\n    pub fn process(&mut self, object: &T, weight: f32, global_distance: fn(&T, &T) -> f64, local_distance: fn(&T, &T) -> f64, consider_occlusion: bool)\n                   -> Result<GenericAnomalyDescriptor<T>> {\n        check_argument(weight >= 0.0, \"weight cannot be negative\")?;\n        // recompute clusters first; this enables easier merges and deserialization\n        if self.sequence_number > self.last_cluster + self.do_not_recluster_within {\n            let current_mean = self.basic_thresholder.primary_mean() as f32;\n            if f32::abs(current_mean - self.last_mean) > 0.1 || current_mean > 1.7f32\n                || self.sequence_number > self.last_cluster + 20 * self.do_not_recluster_within {\n                self.last_cluster = self.sequence_number;\n                
self.last_mean = current_mean;\n                let temp = multi_cluster_as_weighted_obj(&self.object_list,\n                                                         global_distance, 5, 0.1, self.is_compact, self.max_allowed, false)?;\n                self.clusters = persist(&temp);\n            }\n        }\n        let mut score_list = self.score(object, local_distance, consider_occlusion)?;\n        let threshold = self.basic_thresholder.threshold();\n        let mut grade: f32 = 0.0;\n        let score: f32 = if score_list.len() == 0 { 0.0 } else {\n            score_list.iter().map(|a| a.1).min_by(|a, b| a.partial_cmp(b)\n                .expect(\"should not contain NaN, corrupt state\"))\n                .expect(\"should be total order, corrupt state\")\n        };\n\n        if score_list.len() > 0 {\n            if score < SCORE_MAX {\n                // an exponential attribution\n                let sum: f64 = score_list.iter().map(|a|\n                    if a.1 == SCORE_MAX { 0.0f64 } else {\n                        f64::exp(-(a.1 * a.1) as f64)\n                    }\n                ).sum();\n                for mut item in &mut score_list {\n                    let t = if item.1 == f32::MAX { 0.0 } else {\n                        f64::min(1.0, f64::exp(-(item.1 * item.1) as f64) / sum)\n                    };\n                    item.1 = t as f32;\n                }\n            } else {\n                let y = score_list.len();\n                for mut item in &mut score_list {\n                    item.1 = 1.0 / (y as f32);\n                }\n            }\n            grade = self.basic_thresholder.primary_grade(score);\n            let other = self.basic_thresholder.z_factor();\n            self.basic_thresholder.update_both(score, f32::min(score, other));\n        }\n        self.sample(object, weight)?;\n\n        Ok(GenericAnomalyDescriptor {\n            representative_list: score_list,\n            score: score as f64,\n            
threshold,\n            grade\n        })\n    }\n\n    pub fn clusters(&self) -> Vec<MultiCenter<T>> {\n        self.clusters.clone()\n    }\n}\n\n"
  },
  {
    "path": "Rust/src/lib.rs",
    "content": "pub mod common;\npub mod errors;\nmod pointstore;\npub mod rcf;\nmod samplerplustree;\nmod types;\nmod util;\npub mod visitor;\npub mod trcf;\npub mod glad;\n\nextern crate rand;\nextern crate rand_chacha;\n\nuse num::abs;\n\npub fn l1distance(a: &[f32], b: &[f32]) -> f64 {\n    a.iter()\n        .zip(b)\n        .map(|(&x, &y)| abs(x as f64 - y as f64))\n        .sum()\n}\n\npub fn linfinitydistance(a: &[f32], b: &[f32]) -> f64 {\n    let mut dist = 0.0;\n    for i in 0..a.len() {\n        let t = abs(a[i] as f64 - b[i] as f64);\n        if dist < t {\n            dist = t;\n        };\n    }\n    dist\n}\n\npub fn l2distance(a: &[f32], b: &[f32]) -> f64 {\n    f64::sqrt(\n        a.iter()\n            .zip(b)\n            .map(|(&x, &y)| (abs(x as f64 - y as f64) * abs(x as f64 - y as f64)))\n            .sum(),\n    )\n}\n"
  },
  {
    "path": "Rust/src/pointstore.rs",
    "content": "extern crate num;\nuse std::{collections::HashMap, convert::TryFrom, fmt::Debug};\nuse std::hash::Hash;\nuse std::ptr::hash;\nuse crate::types::{Result};\nuse crate::{common::intervalstoremanager::IntervalStoreManager, types::Location};\nuse crate::errors::RCFError;\nuse crate::util::check_argument;\n\npub const MAX_ATTRIBUTES: usize = 10;\n\npub trait PointStore<Label,Attributes> where Label: Copy + Sync, Attributes: Copy + Sync + Hash + Eq + Send {\n    fn shingled_point(&self, point: &[f32]) -> Result<Vec<f32>>;\n    fn size(&self) -> usize;\n    fn missing_indices(&self, look_ahead: usize, values: &[usize]) -> Result<Vec<usize>>;\n    fn next_indices(&self, look_ahead: usize) -> Result<Vec<usize>>;\n    fn copy(&self, index: usize) -> Result<Vec<f32>>;\n    fn is_equal(&self, point: &[f32], index: usize) -> Result<bool>;\n    fn reference_and_offset(&self, index: usize) -> Result<(&[f32], usize)>;\n    fn entries_seen(&self) -> u64;\n    fn add(&mut self, point: &[f32], label:Label) -> Result<(usize,usize,Option<Vec<f32>>)>;\n    fn inc(&mut self, index: usize,attribute_index: usize) -> Result<()>;\n    fn dec(&mut self, index: usize,attribute_index: usize) -> Result<()>;\n    fn adjust_count(&mut self, result: &[((usize, usize),(usize,usize))]) -> Result<()>;\n    fn compact(&mut self) -> Result<()>;\n    fn label(&self, index: usize) -> Result<Label>;\n    fn attribute(&self, index: usize) -> Result<Attributes>;\n    fn point_sum(&self, list:&[(usize,usize)]) -> Result<Vec<f32>>;\n    fn attribute_vec(&self, index: usize) -> Result<Vec<f32>>;\n}\n\n#[repr(C)]\npub struct VectorizedPointStore<L,Label,Attributes>\nwhere\n    L: Location,\n    Label: Copy + Sync,\n    Attributes: Copy + Sync + Hash + Eq + Send,\n{\n    internal_shingling: bool,\n    internal_rotation: bool,\n    store_labels: bool,\n    store_attributes: bool,\n    propagate_attributes: bool,\n    last_known_shingle: Vec<f32>,\n    dimensions: usize,\n    shingle_size: usize,\n   
 capacity: usize,\n    store: Vec<f32>,\n    labels: HashMap<usize,(Label,usize)>,\n    attribute_reverse_map: HashMap<Attributes,usize>,\n    attributes: Vec<Attributes>,\n    label_shingle: Vec<Label>,\n    reference_count: Vec<u8>,\n    label_count: Vec<u8>,\n    attribute_count: Vec<u8>,\n    location: Vec<L>,\n    next_sequence_index: usize,\n    start_free_region: usize,\n    index_manager: IntervalStoreManager<usize>,\n    label_manager: IntervalStoreManager<usize>,\n    attribute_manager: IntervalStoreManager<usize>,\n    hash_reference_counts: HashMap<usize, usize>,\n    hash_label_counts: HashMap<usize, usize>,\n    hash_attribute_counts: HashMap<usize, usize>,\n    entries_seen: u64,\n    attribute_creator: fn(_label_shingle: &[Label], _current_label : Label) -> Result<Attributes>,\n    attribute_to_vec: Option<fn( _attribute :&Attributes) -> Result<Vec<f32>>>\n}\n\nimpl<L,Label,Attributes> VectorizedPointStore<L,Label,Attributes>\nwhere\n    L: Location,\n    usize: From<L>,\n    Label : Copy + Sync + Send,\n    Attributes: Copy + Sync + Hash + Eq + Send,\n    <L as TryFrom<usize>>::Error: Debug,\n{\n    pub fn new(\n        dimensions: usize,\n        shingle_size: usize,\n        capacity: usize,\n        initial_capacity: usize,\n        internal_shingling: bool,\n        internal_rotation: bool,\n        store_attributes: bool,\n        propagate_attributes: bool,\n        attribute_creator: fn(_label_shingle: &[Label], _current_label : Label) -> Result<Attributes>,\n        attribute_to_vec: Option<fn( _attribute :&Attributes) -> Result<Vec<f32>>>\n    ) -> Result<Self> {\n        Ok(VectorizedPointStore {\n            internal_shingling,\n            internal_rotation,\n            store_labels: store_attributes,\n            store_attributes,\n            dimensions,\n            shingle_size,\n            capacity,\n            store: vec![0.0; initial_capacity * dimensions],\n            labels: HashMap::new(),\n            
attribute_reverse_map: HashMap::new(),\n            location: vec![L::MAX; initial_capacity],\n            reference_count: vec![0; initial_capacity],\n            start_free_region: 0,\n            index_manager: IntervalStoreManager::<usize>::new(initial_capacity),\n            label_manager: IntervalStoreManager::<usize>::new(capacity),\n            attribute_manager: IntervalStoreManager::<usize>::new(capacity),\n            last_known_shingle: vec![0.0; dimensions],\n            hash_reference_counts: HashMap::new(),\n            hash_label_counts: HashMap::new(),\n            hash_attribute_counts: HashMap::new(),\n            entries_seen: 0,\n            next_sequence_index: 0,\n            attribute_count: Vec::new(),\n            label_count: Vec::new(),\n            label_shingle: Vec::new(),\n            attribute_creator,\n            attribute_to_vec,\n            propagate_attributes,\n            attributes: Vec::new()\n        })\n    }\n\n    fn ready_to_copy(&self, point: &[f32]) -> bool {\n        let mut answer: bool = self.shingle_size > 1;\n        let base = self.dimensions / self.shingle_size;\n        let mut index: usize = self.start_free_region;\n        let extra = self.dimensions - base;\n        if answer && index > extra {\n            index -= extra;\n            for i in 0..(extra) {\n                answer = answer && (self.store[index + i] == point[i]);\n            }\n        } else {\n            answer = false;\n        }\n        answer\n    }\n\n    fn inc_helper(index : usize, reference_counts: &mut [u8], hashmap: &mut HashMap<usize,usize>) -> Result<()>{\n        check_argument(index < reference_counts.len(), \"incorrect range of index at insert\")?;\n        if reference_counts[index] == u8::MAX {\n            if let Some(a) = hashmap.remove(&index) {\n                hashmap.insert(index, a + 1);\n            } else {\n                hashmap.insert(index, 1);\n            }\n        } else {\n            
reference_counts[index] += 1;\n        };\n        Ok(())\n    }\n\n    fn dec_helper(index : usize, reference_counts: &mut [u8], hashmap: &mut HashMap<usize,usize>) -> Result<()>{\n        check_argument(index < reference_counts.len(), \"incorrect range of index at delete\")?;\n        check_argument(reference_counts[index] != 0, \"index not in use for delete\")?;\n\n        if let Some(a) = hashmap.remove(&index) {\n            if a > 1 {\n                hashmap.insert(index, a - 1);\n            }\n        } else {\n            reference_counts[index] -= 1;\n        }\n        Ok(())\n    }\n}\n\nimpl<L,Label,Attributes> PointStore<Label,Attributes> for VectorizedPointStore<L,Label,Attributes>\nwhere\n    L: Location,\n    usize: From<L>,\n    Label: Copy + Sync + Send,\n    Attributes : Copy + Sync + Eq + Hash + Send,\n    <L as TryFrom<usize>>::Error: Debug,\n{\n    fn shingled_point(&self, point: &[f32]) -> Result<Vec<f32>> {\n        let mut new_point = vec![0.0; self.dimensions];\n        let base = self.dimensions / self.shingle_size;\n        if point.len() == base && self.shingle_size > 1 {\n            check_argument(self.internal_shingling, \"expecting input corresponding to internal shingling\")?;\n            if !self.internal_rotation {\n                for i in 0..(self.dimensions - base) {\n                    new_point[i] = self.last_known_shingle[i + base];\n                }\n                for i in 0..base {\n                    new_point[self.dimensions - base + i] = point[i];\n                }\n            } else {\n                for i in 0..(self.dimensions) {\n                    new_point[i] = self.last_known_shingle[i];\n                }\n                let offset = (self.next_sequence_index * base) % self.dimensions;\n                for i in 0..base {\n                    new_point[offset + i] = point[i];\n                }\n            }\n            return Ok(new_point);\n        } else {\n            
check_argument(point.len() == self.dimensions, \" expecting externally shingled input\")?;\n        }\n        for i in 0..self.dimensions {\n            new_point[i] = point[i];\n        }\n        Ok(new_point)\n    }\n\n    fn size(&self) -> usize {\n        self.store.len() * std::mem::size_of::<f32>()\n            + self.location.len() * std::mem::size_of::<L>()\n            + self.reference_count.len() * std::mem::size_of::<u8>()\n            + self.index_manager.get_size()\n            + std::mem::size_of::<VectorizedPointStore<L,Label,Attributes>>()\n    }\n\n    fn missing_indices(&self, look_ahead: usize, values: &[usize]) -> Result<Vec<usize>> {\n        if !self.internal_shingling {\n            for x in values {\n                check_argument(*x<self.dimensions, \"incorrect input\")?;\n            }\n            return Ok(Vec::from(values));\n        }\n        let mut answer = Vec::new();\n        let base = self.dimensions / self.shingle_size;\n        for i in 0..values.len() {\n            check_argument(values[i] < base, \"incorrect input\")?;\n            if self.internal_rotation {\n                answer.push(\n                    ((self.next_sequence_index + look_ahead) * base + values[i]) % self.dimensions,\n                );\n            } else {\n                answer.push(self.dimensions - base + values[i]);\n            }\n        }\n        Ok(answer)\n    }\n\n    fn next_indices(&self, look_ahead: usize) -> Result<Vec<usize>> {\n        let base = self.dimensions / self.shingle_size;\n        let vec: Vec<usize> = (0..base).collect();\n        self.missing_indices(look_ahead, &vec)\n    }\n\n    fn copy(&self, index: usize) -> Result<Vec<f32>> {\n        let mut new_point = vec![0.0; self.dimensions];\n        let (reference, offset) = self.reference_and_offset(index)?;\n        if self.internal_rotation {\n            for i in 0..self.dimensions {\n                new_point[(i + offset) % self.dimensions] = reference[i];\n          
  }\n        } else {\n            for i in 0..self.dimensions {\n                new_point[i] = reference[i];\n            }\n        }\n        Ok(new_point)\n    }\n\n    fn is_equal(&self, point: &[f32], index: usize) -> Result<bool> {\n        let (reference, offset) = self.reference_and_offset(index)?;\n        if self.internal_rotation {\n            for i in 0..self.dimensions {\n                if point[(i + offset) % self.dimensions] != reference[i] {\n                    return Ok(false);\n                }\n            }\n            return Ok(true);\n        } else {\n            return Ok(point.eq(reference));\n        }\n    }\n\n    fn reference_and_offset(&self, index: usize) -> Result<(&[f32], usize)> {\n        let base = self.dimensions / self.shingle_size;\n        check_argument(self.reference_count[index] != 0 , \"index not in use\")?;\n\n        let locn : usize = self.location[index].try_into().expect(\"corrupt state\");\n        let adj_locn = locn * base;\n        let offset = if !self.internal_rotation {\n            0\n        } else {\n            adj_locn % self.dimensions\n        };\n        Ok((&self.store[adj_locn..(adj_locn + self.dimensions)], offset))\n    }\n\n    fn entries_seen(&self) -> u64 {\n        self.entries_seen\n    }\n\n    fn add(&mut self, point: &[f32], label:Label) -> Result<(usize,usize,Option<Vec<f32>>)> {\n        let base = self.dimensions / self.shingle_size;\n        self.next_sequence_index += 1;\n\n        if self.internal_shingling {\n            check_argument(point.len() == base, \"incorrect length\")?;\n            for i in 0..(self.dimensions - base) {\n                self.last_known_shingle[i] = self.last_known_shingle[i + base];\n            }\n            for i in 0..base {\n                self.last_known_shingle[self.dimensions - base + i] = point[i];\n            }\n            if self.store_labels {\n                if self.next_sequence_index <= self.shingle_size {\n                    
self.label_shingle.push(label);\n                } else {\n                    for i in 0..self.shingle_size - 1 {\n                        self.label_shingle[i] = self.label_shingle[i + 1];\n                    }\n                    self.label_shingle[self.shingle_size - 1] = label;\n                }\n            }\n            if self.next_sequence_index < self.shingle_size {\n                return Ok((usize::MAX,usize::MAX,None));\n            }\n        } else {\n            check_argument(point.len() == self.dimensions, \"incorrect lengths\")?;\n        }\n\n        let mut attrib_vec = None;\n        let attrib_pos = if self.store_attributes {\n            let new_attribute = (self.attribute_creator)(&self.label_shingle, label)?;\n            let a_pos = *self.attribute_reverse_map.get(&new_attribute).unwrap_or(&self.attributes.len());\n            let b_pos = if a_pos >= self.attributes.len() {\n                let y = self.attribute_manager.get()?;\n                self.attribute_reverse_map.insert(new_attribute, y);\n                y\n            } else {\n                a_pos\n            };\n            if b_pos == self.attribute_count.len() {\n                    self.attributes.push(new_attribute);\n                    self.attribute_count.push(1);\n            } else {\n                self.attributes[b_pos] = new_attribute;\n                Self::inc_helper(b_pos,&mut self.attribute_count,&mut self.hash_attribute_counts)?;\n            };\n            b_pos\n        } else {usize::MAX};\n\n        let label_pos = if self.store_labels {\n            let y = self.label_manager.get()?;\n            if y >= self.labels.len() {\n                check_argument(y == self.labels.len(), \" incorrect behavior in labels\")?;\n                self.labels.insert(y,(label,attrib_pos));\n                self.label_count.push(1);\n            } else {\n                check_argument(self.label_count[y] == 0, \" incorrect state in label management\")?;\n         
       self.labels.insert(y,(label,attrib_pos));\n                self.label_count[y] = 1;\n            };\n            y\n        } else {usize::MAX};\n\n\n        if self.dimensions + self.start_free_region > self.store.len() {\n            self.compact()?;\n            if self.dimensions + self.start_free_region > self.store.len() {\n                let mut new_size = self.store.len() + self.store.len() / 5;\n                if new_size > self.capacity * self.dimensions {\n                    new_size = self.capacity * self.dimensions;\n                }\n                self.store.resize(new_size, 0.0);\n            }\n        }\n\n        if self.index_manager.is_empty() {\n            check_argument(self.reference_count.len() == self.location.len(), \"incorrect state\")?;\n            let mut new_size = self.location.len() + self.location.len() / 5;\n            if new_size > self.capacity {\n                new_size = self.capacity;\n            }\n            self.location.resize(new_size, L::MAX);\n            self.reference_count.resize(new_size, 0);\n            self.index_manager.change_capacity(new_size);\n        }\n        let position: usize = self.index_manager.get()?;\n        check_argument(self.reference_count[position] == 0, \"incorrect state\")?;\n        self.reference_count[position] = 1;\n        let new_point: &[f32] = if self.internal_shingling {\n            &self.last_known_shingle\n        } else {\n            &point\n        };\n\n        if self.ready_to_copy(&new_point) {\n            let base = self.dimensions / self.shingle_size;\n            let mut index: usize = self.start_free_region;\n            let extra = self.dimensions - base;\n            let idx_value: usize = (index - extra) / base;\n            self.location[position] = idx_value.try_into().expect(\"incorrect range\");\n            for i in 0..base {\n                self.store[index] = new_point[extra + i];\n                index += 1;\n            }\n            
self.start_free_region += base;\n        } else {\n            let mut index: usize = self.start_free_region;\n            let idx_value: usize = index / base;\n            self.location[position] = idx_value.try_into().expect(\"range error\");\n            for i in 0..self.dimensions {\n                self.store[index] = new_point[i];\n                index += 1;\n            }\n            self.start_free_region += self.dimensions;\n        }\n        if self.store_labels {\n            Ok((position, label_pos, attrib_vec))\n        } else {\n            Ok((position,attrib_pos,attrib_vec))\n        }\n    }\n\n    fn inc(&mut self, index: usize, secondary_index: usize) -> Result<()>{\n        Self::inc_helper(index,&mut self.reference_count,&mut self.hash_reference_counts)?;\n        if self.store_labels {\n            let attrib_index = (*self.labels.get(&secondary_index).expect(\"not found\")).1;\n            if attrib_index != usize::MAX {\n                Self::inc_helper(attrib_index,&mut self.attribute_count, &mut self.hash_attribute_counts)?;\n            }\n            Self::inc_helper(secondary_index, &mut self.label_count, &mut self.hash_label_counts)?;\n        } else if self.store_attributes {\n            Self::inc_helper(secondary_index,&mut self.attribute_count, &mut self.hash_attribute_counts)?;\n        }\n        Ok(())\n    }\n\n    fn dec(&mut self, index: usize,secondary_index: usize) -> Result<()> {\n        Self::dec_helper(index,&mut self.reference_count,&mut self.hash_reference_counts)?;\n        if self.reference_count[index] == 0 {\n            self.index_manager.release(index)?;\n            self.location[index] = L::MAX;\n        }\n        if self.store_labels {\n            let attribute_index = (*self.labels.get(&secondary_index).expect(\"not found\")).1;\n            if attribute_index != usize::MAX {\n                Self::dec_helper(attribute_index,&mut self.attribute_count, &mut self.hash_attribute_counts)?;\n                
if self.attribute_count[attribute_index] == 0 {\n                    self.attribute_manager.release(attribute_index)?;\n                }\n            }\n            Self::dec_helper(secondary_index, &mut self.label_count, &mut self.hash_label_counts)?;\n            if self.label_count[secondary_index] == 0 {\n                self.label_manager.release(secondary_index)?;\n            }\n        } else {\n            if secondary_index !=usize::MAX {\n                Self::dec_helper(secondary_index,&mut self.attribute_count, &mut self.hash_attribute_counts)?;\n                let x = self.attribute_reverse_map.remove(&self.attributes[secondary_index]).expect(\" error in secondary\");\n                check_argument(x == secondary_index, \"attribute index accounting is incorrect\")?;\n                if self.attribute_count[secondary_index] == 0 {\n                    self.attribute_manager.release(secondary_index)?;\n                }\n            }\n        }\n        Ok(())\n    }\n\n    fn adjust_count(&mut self, result: &[((usize, usize),(usize,usize))]) -> Result<()> {\n        for (insert, delete) in result {\n            if (*insert).0 != usize::MAX {\n                self.inc((*insert).0,(*insert).1)?;\n                if (*delete).0 != usize::MAX {\n                    self.dec((*delete).0,(*delete).1)?;\n                }\n            }\n        }\n        Ok(())\n    }\n\n    fn compact(&mut self) -> Result<()>{\n        let base = self.dimensions / self.shingle_size;\n        let mut reverse_reference: Vec<(usize, usize)> = Vec::new();\n        for i in 0..self.location.len() {\n            if self.location[i] != L::MAX {\n                reverse_reference.push((self.location[i].try_into().expect(\"range error\"), i));\n            }\n        }\n        reverse_reference.sort();\n        let mut fresh_start: usize = 0;\n        let mut j_static: usize = 0;\n        let mut j_dynamic: usize;\n        let end: usize = reverse_reference.len();\n        
while j_static < end {\n            let mut block_start: usize = reverse_reference[j_static].0;\n            block_start = block_start * base;\n            let mut block_end: usize = block_start + self.dimensions;\n            let initial = if self.internal_rotation {\n                (self.dimensions - fresh_start + block_start) % self.dimensions\n            } else {\n                0\n            };\n\n            let mut k = j_static + 1;\n            j_dynamic = j_static + 1;\n            while k < end {\n                let new_locn: usize = reverse_reference[k].0;\n                let new_elem: usize = base * new_locn;\n                if block_end >= new_elem {\n                    k += 1;\n                    j_dynamic += 1;\n                    if block_end < new_elem + self.dimensions {\n                        block_end = new_elem + self.dimensions;\n                    }\n                } else {\n                    k = end;\n                }\n            }\n\n            // aligning the boundaries\n            for _i in 0..initial {\n                self.store[fresh_start] = 0.0;\n                fresh_start += 1;\n            }\n\n            for i in block_start..block_end {\n                self.store[fresh_start] = self.store[i];\n                check_argument(!self.internal_rotation || fresh_start % self.dimensions == i % self.dimensions, \"corrupt state in compaction\")?;\n                if j_static < end {\n                    let locn: usize = reverse_reference[j_static].0;\n                    if i == base * locn {\n                        let new_idx: usize = reverse_reference[j_static].1;\n                        self.location[new_idx] = (fresh_start / base).try_into().expect(\"range error\");\n                        j_static += 1;\n                    }\n                }\n                fresh_start += 1;\n            }\n\n            check_argument(j_static == j_dynamic, \"There is discrepancy in indices\")?;\n        }\n        
self.start_free_region = fresh_start.try_into().expect(\"range error\");\n        Ok(())\n    }\n\n    fn label(&self, index: usize) -> Result<Label> {\n        check_argument(self.store_labels && index < self.labels.len() && self.label_count[index] != 0, \" cannot access the label\")?;\n        let (label,attribute_index) = *self.labels.get(&index).expect(\"unexpected index\");\n        Ok(label)\n    }\n\n    fn attribute(&self, index: usize) -> Result<Attributes> {\n        check_argument(self.store_attributes, \" attributes not stored\")?;\n        if self.store_labels {\n            let y = *self.labels.get(&index).expect(\"label not in use\");\n            check_argument(y.1<self.attributes.len(),\" not in use\")?;\n            Ok(self.attributes[y.1])\n        } else {\n            check_argument(index<self.attributes.len(),\" not in use\")?;\n            Ok(self.attributes[index])\n        }\n    }\n\n    fn point_sum(&self, list:&[(usize,usize)]) -> Result<Vec<f32>> {\n        let mut answer = vec![0.0; self.dimensions];\n        for (a,b) in list {\n            let (point, offset) = self.reference_and_offset(*a)?;\n            for (x,&y) in answer.iter_mut().zip(point) {\n                *x += y * (*b) as f32;\n            }\n        }\n        Ok(answer)\n    }\n\n    fn attribute_vec(&self, index: usize) -> Result<Vec<f32>> {\n        check_argument(self.store_attributes, \" need to store attributes first\")?;\n        let y = if self.store_labels {\n            let x = *self.labels.get(&index).expect(\"label not in use\");\n            x.1\n        } else {\n            index\n        };\n        if let Some(function) = self.attribute_to_vec {\n            check_argument(index < self.attributes.len(), \" out of range\")?;\n            (function) (&self.attributes[index])\n        } else {\n            let mut answer = vec![0.0f32; MAX_ATTRIBUTES];\n            answer[y % MAX_ATTRIBUTES] = 1.0;\n            Ok(answer)\n        }\n    }\n}\n"
  },
  {
    "path": "Rust/src/rcf.rs",
    "content": "extern crate num;\nextern crate rand;\nextern crate rand_chacha;\n\nuse core::fmt::Debug;\nuse std::collections::HashMap;\nuse std::hash::Hash;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\nuse rayon::prelude::*;\n\nuse crate::{\n    common::{\n        conditionalfieldsummarizer::FieldSummarizer, directionaldensity::InterpolationMeasure,\n        divector::DiVector, samplesummary::SampleSummary,\n    },\n    l1distance,\n    pointstore::{PointStore, VectorizedPointStore},\n    samplerplustree::{\n        nodeview::UpdatableNodeView, samplerplustree::SamplerPlusTree,\n    },\n    types::{Location, Result},\n    util::{add_nbr, add_to, check_argument, divide, nbr_finish},\n    visitor::{\n        attributionvisitor::AttributionVisitor,\n        imputevisitor::ImputeVisitor,\n        interpolationvisitor::InterpolationVisitor,\n        scalarscorevisitor::ScalarScoreVisitor,\n        visitor::{Visitor, VisitorInfo},\n    },\n};\nuse crate::common::rangevector::RangeVector;\nuse crate::errors::RCFError;\nuse crate::errors::RCFError::InvalidArgument;\n\npub(crate) fn score_seen(x: usize, y: usize) -> f64 {\n    1.0 / (x as f64 + f64::log2(1.0 + y as f64))\n}\npub(crate) fn score_unseen(x: usize, _y: usize) -> f64 {\n    1.0 / (x as f64 + 1.0)\n}\npub(crate) fn normalizer(x: f64, y: usize) -> f64 {\n    x * f64::log2(1.0 + y as f64)\n}\npub(crate) fn damp(x: usize, y: usize) -> f64 {\n    1.0 - (x as f64) / (2.0 * y as f64)\n}\n\npub(crate) fn score_seen_displacement(_x: usize, y: usize) -> f64 {\n    1.0 / (1.0 + y as f64)\n}\n\n// the following would be used for density estimation as well; note that for density estimation\n// we are only focused about similarity of points and thus (previously) seen  and\n// (previously) unseen points have little distinction\n// that distinction can be crucial for some applications of anomaly detection\n\npub(crate) fn score_unseen_displacement(_x: usize, y: usize) -> f64 {\n   
 y as f64\n}\n\n// the normalization is now multiplication by 1/treesize; this makes the\n// max score to be 1.0 whereas for the standard score the average is close to 1\n\npub(crate) fn displacement_normalizer(x: f64, y: usize) -> f64 {\n    x * 1.0 / (1.0 + y as f64)\n}\n\npub(crate) fn identity(x: f64, _y: usize) -> f64 {\n    x\n}\n\npub trait AugmentedRCF<Label,Attributes> {\n    fn update(&mut self, point: &[f32], label: Label) -> Result<()>;\n    fn id(&self) -> u64;\n    fn dimensions(&self) -> usize;\n    fn shingle_size(&self) -> usize;\n    fn is_internal_shingling_enabled(&self) -> bool;\n    fn is_output_ready(&self) -> bool;\n    fn entries_seen(&self) -> u64;\n    fn size(&self) -> usize;\n    fn point_store_size(&self) -> usize;\n    fn shingled_point(&self,point:&[f32]) -> Result<Vec<f32>>;\n\n    fn score(&self, point: &[f32]) -> Result<f64> {\n        self.score_visitor_traversal(point, &VisitorInfo::default())\n    }\n\n    fn displacement_score(&self, point: &[f32]) -> Result<f64> {\n        self.score_visitor_traversal(point, &VisitorInfo::displacement())\n    }\n\n    fn generic_score(\n        &self,\n        point: &[f32],\n        ignore_mass: usize,\n        score_seen: fn(usize, usize) -> f64,\n        score_unseen: fn(usize, usize) -> f64,\n        damp: fn(usize, usize) -> f64,\n        normalizer: fn(f64, usize) -> f64,\n    ) -> Result<f64> {\n        self.score_visitor_traversal(\n            point,\n            &VisitorInfo::use_score(ignore_mass, score_seen, score_unseen, damp, normalizer),\n        )\n    }\n\n    fn score_visitor_traversal(&self, point: &[f32], visitor_info: &VisitorInfo) -> Result<f64>;\n\n    fn attribution(&self, point: &[f32]) -> Result<DiVector> {\n        self.attribution_visitor_traversal(point, &VisitorInfo::default())\n    }\n\n    fn generic_attribution(\n        &self,\n        point: &[f32],\n        ignore_mass: usize,\n        score_seen: fn(usize, usize) -> f64,\n        score_unseen: fn(usize, 
usize) -> f64,\n        damp: fn(usize, usize) -> f64,\n        normalizer: fn(f64, usize) -> f64,\n    ) -> Result<DiVector> {\n        self.attribution_visitor_traversal(\n            point,\n            &VisitorInfo::use_score(ignore_mass, score_seen, score_unseen, damp, normalizer),\n        )\n    }\n\n    fn attribution_visitor_traversal(\n        &self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n    ) -> Result<DiVector>;\n\n    fn density(&self, point: &[f32]) -> Result<f64> {\n        self.interpolation_visitor_traversal(point, &VisitorInfo::density())?\n            .density()\n    }\n\n    fn directional_density(&self, point: &[f32]) -> Result<DiVector> {\n        self.interpolation_visitor_traversal(point, &VisitorInfo::density())?\n            .directional_density()\n    }\n\n    fn density_interpolant(&self, point: &[f32]) -> Result<InterpolationMeasure> {\n        self.interpolation_visitor_traversal(point, &VisitorInfo::density())\n    }\n\n    fn interpolation_visitor_traversal(\n        &self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n    ) -> Result<InterpolationMeasure>;\n\n    /// the answer format is (score, point, distance from original)\n    fn near_neighbor_list(\n        &self,\n        point: &[f32],\n        percentile: usize,\n    ) -> Result<Vec<(f64, Vec<f32>, f64)>> {\n        self.near_neighbor_traversal(point, percentile, &VisitorInfo::default())\n    }\n\n    fn near_neighbor_traversal(\n        &self,\n        point: &[f32],\n        percentile: usize,\n        visitor_info: &VisitorInfo,\n    ) -> Result<Vec<(f64, Vec<f32>, f64)>>;\n\n    fn impute_missing_values(&self, positions: &[usize], point: &[f32]) -> Result<Vec<f32>> {\n        check_argument(positions.len() > 0, \"nothing to impute\")?;\n        self.conditional_field(positions, point, 1.0, true, 0)\n            .map(|summary| summary.median)\n    }\n\n    fn extrapolate(&self, look_ahead: usize) -> Result<RangeVector<f32>>;\n\n    
fn conditional_field(\n        &self,\n        positions: &[usize],\n        point: &[f32],\n        centrality: f64,\n        project: bool,\n        max_number: usize,\n    ) -> Result<SampleSummary> {\n        self.generic_conditional_field_visitor(\n            positions,\n            point,\n            centrality,\n            project,\n            max_number,\n            &VisitorInfo::default(),\n        )\n    }\n\n    fn generic_conditional_field_visitor(\n        &self,\n        positions: &[usize],\n        point: &[f32],\n        centrality: f64,\n        project: bool,\n        max_number: usize,\n        visitor_info: &VisitorInfo,\n    ) -> Result<SampleSummary>;\n\n}\n\n//pub trait LabeledRCF<Update> : AugmentedRCF<Update,u64> {}\n//impl<Update: Sync + Copy + Send, U> LabeledRCF<Update> for U where U : AugmentedRCF<Update,u64> {}\n\n\npub trait RCF : AugmentedRCF<u64,u64> + Send + Sync {}\nimpl<U> RCF for U where U : AugmentedRCF<u64,u64> + Send + Sync {}\n\n\npub struct RCFStruct<C, L, P, N, Label,Attributes>\nwhere\n    C: Location,\n    usize: From<C>,\n    L: Location,\n    usize: From<L>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>,\n    Label: Copy + Sync + Send,\n    Attributes : Copy + Sync+ Hash + Eq + Send,\n{\n    id : u64,\n    dimensions: usize,\n    capacity: usize,\n    number_of_trees: usize,\n    sampler_plus_trees: Vec<SamplerPlusTree<C, P, N>>,\n    time_decay: f64,\n    shingle_size: usize,\n    entries_seen: u64,\n    internal_shingling: bool,\n    internal_rotation: bool,\n    store_attributes: bool,\n    propagate_attributes: bool,\n    initial_accept_fraction: f64,\n    bounding_box_cache_fraction: f64,\n    parallel_enabled: bool,\n    random_seed: u64,\n    output_after: usize,\n    point_store: VectorizedPointStore<L,Label,Attributes>,\n}\n\nimpl<C, L, P, N, Label, Attributes> RCFStruct<C, L, P, N, Label, Attributes>\nwhere\n    C: Location,\n    usize: From<C>,\n    L: 
Location,\n    usize: From<L>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>,\n    Label: Copy + Sync + Send,\n    Attributes : Copy + Sync + Eq + Hash + Send,\n    <C as TryFrom<usize>>::Error: Debug,\n    <L as TryFrom<usize>>::Error: Debug,\n    <P as TryFrom<usize>>::Error: Debug,\n    <N as TryFrom<usize>>::Error: Debug,\n{\n    pub fn new(\n        id : u64,\n        dimensions: usize,\n        shingle_size: usize,\n        capacity: usize,\n        number_of_trees: usize,\n        random_seed: u64,\n        store_attributes: bool,\n        store_pointsum: bool,\n        propagate_attributes : bool,\n        parallel_enabled: bool,\n        internal_shingling: bool,\n        internal_rotation: bool,\n        time_decay: f64,\n        initial_accept_fraction: f64,\n        bounding_box_cache_fraction: f64,\n        output_after: usize,\n        attribute_creator : fn(&[Label], Label) -> Result<Attributes>,\n        attribute_to_vec : Option<fn(&Attributes) -> Result<Vec<f32>>>\n    ) -> Result<Self> {\n        let mut point_store_capacity= capacity * number_of_trees + 1;\n        if point_store_capacity < 2 * capacity {\n            point_store_capacity = 2 * capacity;\n        }\n        let initial_capacity = 2 * capacity;\n        check_argument(shingle_size==1 || dimensions % shingle_size == 0, \"Shingle size must divide dimensions\")?;\n        check_argument(!internal_rotation || internal_shingling,\n            \" internal shingling required for rotations\")?;\n        let mut rng = ChaCha20Rng::seed_from_u64(random_seed);\n        let _new_random_seed = rng.next_u64();\n        let mut models: Vec<SamplerPlusTree<C, P, N>> = Vec::new();\n        let using_transforms = internal_rotation; // other conditions may be added eventually\n        for _i in 0..number_of_trees {\n            models.push(SamplerPlusTree::<C, P, N>::new(\n                dimensions,\n                capacity,\n                
using_transforms,\n                rng.next_u64(),\n                store_attributes,\n                store_pointsum,\n                propagate_attributes,\n                time_decay,\n                initial_accept_fraction,\n                bounding_box_cache_fraction,\n            )?);\n        }\n        Ok(RCFStruct {\n            id,\n            random_seed,\n            dimensions,\n            capacity,\n            sampler_plus_trees: models,\n            number_of_trees,\n            store_attributes,\n            shingle_size,\n            entries_seen: 0,\n            time_decay,\n            initial_accept_fraction,\n            bounding_box_cache_fraction,\n            parallel_enabled,\n            point_store: VectorizedPointStore::<L,Label,Attributes>::new(\n                dimensions.into(),\n                shingle_size.into(),\n                point_store_capacity,\n                initial_capacity,\n                internal_shingling,\n                internal_rotation,\n                store_attributes,\n                propagate_attributes,\n                attribute_creator,\n                attribute_to_vec\n            )?,\n            internal_shingling,\n            internal_rotation,\n            output_after,\n            propagate_attributes\n        })\n    }\n\n    pub fn generic_conditional_field_point_list_and_distances(\n        &self,\n        positions: &[usize],\n        point: &[f32],\n        centrality: f64,\n        visitor_info: &VisitorInfo,\n    ) -> Result<Vec<(f64, usize, f64)>> {\n        let new_point = self.point_store.shingled_point(point)?;\n        let mut list: Vec<(f64, usize, f64)> = if self.parallel_enabled {\n            self.sampler_plus_trees\n                .par_iter()\n                .map(|m| {\n                    m.conditional_field(\n                        &positions,\n                        centrality,\n                        &new_point,\n                        &self.point_store,\n         
               visitor_info,\n                    )\n                })\n                .collect::<Result<Vec<(f64, usize, f64)>>>()?\n        } else {\n            self.sampler_plus_trees\n                .iter()\n                .map(|m| {\n                    m.conditional_field(\n                        &positions,\n                        centrality,\n                        &new_point,\n                        &self.point_store,\n                        visitor_info,\n                    )\n                })\n                .collect::<Result<Vec<(f64, usize, f64)>>>()?\n        };\n        list.sort_by(|&o1, &o2| o1.2.partial_cmp(&o2.2).expect(\"should be total order\"));\n        Ok(list)\n    }\n\n    pub fn simple_traversal<NodeView, V, R, S>(\n        &self,\n        point: &[f32],\n        parameters: &[usize],\n        visitor_info: &VisitorInfo,\n        visitor_factory: fn(usize, &[usize], &VisitorInfo) -> V,\n        default: &R,\n        initial: &S,\n        collect_to: fn(&R, &mut S),\n        finish: fn(&mut S, usize),\n    ) -> Result<S>\n    where\n        NodeView: UpdatableNodeView<Label,Attributes>,\n        V: Visitor<NodeView, R>,\n        R: Clone + Send + Sync,\n        S: Clone,\n    {\n        check_argument(\n            point.len() == self.dimensions || point.len() * self.shingle_size == self.dimensions,\n            \"invalid input length\",\n        )?;\n\n        let mut answer = initial.clone();\n        let new_point = self.point_store.shingled_point(point)?;\n\n        let list: Vec<R> = if self.parallel_enabled {\n             self.sampler_plus_trees\n                .par_iter()\n                .map(|m| {\n                    //m.generic_visitor_traversal(\n                    m.simple_traversal(\n                        &new_point,\n                        &self.point_store,\n                        parameters,\n                        &visitor_info,\n                        visitor_factory,\n                        
default,\n                    )\n                })\n                .collect::<Result<Vec<R>>>()?\n        } else {\n            self.sampler_plus_trees\n                .iter()\n                .map(|m| {\n                    m.simple_traversal(\n                        &new_point,\n                        &self.point_store,\n                        parameters,\n                        &visitor_info,\n                        visitor_factory,\n                        default,\n                    )\n                }).collect::<Result<Vec<R>>>()?\n        };\n        list.iter().for_each(|m| (collect_to)(m, &mut answer));\n        (finish)(&mut answer, self.sampler_plus_trees.len());\n        Ok(answer)\n    }\n}\n\n#[deprecated]\npub fn create_rcf(\n    dimensions: usize,\n    shingle_size: usize,\n    capacity: usize,\n    number_of_trees: usize,\n    random_seed: u64,\n    store_attributes: bool,\n    parallel_enabled: bool,\n    internal_shingling: bool,\n    internal_rotation: bool,\n    time_decay: f64,\n    initial_accept_fraction: f64,\n    bounding_box_cache_fraction: f64\n) -> Box<dyn RCF + Sync + Send> {\n    RCFBuilder::<u64,u64>::new(dimensions/shingle_size,shingle_size)\n        .tree_capacity(capacity)\n        .number_of_trees(number_of_trees)\n        .random_seed(random_seed)\n        .store_attributes(store_attributes)\n        .parallel_enabled(parallel_enabled)\n        .internal_shingling(internal_shingling)\n        .internal_rotation(internal_rotation)\n        .time_decay(time_decay)\n        .initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction)\n        .build_default().unwrap() //unwrap kept for deprecated function\n}\n\nimpl<C, L, P, N,Label,Attributes> AugmentedRCF<Label,Attributes> for RCFStruct<C, L, P, N,Label, Attributes>\nwhere\n    C: Location,\n    usize: From<C>,\n    L: Location,\n    usize: From<L>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: 
Location,\n    usize: From<N>,\n    Label: Copy + Sync + Send,\n    Attributes : Copy + Sync+ Hash + Eq + Send,\n    <C as TryFrom<usize>>::Error: Debug,\n    <L as TryFrom<usize>>::Error: Debug,\n    <P as TryFrom<usize>>::Error: Debug,\n    <N as TryFrom<usize>>::Error: Debug,\n{\n    fn shingled_point(&self,point:&[f32]) -> Result<Vec<f32>> {\n        self.point_store.shingled_point(point)\n    }\n\n    fn id(&self) -> u64 {\n        self.id\n    }\n\n    fn update(&mut self, point: &[f32], label : Label) -> Result<()> {\n        let (point_index,point_attribute,vector) = self.point_store.add(&point,label)?;\n        if point_index != usize::MAX {\n            let result: Vec<((usize, usize),(usize,usize))> = if self.parallel_enabled {\n                self.sampler_plus_trees\n                    .par_iter_mut()\n                    .map(|m| m.update(point_index, point_attribute, &self.point_store))\n                    .collect::<Result<Vec<((usize, usize), (usize, usize))>>>()?\n            } else {\n                    self.sampler_plus_trees\n                        .iter_mut()\n                        .map(|m| m.update(point_index, point_attribute, &self.point_store))\n                        .collect::<Result<Vec<((usize,usize),(usize,usize))>>>()?\n            };\n            self.point_store.adjust_count(&result)?;\n            self.point_store.dec(point_index,point_attribute)?;\n            self.entries_seen += 1;\n        }\n        Ok(())\n    }\n\n    fn dimensions(&self) -> usize {\n        self.dimensions\n    }\n\n    fn shingle_size(&self) -> usize {\n        self.shingle_size\n    }\n\n    fn is_internal_shingling_enabled(&self) -> bool {\n        self.internal_shingling\n    }\n\n    fn is_output_ready(&self) -> bool {\n        ((self.output_after + if self.is_internal_shingling_enabled() {self.shingle_size - 1} else {0}) as u64) < self.entries_seen\n    }\n\n    fn entries_seen(&self) -> u64 {\n        self.entries_seen\n    }\n\n    fn 
score_visitor_traversal(&self, point: &[f32], visitor_info: &VisitorInfo) -> Result<f64> {\n        // parameter unused for score traversal\n        if self.output_after > self.entries_seen as usize {\n            return Ok(0.0);\n        }\n        self.simple_traversal(\n            point,\n            &Vec::new(),\n            visitor_info,\n            ScalarScoreVisitor::default,\n            &0.0,\n            &0.0,\n            add_to,\n            divide,\n        )\n    }\n\n    fn attribution_visitor_traversal(\n        &self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n    ) -> Result<DiVector> {\n        if self.output_after > self.entries_seen as usize {\n            return Ok(DiVector::empty(self.dimensions));\n        }\n        // tells the visitor what dimension to expect for each tree\n        let parameters = &vec![self.dimensions];\n        self.simple_traversal(\n            point,\n            parameters,\n            visitor_info,\n            AttributionVisitor::create_visitor,\n            &DiVector::empty(self.dimensions),\n            &DiVector::empty(self.dimensions),\n            DiVector::add_to,\n            DiVector::divide,\n        )\n    }\n\n    fn interpolation_visitor_traversal(\n        &self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n    ) -> Result<InterpolationMeasure> {\n        // tells the visitor what dimension to expect for each tree\n        let parameters = &vec![self.dimensions];\n        self.simple_traversal(\n            point,\n            parameters,\n            visitor_info,\n            InterpolationVisitor::create_visitor,\n            &InterpolationMeasure::empty(self.dimensions, 0.0),\n            &InterpolationMeasure::empty(self.dimensions, 0.0),\n            InterpolationMeasure::add_to,\n            InterpolationMeasure::divide,\n        )\n    }\n\n    fn near_neighbor_traversal(\n        &self,\n        point: &[f32],\n        percentile: usize,\n        
visitor_info: &VisitorInfo,\n    ) -> Result<Vec<(f64, Vec<f32>, f64)>> {\n        let x = (0.0f64, usize::MAX, f64::MAX);\n        let parameters = &vec![percentile];\n        let list = self.simple_traversal(\n            point,\n            parameters,\n            visitor_info,\n            ImputeVisitor::create_nbr_visitor,\n            &x,\n            &Vec::new(),\n            add_nbr,\n            nbr_finish,\n        )?;\n        let mut answer = Vec::new();\n        for e in list.iter() {\n            answer.push((e.0, self.point_store.copy(e.1)?, e.2));\n        }\n        Ok(answer)\n    }\n\n    fn generic_conditional_field_visitor(\n        &self,\n        positions: &[usize],\n        point: &[f32],\n        centrality: f64,\n        project: bool,\n        max_number: usize,\n        visitor_info: &VisitorInfo,\n    ) -> Result<SampleSummary> {\n        check_argument(\n            point.len() == self.dimensions || point.len() * self.shingle_size == self.dimensions,\n            \"invalid input length\",\n        )?;\n        let new_positions = if point.len() == self.dimensions {\n            Vec::from(positions)\n        } else {\n            // internal shingling\n            self.point_store.missing_indices(0, positions)?\n        };\n\n        let raw_list = self.generic_conditional_field_point_list_and_distances(\n            &new_positions,\n            point,\n            centrality,\n            visitor_info,\n        )?;\n        let field_summarizer = FieldSummarizer::new(centrality, project, max_number, l1distance);\n        field_summarizer.summarize_list(&self.point_store, &raw_list, &new_positions)\n    }\n\n    fn extrapolate(&self, look_ahead: usize) -> Result<RangeVector<f32>> {\n        check_argument(\n            self.internal_shingling,\n            \"look ahead is not meaningful without internal shingling mechanism\",\n        )?;\n        check_argument(\n            self.shingle_size > 1,\n            \"need shingle size > 1 
for extrapolation\",\n        )?;\n        let mut values = Vec::new();\n        let mut upper = Vec::new();\n        let mut lower = Vec::new();\n        let base = self.dimensions / self.shingle_size;\n        let mut fictitious_point = self.point_store.shingled_point(&vec![0.0f32; base])?;\n        for i in 0..look_ahead {\n            let missing = self.point_store.next_indices(i)?;\n            check_argument(missing.len() == base, \"incorrect imputation\")?;\n            let iterate = self.conditional_field(&missing, &fictitious_point, 1.0, true, 0)?;\n            for j in 0..base {\n                values.push(iterate.median[j]);\n                lower.push(iterate.lower[j]);\n                upper.push(iterate.upper[j]);\n                fictitious_point[missing[j]] = values[j];\n            }\n        }\n        RangeVector::create(&values,&upper,&lower)\n    }\n\n    fn size(&self) -> usize {\n        let mut sum: usize = 0;\n        for model in &self.sampler_plus_trees {\n            sum += model.get_size();\n        }\n        sum + self.point_store.size() + std::mem::size_of::<RCFStruct<C, L, P, N,Label,Attributes>>()\n    }\n\n    fn point_store_size(&self) -> usize {\n        self.point_store.size()\n    }\n}\n\n\n\npub type RCFTiny<Update,Operate> = RCFStruct<u8, u16, u16, u8,Update,Operate>; // sampleSize <= 256 for these and shingleSize * { max { base_dimensions, (number_of_trees + 1) } <= 256\npub type RCFSmall<Update,Operate> = RCFStruct<u8, usize, u16, u8,Update,Operate>; // sampleSize <= 256 and (number_of_trees + 1) <= 256 and dimensions = shingle_size*base_dimensions <= 256\npub type RCFMedium<Update,Operate> = RCFStruct<u16, usize, usize, u16,Update,Operate>; // sampleSize, dimensions <= u16::MAX\npub type RCFLarge<Update,Operate> = RCFStruct<usize, usize, usize, usize,Update,Operate>; // as large as the machine would allow\n\npub fn copy_label_as_attribute<Label>(_x: &[Label],y:Label) -> Result<Label> {\n    Ok(y)\n}\n\npub struct 
RCFBuilder<Label : Send + Sync + Copy + 'static, Attributes : Send + Sync + Copy + Eq + Hash + 'static> {\n    input_dimensions: usize,\n    shingle_size: usize,\n    pub(crate) rcf_options : RCFOptions<Label,Attributes>,\n}\n\n\nimpl<Label : Send + Sync + Copy + 'static, Attributes : Send + Sync + Copy + Eq + Hash + 'static> RCFBuilder<Label,Attributes> {\n    pub fn new(input_dimensions: usize, shingle_size: usize) -> Self {\n        RCFBuilder {input_dimensions, shingle_size, rcf_options: RCFOptions::default()}\n    }\n\n    pub fn validate(&self) -> Result<()> {\n        check_argument( self.input_dimensions > 0, \"input_dimensions cannot be 0\")?;\n        check_argument( self.shingle_size > 0, \"shingle size cannot be 0\")?;\n        self.rcf_options.validate()?;\n        Ok(())\n    }\n\n    // coresion reasons\n    pub fn build_default(&self) -> Result<Box<dyn RCF + Sync + Send>> {\n        check_argument(self.rcf_options.attribute_to_vec.is_none(), \"remove function options for default\")?;\n        check_argument(self.rcf_options.attribute_creator.is_none(), \"remove function options fro default\")?;\n\n        let x =self.build_tiny_simple::<u64>();\n        if x.is_ok() {\n            Ok(Box::new(x?))\n        } else {\n            let y = self.build_small_simple::<u64>();\n            if y.is_ok() {\n                Ok(Box::new(y?))\n            } else {\n                let z = self.build_medium_simple::<u64>();\n                if z.is_ok() {\n                    Ok(Box::new(z?))\n                } else {\n                    Ok(Box::new(self.build_large_simple::<u64>()?))\n                }\n            }\n        }\n    }\n\n    pub fn build_to_u64<Update: Send + Sync + Copy + 'static>(\n        &self,\n        attribute_creator : fn(&[Update],Update) -> Result<u64>\n    ) -> Result<Box<dyn AugmentedRCF<Update,u64> + Sync + Send>> {\n        let x =self.build_tiny::<Update,u64>(attribute_creator,None);\n        if x.is_ok() {\n            
Ok(Box::new(x?))\n        } else {\n            let y = self.build_small::<Update,u64>(attribute_creator,None);\n            if y.is_ok() {\n                Ok(Box::new(y?))\n            } else {\n                let z = self.build_medium::<Update,u64>(attribute_creator, None);\n                if z.is_ok() {\n                    Ok(Box::new(z?))\n                } else {\n                    Ok(Box::new(self.build_large::<Update,u64>(attribute_creator,None)?))\n                }\n            }\n        }\n    }\n\n    pub fn build(&self) -> Result<Box<dyn AugmentedRCF<Label,Attributes> + Sync + Send + 'static>> {\n        check_argument(!self.rcf_options.store_attributes || self.rcf_options.attribute_creator.is_some(),\n                       \"need an attribute_creator function to create the attributes\")?;\n        let attribute_creator = self.rcf_options.attribute_creator.unwrap_or( |x,y |\n            {Err(RCFError::InvalidArgument {msg : \"function not provided, should not be invoked\"})});\n        let x =self.build_tiny::<Label,Attributes>(attribute_creator,self.rcf_options.attribute_to_vec);\n        if x.is_ok() {\n            Ok(Box::new(x?))\n        } else {\n            let y = self.build_small::<Label,Attributes>(attribute_creator,self.rcf_options.attribute_to_vec);\n            if y.is_ok() {\n                Ok(Box::new(y?))\n            } else {\n                let z = self.build_medium::<Label,Attributes>(attribute_creator, self.rcf_options.attribute_to_vec);\n                if z.is_ok() {\n                    Ok(Box::new(z?))\n                } else {\n                    Ok(Box::new(self.build_large::<Label,Attributes>(attribute_creator,self.rcf_options.attribute_to_vec)?))\n                }\n            }\n        }\n    }\n\n    pub fn build_tiny<Update: Send + Sync + Copy, Operate:  Send + Sync + Copy + Eq + Hash>(\n        &self,\n        attribute_creator : fn(&[Update],Update) -> Result<Operate>,\n        attribute_to_vec: 
Option<fn(&Operate) -> Result<Vec<f32>>>\n    ) -> Result<RCFTiny<Update,Operate>> {\n        self.validate()?;\n        let dimensions = self.input_dimensions * self.shingle_size;\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let time_decay = self.rcf_options.time_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let random_seed = self.rcf_options.random_seed.unwrap_or( ChaCha20Rng::from_entropy().gen::<u64>());\n        check_argument(dimensions < (u8::MAX as usize) && (self.rcf_options.capacity - 1 <= u8::MAX as usize),\"incorrect parameters\")?;\n        check_argument(self.rcf_options.capacity * (1 + self.rcf_options.number_of_trees) * self.shingle_size <= u16::MAX as usize, \" incorrect parameters\")?;\n        Ok(RCFTiny::<Update,Operate>::new(\n            self.rcf_options.id,\n            dimensions,\n            self.shingle_size,\n            self.rcf_options.capacity,\n            self.rcf_options.number_of_trees,\n            random_seed,\n            self.rcf_options.store_attributes,\n            self.rcf_options.store_pointsum,\n            self.rcf_options.propagate_attributes,\n            self.rcf_options.parallel_enabled,\n            self.rcf_options.internal_shingling,\n            self.rcf_options.internal_rotation,\n            time_decay,\n            self.rcf_options.initial_accept_fraction,\n            self.rcf_options.bounding_box_cache_fraction,\n            output_after,\n            attribute_creator,\n            attribute_to_vec\n        )?)\n    }\n\n    pub fn build_tiny_simple<Operate: Send + Sync + Copy + Eq + Hash>(&self) -> Result<RCFTiny<Operate,Operate>> {\n        self.build_tiny(copy_label_as_attribute::<Operate>,None)\n    }\n\n    pub fn build_small<Update: Send + Sync + Copy, Operate:  Send + Sync + Copy + Eq + Hash>(\n        &self,\n        attribute_creator : fn(&[Update],Update) -> Result<Operate>,\n        attribute_to_vec: 
Option<fn(&Operate) -> Result<Vec<f32>>>\n    ) -> Result<RCFSmall<Update,Operate>> {\n        self.validate()?;\n        let dimensions = self.input_dimensions * self.shingle_size;\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let time_decay = self.rcf_options.time_decay.unwrap_or(0.1 / self.rcf_options.capacity as f64);\n        let random_seed = self.rcf_options.random_seed.unwrap_or(ChaCha20Rng::from_entropy().gen::<u64>());\n        check_argument(dimensions < (u8::MAX as usize) && (self.rcf_options.capacity - 1 <= u8::MAX as usize), \"incorrect parameters\")?;\n        Ok(RCFSmall::<Update,Operate>::new(\n            self.rcf_options.id,\n            dimensions,\n            self.shingle_size,\n            self.rcf_options.capacity,\n            self.rcf_options.number_of_trees,\n            random_seed,\n            self.rcf_options.store_attributes,\n            self.rcf_options.store_pointsum,\n            self.rcf_options.propagate_attributes,\n            self.rcf_options.parallel_enabled,\n            self.rcf_options.internal_shingling,\n            self.rcf_options.internal_rotation,\n            time_decay,\n            self.rcf_options.initial_accept_fraction,\n            self.rcf_options.bounding_box_cache_fraction,\n            output_after,\n            attribute_creator,\n            attribute_to_vec\n        )?)\n    }\n\n    pub fn build_small_simple<Operate: Send + Sync + Copy + Eq + Hash>(&self) -> Result<RCFSmall<Operate,Operate>> {\n        self.build_small(copy_label_as_attribute::<Operate>,None)\n    }\n\n    pub fn build_medium<Update: Send + Sync + Copy, Operate:  Send + Sync + Copy + Eq + Hash>(\n        &self,\n        attribute_creator : fn(&[Update],Update) -> Result<Operate>,\n        attribute_to_vec: Option<fn(&Operate) -> Result<Vec<f32>>>\n    ) -> Result<RCFMedium<Update,Operate>> {\n        self.validate()?;\n        let dimensions = self.input_dimensions * 
self.shingle_size;\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let time_decay = self.rcf_options.time_decay.unwrap_or(0.1 / self.rcf_options.capacity as f64);\n        let random_seed = self.rcf_options.random_seed.unwrap_or(ChaCha20Rng::from_entropy().gen::<u64>());\n        check_argument((dimensions < u16::MAX as usize) && (self.rcf_options.capacity - 1 <= u16::MAX as usize), \" incorrect parameters\")?;\n        Ok(RCFMedium::<Update,Operate>::new(\n            self.rcf_options.id,\n            dimensions,\n            self.shingle_size,\n            self.rcf_options.capacity,\n            self.rcf_options.number_of_trees,\n            random_seed,\n            self.rcf_options.store_attributes,\n            self.rcf_options.store_pointsum,\n            self.rcf_options.propagate_attributes,\n            self.rcf_options.parallel_enabled,\n            self.rcf_options.internal_shingling,\n            self.rcf_options.internal_rotation,\n            time_decay,\n            self.rcf_options.initial_accept_fraction,\n            self.rcf_options.bounding_box_cache_fraction,\n            output_after,\n            attribute_creator,\n            attribute_to_vec\n        )?)\n    }\n\n    pub fn build_medium_simple<Operate: Send + Sync + Copy + Eq + Hash>(&self) -> Result<RCFMedium<Operate,Operate>> {\n        self.build_medium(copy_label_as_attribute::<Operate>,None)\n    }\n\n    pub fn build_large<Update: Send + Sync + Copy, Operate:  Send + Sync + Copy + Eq + Hash>(\n        &self,\n        attribute_creator : fn(&[Update],Update) -> Result<Operate>,\n        attribute_to_vec: Option<fn(&Operate) -> Result<Vec<f32>>>\n    ) -> Result<RCFLarge<Update,Operate>> {\n        self.validate()?;\n        let dimensions = self.input_dimensions * self.shingle_size;\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let time_decay = 
self.rcf_options.time_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let random_seed = self.rcf_options.random_seed.unwrap_or( ChaCha20Rng::from_entropy().gen::<u64>());\n        Ok(RCFLarge::<Update,Operate>::new(\n            self.rcf_options.id,\n            dimensions,\n            self.shingle_size,\n            self.rcf_options.capacity,\n            self.rcf_options.number_of_trees,\n            random_seed,\n            self.rcf_options.store_attributes,\n            self.rcf_options.store_pointsum,\n            self.rcf_options.propagate_attributes,\n            self.rcf_options.parallel_enabled,\n            self.rcf_options.internal_shingling,\n            self.rcf_options.internal_rotation,\n            time_decay,\n            self.rcf_options.initial_accept_fraction,\n            self.rcf_options.bounding_box_cache_fraction,\n            output_after,\n            attribute_creator,\n            attribute_to_vec\n        )?)\n    }\n\n    pub fn build_large_simple<Operate: Send + Sync + Copy + Eq + Hash>(&self) -> Result<RCFLarge<Operate,Operate>> {\n        self.build_large(copy_label_as_attribute::<Operate>,None)\n    }\n\n}\n\npub struct RCFOptions<Label,Attributes> {\n    pub(crate) id: u64,\n    pub(crate) capacity: usize,\n    pub(crate) number_of_trees: usize,\n    pub(crate) time_decay: Option<f64>,\n    pub(crate) internal_shingling: bool,\n    pub(crate) internal_rotation: bool,\n    pub(crate) store_labels: bool,\n    pub(crate) store_attributes: bool,\n    pub(crate) propagate_attributes: bool,\n    pub(crate) store_pointsum: bool,\n    pub(crate) initial_accept_fraction: f64,\n    pub(crate) bounding_box_cache_fraction: f64,\n    pub(crate) parallel_enabled: bool,\n    pub(crate) random_seed: Option<u64>,\n    pub(crate) output_after: Option<usize>,\n    pub(crate) attribute_creator: Option<fn(&[Label],Label) -> Result<Attributes>>,\n    pub(crate) attribute_to_vec: Option<fn(&Attributes) -> 
Result<Vec<f32>>>\n}\n\nimpl<Label : Send + Sync + Copy, Attributes : Send + Sync + Copy + Eq + Hash> RCFOptions<Label,Attributes> {\n    pub fn validate(&self) -> Result<()> {\n        check_argument(self.capacity > 0, \"capacity cannot be 0\")?;\n        check_argument( self.number_of_trees > 0, \"number of trees cannot be 0\")?;\n        check_argument(self.time_decay.unwrap_or(0.0)>=0.0, \"time decay cannot be negative\")?;\n        check_argument(self.bounding_box_cache_fraction >=0.0\n                           && self.bounding_box_cache_fraction <=1.0,\n                       \"bounding box cache fraction is in [0,1]\")?;\n        check_argument(!self.propagate_attributes || self.store_attributes,\n        \"need to store attributes to propagate them\")?;\n        check_argument(self.initial_accept_fraction > 0.0 && self.initial_accept_fraction <= 1.0,\n                       \"initial accept fraction has to be in (0,1]\")?;\n        Ok(())\n    }\n}\n\nimpl<Label : Send + Sync + Copy, Attributes : Send + Sync + Copy + Eq + Hash> Default for RCFOptions<Label,Attributes> {\n    fn default() -> Self {\n        RCFOptions{\n            id : u64::MAX, // a default tag that this was not set\n            capacity: 256,\n            number_of_trees: 50,\n            time_decay: None,\n            internal_shingling: true,\n            internal_rotation: false,\n            store_labels: false,\n            store_attributes: false,\n            propagate_attributes: false,\n            initial_accept_fraction: 0.125,\n            bounding_box_cache_fraction: 1.0,\n            parallel_enabled: false,\n            store_pointsum : false,\n            random_seed: None,\n            output_after: None,\n            attribute_creator: Option::<fn( &[Label], Label) -> Result<Attributes>>::None,\n            attribute_to_vec: Option::<fn( &Attributes) -> Result<Vec<f32>>>::None\n        }\n    }\n}\n\npub trait RCFOptionsBuilder<Label : Send + Sync + Copy, Attributes : 
Send + Sync + Copy + Eq + Hash> {\n    fn get_rcf_options(&mut self) -> &mut RCFOptions<Label,Attributes>;\n\n    fn id(&mut self,id:u64) -> &mut Self{\n        self.get_rcf_options().id = id;\n        self\n    }\n    fn parallel_enabled(&mut self,parallel_enabled: bool) -> &mut Self {\n        self.get_rcf_options().parallel_enabled = parallel_enabled;\n        self\n    }\n    fn output_after(&mut self,output_after: usize) -> &mut Self {\n        self.get_rcf_options().output_after = Some(output_after);\n        self\n    }\n    fn random_seed(&mut self,random_seed: u64) -> &mut Self {\n        self.get_rcf_options().random_seed = Some(random_seed);\n        self\n    }\n    fn internal_rotation(&mut self, internal_rotation: bool) -> &mut Self {\n        self.get_rcf_options().internal_rotation = internal_rotation;\n        self\n    }\n    fn internal_shingling(&mut self, internal_shingling: bool) -> &mut Self {\n        self.get_rcf_options().internal_shingling = internal_shingling;\n        self\n    }\n    fn propagate_attribute_vectors(&mut self, propagarate_attribute_vectors : bool) -> &mut Self {\n        self.get_rcf_options().propagate_attributes = propagarate_attribute_vectors;\n        self\n    }\n    fn store_pointsum(&mut self, store_pointsum : bool) -> &mut Self {\n        self.get_rcf_options().store_pointsum = store_pointsum;\n        self\n    }\n    fn store_attributes(&mut self, store_attributes : bool) -> &mut Self {\n        self.get_rcf_options().store_attributes = store_attributes;\n        self\n    }\n    fn initial_accept_fraction(&mut self, initial_accept_fraction : f64) -> &mut Self {\n        self.get_rcf_options().initial_accept_fraction = initial_accept_fraction;\n        self\n    }\n    fn bounding_box_cache_fraction(&mut self, bounding_box_cache_fraction : f64) -> &mut Self {\n        self.get_rcf_options().bounding_box_cache_fraction = bounding_box_cache_fraction;\n        self\n    }\n    fn tree_capacity(&mut self, capacity: 
usize) -> &mut Self {\n        self.get_rcf_options().capacity = capacity;\n        self\n    }\n\n    fn number_of_trees(&mut self, number_of_trees : usize) -> &mut Self {\n        self.get_rcf_options().number_of_trees = number_of_trees;\n        self\n    }\n    fn time_decay(&mut self, time_decay : f64) -> &mut Self{\n        self.get_rcf_options().time_decay = Some(time_decay);\n        self\n    }\n    fn attribute_creator(&mut self, function: fn( &[Label],Label) -> Result<Attributes>) -> &mut Self{\n        self.get_rcf_options().attribute_creator = Some(function);\n        self\n    }\n    fn attribute_to_vec(&mut self, function: fn( _attribute :&Attributes) -> Result<Vec<f32>>) -> &mut Self{\n        self.get_rcf_options().attribute_to_vec = Some(function);\n        self\n    }\n}\n\nimpl<Label : Send + Sync + Copy, Attributes : Send + Sync + Copy + Eq + Hash> RCFOptionsBuilder<Label,Attributes> for RCFBuilder<Label,Attributes> {\n    fn get_rcf_options(&mut self) -> &mut RCFOptions<Label,Attributes> {\n        &mut self.rcf_options\n    }\n}\n"
  },
  {
    "path": "Rust/src/samplerplustree/boundingbox.rs",
    "content": "use crate::util::check_argument;\nuse crate::types::Result;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct BoundingBox {\n    range_sum: f64,\n    min_values: Vec<f32>,\n    max_values: Vec<f32>,\n}\n\nimpl BoundingBox {\n    pub fn new(first_values: &[f32], second_values: &[f32]) -> Result<Self> {\n        check_argument(first_values.len() == second_values.len(), \" mismatched lengths\")?;\n        let minv: Vec<f32> = first_values\n            .iter()\n            .zip(second_values)\n            .map(|(x, y)| if *x < *y { *x } else { *y })\n            .collect();\n        let maxv: Vec<f32> = first_values\n            .iter()\n            .zip(second_values)\n            .map(|(x, y)| if *x > *y { *x } else { *y })\n            .collect();\n\n        let sum = minv.iter().zip(&maxv).map(|(x, y)| (y - x) as f64).sum();\n        Ok(BoundingBox {\n            min_values: minv,\n            max_values: maxv,\n            range_sum: sum,\n        })\n    }\n\n    pub fn check_contains_and_add_point(&mut self, values: &[f32]) -> bool {\n        self.add_two_arrays(values, values)\n    }\n\n    pub fn add_box(&mut self, x: &BoundingBox) {\n        self.add_two_arrays(x.get_min_values(), x.get_max_values());\n    }\n\n    fn add_two_arrays(&mut self, minvalues: &[f32], maxvalues: &[f32]) -> bool {\n        let old_sum = self.range_sum;\n\n        for (x, y) in self.min_values.iter_mut().zip(minvalues) {\n            *x = if *x < *y { *x } else { *y };\n        }\n        for (x, y) in self.max_values.iter_mut().zip(maxvalues) {\n            *x = if *x < *y { *y } else { *x };\n        }\n\n        self.range_sum = self\n            .min_values\n            .iter()\n            .zip(self.get_max_values())\n            .map(|(x, y)| (y - x) as f64)\n            .sum();\n\n        old_sum == self.range_sum\n    }\n\n    pub fn get_range_sum(&self) -> f64 {\n        self.range_sum\n    }\n\n    pub fn get_min_values(&self) -> &[f32] {\n        
&self.min_values\n    }\n\n    pub fn get_max_values(&self) -> &[f32] {\n        &self.max_values\n    }\n\n    pub fn probability_of_cut(&self, point: &[f32]) -> f64 {\n        let minsum: f32 = self\n            .min_values\n            .iter()\n            .zip(point)\n            .map(|(&x, &y)| if x - y > 0.0 { x - y } else { 0.0 })\n            .sum();\n        let maxsum: f32 = point\n            .iter()\n            .zip(self.get_max_values())\n            .map(|(&x, &y)| if x - y > 0.0 { x - y } else { 0.0 })\n            .sum();\n        let sum = maxsum + minsum;\n\n        if sum == 0.0 {\n            return 0.0;\n        } else if self.range_sum == 0.0 {\n            return 1.0;\n        }\n        (sum as f64) / (self.range_sum + sum as f64)\n    }\n\n    pub fn probability_of_cut_with_missing_coordinates(\n        &self,\n        point: &[f32],\n        missing_coordinates: &[bool],\n    ) -> f64 {\n        let minsum: f32 = self\n            .min_values\n            .iter()\n            .zip(point)\n            .zip(missing_coordinates)\n            .map(|((&x, &y), &z)| if !z && x - y > 0.0 { x - y } else { 0.0 })\n            .sum();\n        let maxsum: f32 = point\n            .iter()\n            .zip(self.get_max_values())\n            .zip(missing_coordinates)\n            .map(|((&x, &y), &z)| if !z && x - y > 0.0 { x - y } else { 0.0 })\n            .sum();\n        let sum = maxsum + minsum;\n\n        if sum == 0.0 {\n            return 0.0;\n        } else if self.range_sum == 0.0 {\n            return 1.0;\n        }\n        (sum as f64) / (self.range_sum + sum as f64)\n    }\n}\n"
  },
  {
    "path": "Rust/src/samplerplustree/cut.rs",
    "content": "use rand::Rng;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::SeedableRng;\nuse crate::samplerplustree::boundingbox::BoundingBox;\nuse crate::util::{maxf32, minf32};\n\n/**\n*  this is a class that helps manage the cut information; the nodes do not store information in\n*  this format\n*/\n\npub struct Cut {\n    pub dimension: usize,\n    pub value: f32,\n}\n\nimpl Cut {\n    pub fn new(dimension: usize, value: f32) -> Self {\n        Cut { dimension, value }\n    }\n\n    // factor should be in [0,1) in the follwoing; but we would not rule that\n    // out so that use out-of-range values to test the function\n    // the only invariant we must satisfy is that:\n    // if the range_sum of the input bounding box is 0 (single point) and\n    // the point is not equal to the point defining the box then the cut must\n    // be nontrivial\n    pub fn random_cut_and_separation(\n        bounding_box: &BoundingBox,\n        factor: f64,\n        point: &[f32],\n    ) -> (Cut, bool) {\n        let min_values = bounding_box.get_min_values();\n        let max_values = bounding_box.get_max_values();\n        let mut first_gap = point.len();\n        let mut last_gap = 0;\n        let mut range: f64 = min_values\n            .iter()\n            .zip(max_values)\n            .zip(point)\n            .map(|((x, y), z)| {\n                if z < x {\n                    (x - z) as f64\n                } else if y < z {\n                    (z - y) as f64\n                } else {\n                    0.0\n                }\n            })\n            .sum();\n        if range == 0.0 {\n            return (Cut::new(usize::MAX, 0.0), false);\n        }\n        range += bounding_box.get_range_sum();\n        range *= factor;\n\n        let mut dim: usize = 0;\n        let mut new_cut: f32 = f32::MAX;\n\n        while dim < point.len() {\n            let minv = minf32(min_values[dim], point[dim]);\n            let maxv = maxf32(max_values[dim], point[dim]);\n\n     
       let gap: f32 = maxv - minv;\n\n            if gap > 0.0 {\n                last_gap = dim;\n                if first_gap == point.len() {\n                    first_gap = dim; // will not change subsequently\n                }\n                let new_range = range - gap as f64;\n                if new_range <= 0.0 {\n                    new_cut = minv + range as f32; // precision lost here\n                    if new_cut <= minv || new_cut >= maxv {\n                        new_cut = minv;\n                    }\n                    break;\n                    // this implies that gap > 0; which means that there will be no issues\n                    // because either min == max both not equal to the point\n                    // or rangesum of the original box is not 0.0\n                }\n                range = new_range;\n            }\n            dim += 1;\n        }\n\n        if dim != point.len() {\n            let minvalue = min_values[dim];\n            let maxvalue = max_values[dim];\n\n            let separation: bool = ((point[dim] <= new_cut) && (new_cut < minvalue))\n                || ((maxvalue <= new_cut) && (new_cut < point[dim]));\n\n            if bounding_box.get_range_sum() != 0.0 || separation {\n                return (Cut::new(dim.try_into().unwrap(), new_cut), separation);\n            };\n        };\n\n        let mut rng = ChaCha20Rng::seed_from_u64(17);//ChaCha20Rng::from_entropy();\n        let index = if rng.gen::<f32>() < 0.5 { first_gap } else { last_gap };\n\n        let new_cut = minf32(min_values[index], point[index]);\n        let separation: bool = ((point[index] == new_cut) && (new_cut < min_values[index]))\n            || ((min_values[index] == new_cut) && (new_cut < point[index]));\n        // note it is possible that range is positive due to max_value[index] == point[index]\n        // not being the same as min_value[index]; but that is not a problematic scenario\n        return 
(Cut::new(index.try_into().unwrap(), new_cut), separation);\n    }\n}\n\n#[cfg(test)]\nmod tests {\n    use crate::samplerplustree::boundingbox::BoundingBox;\n    use crate::samplerplustree::cut::Cut;\n\n    #[test]\n    fn test_floating_point() {\n        let vec1 = vec![0.0001f32, 0.00001f32];\n        let vec2 = vec![0.0001f32, 0.000011f32];\n\n        // using unwrap in test\n        let b_box= BoundingBox::new(&vec1,&vec1).unwrap();\n        Cut::random_cut_and_separation(&b_box,2.0,&vec2); // exagegration\n        Cut::random_cut_and_separation(&b_box,-2.0,&vec2); // exaggeration\n        Cut::random_cut_and_separation(&b_box,1.0,&vec2); // should not happen\n        Cut::random_cut_and_separation(&b_box,0.0,&vec2); // can happen\n    }\n}"
  },
  {
    "path": "Rust/src/samplerplustree/mod.rs",
    "content": "pub mod boundingbox;\nmod cut;\npub mod nodestore;\npub mod nodeview;\nmod randomcuttree;\npub mod sampler;\npub mod samplerplustree;\n"
  },
  {
    "path": "Rust/src/samplerplustree/nodestore.rs",
    "content": "use std::{collections::HashMap, fmt::Debug, mem};\nuse std::hash::Hash;\nuse crate::types::Result;\n\nuse crate::{\n    common::{divector::DiVector, intervalstoremanager::IntervalStoreManager},\n    pointstore::PointStore,\n    samplerplustree::{boundingbox::BoundingBox, cut::Cut},\n    types::Location,\n};\nuse crate::errors::RCFError;\nuse crate::util::check_argument;\n\n///\n/// capacity is the number of leaves in the tree\n/// this is the (per tree) samplesize in RCF\n/// in the encoding below, the leaves are point_index + capacity\n/// the value capacity - 1 stands for null\n/// the values 0..(capacity-2) corresponds to the internal nodes; note that a regular binary tree\n/// where each node has 0 or 2 children, has (capacity - 1) internal nodes\n///\n/// the nodestore does not need to save the parent information; it is saved if the bounding box cache is\n/// more than 0.\n///\n/// Note that the mass of each node (in use) is at least 1. Subtracting 1 from each node implicitly\n/// makes the values between [0..(capacity-1)] which is very convenient for 2^8 and 2^16.\n///\n\n#[repr(C)]\npub struct VectorNodeStore<C, P, N>\nwhere\n    C: Location,\n    usize: From<C>,\n    P: Location + Eq + Hash + Send,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>,\n{\n    capacity: usize,\n    dimensions: usize,\n    using_transforms: bool,\n    project_to_tree: fn(Vec<f32>) -> Vec<f32>,\n    bounding_box_cache_fraction: f64,\n    parent_index: Vec<N>,\n    mass: Vec<N>,\n    pub left_index: Vec<P>,\n    pub right_index: Vec<P>,\n    pub cut_dimension: Vec<C>,\n    pub cut_value: Vec<f32>,\n    bounding_box_data: Vec<f32>,\n    range_sum_data: Vec<f64>,\n    hash_mass_leaves: HashMap<usize, usize>,\n    internal_node_manager: IntervalStoreManager<usize>,\n    store_attributes: bool,\n    propagate_attributes: bool,\n    store_pointsum: bool,\n    pointsum : Vec<f32>,\n    attributes: HashMap<P,HashMap<P,N>>,\n    propagated_attributes: 
HashMap<N,Vec<f32>>\n}\n\nconst SWITCH_THRESHOLD: f64 = 0.5;\n\npub trait NodeStore<Label: Sync + Copy, Attributes: Sync + Copy+ Hash + Eq + Send> : BasicStore + BoxStore<Label,Attributes> {}\n\npub trait BasicStore {\n    fn mass(&self, index: usize) -> usize;\n    fn sibling(&self, node: usize, parent: usize) -> usize;\n    fn leaf_point_index(&self, index: usize) -> Result<usize>;\n    fn cut_dimension(&self, index: usize) -> usize;\n    fn left_index(&self, index: usize) -> usize;\n    fn right_index(&self, index: usize) -> usize;\n    fn cut_value(&self, index: usize) -> f32;\n    fn is_leaf(&self, index: usize) -> bool;\n    fn is_left_of(&self, index: usize, point: &[f32]) -> bool;\n    fn use_path_for_box(&self) -> bool;\n    fn distribution(&self, index: usize) -> (usize, f32, usize, usize);\n    fn cut_and_children(&self, index: usize) -> (usize, f32, usize, usize);\n    fn set_path(&self, answer: &mut Vec<(usize, usize)>, root: usize, point: &[f32]);\n    fn null_node(&self) -> usize;\n    fn attribute_at_leaf(&self,point_index: usize) -> Result<Vec<(usize,usize)>>;\n}\n\npub trait BoxStore<Label: Sync + Copy, Attributes: Sync + Copy + Hash + Eq + Send> : BasicStore {\n    fn attribut_vec<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<Vec<f32>>;\n    fn recompute_attribute_vec<PS: PointStore<Label, Attributes>>(&mut self, index: usize, point_store: &PS) -> Result<()>;\n    fn pointsum<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<Vec<f32>>;\n    fn recompute_pointsum<PS: PointStore<Label, Attributes>>(&mut self, index: usize, point_store: &PS) -> Result<()>;\n    fn manage_ancestors_add<PS: PointStore<Label,Attributes>>(\n        &mut self,\n        path: &mut Vec<(usize, usize)>,\n        point: &[f32],\n        _point_store: &PS,\n        box_resolved: bool,\n    ) -> Result<()>;\n    fn manage_ancestors_delete<PS: PointStore<Label, Attributes>>(\n        &mut self,\n        
path: &mut Vec<(usize, usize)>,\n        point: &[f32],\n        point_store: &PS,\n        box_resolved: bool,\n    ) -> Result<()>;\n    fn reconstruct_box<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<BoundingBox>;\n    fn check_contains_and_rebuild_box<PS: PointStore<Label, Attributes>>(\n        &mut self,\n        index: usize,\n        point: &[f32],\n        point_store: &PS,\n    ) -> Result<bool>;\n    fn bounding_box<PS: PointStore<Label,Attributes>>(&self, index: usize, point_store: &PS) -> Result<BoundingBox>;\n    fn probability_of_cut<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        point_store: &PS,\n    ) -> Result<f64>;\n    fn probability_of_cut_with_missing_coordinates<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        point_store: &PS,\n    ) -> Result<f64>;\n    fn modify_in_place_probability_of_cut_di_vector<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        point_store: &PS,\n        di_vector: &mut DiVector,\n    ) -> Result<()>;\n    fn modify_in_place_probability_of_cut_di_vector_with_missing_coordinates<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        point_store: &PS,\n        di_vector: &mut DiVector,\n    ) -> Result<()>;\n    fn grow_node_box_pair<PS: PointStore<Label,Attributes>>(\n        &self,\n        first: &mut BoundingBox,\n        second: &mut BoundingBox,\n        point_store: &PS,\n        node: usize,\n        sibling: usize,\n    ) -> Result<()>;\n    fn grow_node_box<PS: PointStore<Label,Attributes>>(\n        &self,\n        bounding_box: &mut BoundingBox,\n        point_store: &PS,\n        node: usize,\n        sibling: usize,\n    ) -> Result<()>;\n    fn check_left<PS: 
PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        dim: usize,\n        value: f32,\n        point_store: &PS\n    ) -> Result<bool>;\n    fn check_right<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        dim: usize,\n        value: f32,\n        point_store: &PS\n    ) -> Result<bool>;\n}\n\nimpl<C, P, N> VectorNodeStore<C, P, N>\nwhere\n    C: Location,\n    usize: From<C>,\n    P: Location+ Eq + Hash + Send,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>,\n    <C as TryFrom<usize>>::Error: Debug,\n    <P as TryFrom<usize>>::Error: Debug,\n    <N as TryFrom<usize>>::Error: Debug,\n{\n    pub fn new(\n        capacity: usize,\n        dimensions: usize,\n        using_transforms: bool,\n        store_attributes: bool,\n        store_pointsum: bool,\n        propagate_attributes: bool,\n        project_to_tree: fn(Vec<f32>) -> Vec<f32>,\n        bounding_box_cache_fraction: f64,\n    ) -> Result<Self> {\n        check_argument( capacity - 1 <= N::MAX.into() ,\n                \" invalid parameter, increase size of N to represent {}\")?;\n        let cache_limit: usize = (bounding_box_cache_fraction * capacity as f64) as usize;\n        let null_node = Self::null_value(capacity);\n        let pointsum = if store_pointsum {\n            vec![0.0f32;(capacity - 1)*dimensions]\n        } else {\n            Vec::new()\n        };\n        Ok(VectorNodeStore {\n            capacity,\n            dimensions,\n            using_transforms,\n            project_to_tree,\n            bounding_box_cache_fraction,\n            left_index: vec![null_node.try_into().unwrap(); capacity - 1],\n            right_index: vec![null_node.try_into().unwrap(); capacity - 1],\n            mass: vec![0.try_into().unwrap(); capacity - 1],\n            parent_index: if bounding_box_cache_fraction > 0.0 {\n                vec![null_node.try_into().unwrap(); capacity - 1]\n            } else {\n                
Vec::new()\n            },\n            cut_value: vec![0.0; capacity - 1],\n            cut_dimension: vec![C::MAX; capacity - 1],\n            bounding_box_data: vec![0.0; dimensions * 2 * cache_limit],\n            range_sum_data: vec![0.0; cache_limit],\n            hash_mass_leaves: HashMap::new(),\n            internal_node_manager: IntervalStoreManager::<usize>::new(capacity - 1),\n            attributes: HashMap::new(),\n            store_attributes,\n            propagate_attributes: propagate_attributes,\n            store_pointsum,\n            pointsum,\n            propagated_attributes : HashMap::new()\n        })\n    }\n\n    /// 0 is indicative of null given unsigned representation\n    /// otherwise index X uses slot X-1\n    pub fn invalidate_pointsum(&mut self,index: usize) -> Result<()>{\n        check_argument(self.store_pointsum, \"incorrct invocation\")?;\n        for x in self.pointsum[(index*self.dimensions).. ((index+1)*self.dimensions)].iter_mut() {\n            *x=0.0;\n        };\n        Ok(())\n    }\n\n    pub fn add_attrib_at_leaf(&mut self,point_index: usize, point_attribute: usize) -> Result<()>{\n        if self.store_attributes {\n            let p: P = point_index.try_into().unwrap();\n            let v: P = point_attribute.try_into().unwrap();\n            if let Some(x) = self.attributes.get_mut(&p) {\n                let a: usize = if let Some(y) = x.remove(&v) {\n                    usize::from(y) + 1\n                } else {\n                    1\n                };\n                x.insert(v, a.try_into().unwrap());\n            } else {\n                let mut x = HashMap::new();\n                let a: usize = 1;\n                x.insert(v, a.try_into().unwrap());\n                self.attributes.insert(p, x);\n            };\n        }\n        Ok(())\n    }\n\n    pub fn del_attrib_at_leaf(&mut self,point_index: usize, point_attribute: usize) -> Result<()> {\n        if self.store_attributes {\n            let 
p: P = point_index.try_into().unwrap();\n            let v: P = point_attribute.try_into().unwrap();\n            if let Some(x) = self.attributes.get_mut(&p) {\n                if let Some(y) = x.remove(&v) {\n                    check_argument(usize::from(y) > 0, \" error\")?;\n                    if usize::from(y) > 1 {\n                        x.insert(v, (usize::from(y) - 1).try_into().unwrap());\n                    }\n                    return Ok(());\n                }\n            }\n            return Err(RCFError::InvalidArgument { msg: \"element should be present\" });\n        }\n        Ok(())\n    }\n\n    fn translate(&self, index: usize) -> usize {\n        if index != self.null_node() && self.range_sum_data.len() <= index {\n            usize::MAX\n        } else {\n            index\n        }\n    }\n\n    fn copy_box_to_data(&mut self, index: usize, bounding_box: &BoundingBox) {\n        let idx: usize = self.translate(index);\n        if idx != usize::MAX {\n            let base: usize = 2 * idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minarray = &mut self.bounding_box_data[base..mid];\n            for (x, y) in minarray.iter_mut().zip((*bounding_box).get_min_values()) {\n                *x = *y;\n            }\n            let maxarray = &mut self.bounding_box_data[mid..mid + self.dimensions];\n            for (x, y) in maxarray.iter_mut().zip((*bounding_box).get_max_values()) {\n                *x = *y;\n            }\n            self.range_sum_data[idx] = (*bounding_box).get_range_sum();\n        }\n    }\n\n    fn check_contains_and_add_point(&mut self, index: usize, point: &[f32]) -> bool {\n        let idx: usize = self.translate(index);\n        if idx != usize::MAX {\n            let base = 2 * idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minarray = &mut self.bounding_box_data[base..mid];\n            for (x, y) in minarray.iter_mut().zip(point) {\n   
             *x = if (*x) > (*y) { *y } else { *x };\n            }\n\n            let maxarray = &mut self.bounding_box_data[mid..mid + self.dimensions];\n            for (x, y) in maxarray.iter_mut().zip(point) {\n                *x = if *x < *y { *y } else { *x };\n            }\n\n            let newminarray = &self.bounding_box_data[base..mid];\n            let newmaxarray = &self.bounding_box_data[mid..mid + self.dimensions];\n            let newsum: f64 = newminarray\n                .iter()\n                .zip(newmaxarray)\n                .map(|(x, y)| (y - x) as f64)\n                .sum();\n            let answer = self.range_sum_data[idx] == newsum;\n            self.range_sum_data[idx] = newsum;\n            return answer;\n        }\n        false\n    }\n\n    fn check_strictly_contains(&mut self, index: usize, point: &[f32]) -> bool {\n        let idx: usize = self.translate(index);\n        if idx != usize::MAX {\n            let base = 2 * idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minarray = &self.bounding_box_data[base..mid];\n            let maxarray = &self.bounding_box_data[mid..mid + self.dimensions];\n            let not_inside = minarray\n                .iter()\n                .zip(point)\n                .zip(maxarray)\n                .any(|((x, y), z)| x >= y || y >= z);\n            return !not_inside;\n        }\n        false\n    }\n\n    fn box_from_data(&self, idx: usize) -> BoundingBox {\n        let dimensions = self.dimensions;\n        let base = 2 * idx * dimensions;\n        return BoundingBox::new(\n            &self.bounding_box_data[base..base + dimensions],\n            &self.bounding_box_data[base + dimensions..base + 2 * dimensions],\n        ).unwrap();\n    }\n\n    pub fn add_node(\n        &mut self,\n        parent_index: usize,\n        point: &[f32],\n        child: usize,\n        point_index: usize,\n        cut: Cut,\n        saved_box: &BoundingBox,\n    ) -> 
Result<usize> {\n        let index = self.internal_node_manager.get()?.into();\n        self.cut_value[index] = cut.value;\n        self.cut_dimension[index] = cut.dimension.try_into().unwrap();\n        if point[cut.dimension] <= cut.value {\n            self.left_index[index] = self.leaf_index(point_index).try_into().unwrap();\n            self.right_index[index] = child.try_into().unwrap();\n        } else {\n            self.left_index[index] = child.try_into().unwrap();\n            self.right_index[index] = self.leaf_index(point_index).try_into().unwrap();\n        }\n\n        self.mass[index] = (self.mass(child)).try_into().unwrap();\n        // Not adding 1 to the above (new leaf) since all mass is represented as mass- 1\n        if self.bounding_box_cache_fraction > 0.0 {\n            self.copy_box_to_data(index, saved_box);\n            self.check_contains_and_add_point(index, point);\n\n            self.parent_index[index] = parent_index.try_into().unwrap();\n            if !self.is_leaf(child) {\n                self.parent_index[child] = index.try_into().unwrap();\n            }\n        }\n\n        if parent_index != self.null_node() {\n            self.replace_node(parent_index, child, index);\n        }\n        Ok(index)\n    }\n\n    pub fn leaf_index(&self, point_index: usize) -> usize {\n        point_index + self.capacity\n    }\n\n    pub fn set_root(&mut self, index: usize)\n        where\n            <N as TryFrom<usize>>::Error: Debug,\n    {\n        if !self.is_leaf(index) && self.bounding_box_cache_fraction > 0.0 {\n            self.parent_index[index] = 0.try_into().unwrap();\n        }\n    }\n\n    // capacity is the number of leaves\n    pub fn increase_leaf_mass(&mut self, index: usize) -> Result<()> {\n        if index >= self.capacity {\n            let y = index - self.capacity;\n            if let Some(a) = self.hash_mass_leaves.remove(&y) {\n                self.hash_mass_leaves.insert(y, a + 1);\n            } else {\n       
         self.hash_mass_leaves.insert(y, 1);\n            }\n            return Ok(());\n        }\n        Err(RCFError::InvalidArgument { msg: \" incorrect call with a non-leaf index\" })\n    }\n\n    pub fn decrease_leaf_mass(&mut self, index: usize) -> Result<usize> {\n        check_argument(self.is_leaf(index), \"incorrect leaf index\")?;\n        let y = index - self.capacity;\n        if let Some(a) = self.hash_mass_leaves.remove(&y) {\n            if a > 1 {\n                self.hash_mass_leaves.insert(y, a - 1);\n                Ok(a)\n            } else {\n                Ok(1) //default\n            }\n        } else {\n            Ok(0)\n        }\n    }\n\n\n    pub fn delete_internal_node(&mut self, index: usize) -> Result<()>{\n        let null_node = self.null_node();\n\n        self.left_index[index] = null_node.try_into().unwrap();\n        self.right_index[index] = null_node.try_into().unwrap();\n        self.mass[index] = 0.try_into().unwrap();\n        if self.bounding_box_cache_fraction > 0.0 {\n            self.parent_index[index] = null_node.try_into().unwrap(); // null\n        }\n        self.cut_dimension[index] = C::MAX;\n        self.cut_value[index] = 0.0;\n        if self.propagate_attributes {\n            self.propagated_attributes.remove(&index.try_into().expect(\"incorrect state\"));\n        }\n        self.internal_node_manager.release(index)\n    }\n\n    pub fn cut_value(&self, index: usize) -> f32 {\n        self.cut_value[index]\n    }\n\n    pub fn cut_dimension(&self, index: usize) -> usize {\n        self.cut_dimension[index].into()\n    }\n\n\n    pub fn replace_node(&mut self, grand_parent: usize, parent: usize, node: usize) {\n        if parent == self.left_index[grand_parent].into() {\n            self.left_index[grand_parent] = node.try_into().unwrap();\n        } else {\n            self.right_index[grand_parent] = node.try_into().unwrap();\n        }\n        if !self.is_leaf(node) && 
self.bounding_box_cache_fraction > 0.0 {\n            self.parent_index[node] = grand_parent.try_into().unwrap();\n        }\n    }\n\n    pub fn size(&self, _dimensions: usize) -> usize {\n        (self.internal_node_manager.get_size() + self.left_index.len() + self.right_index.len())\n            * std::mem::size_of::<P>()\n            + (self.parent_index.len() + self.mass.len()) * std::mem::size_of::<N>()\n            + (self.cut_dimension.len()) * std::mem::size_of::<C>()\n            + (self.cut_value.len()) * mem::size_of::<f32>()\n            + (self.bounding_box_data.len() + 2 * self.range_sum_data.len()) * mem::size_of::<f32>()\n            + std::mem::size_of::<VectorNodeStore<C, P, N>>()\n    }\n\n    fn null_value(capacity: usize) -> usize {\n        capacity - 1\n    }\n\n    fn is_internal(&self, index: usize) -> bool {\n        index != self.null_node() && index < self.capacity\n    }\n\n}\n\n\nimpl<C, P, N,Label,Attributes> BoxStore<Label,Attributes> for VectorNodeStore<C, P, N>\n    where\n        C: Location,\n        usize: From<C>,\n        P: Location+ Eq + Hash + Send,\n        usize: From<P>,\n        N: Location,\n        usize: From<N>,\n        <C as TryFrom<usize>>::Error: Debug,\n        <P as TryFrom<usize>>::Error: Debug,\n        <N as TryFrom<usize>>::Error: Debug,\n        Label : Sync + Copy,\n        Attributes: Sync + Copy+ Hash + Eq + Send,\n{\n    fn attribut_vec<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<Vec<f32>>{\n        check_argument(self.propagate_attributes, \" enable propagation of vectors\")?;\n        if self.is_leaf(index) {\n            let list = self.attributes.get(&(self.leaf_point_index(index)?.try_into().unwrap())).expect(\"incorrect state\");\n            check_argument(list.len() > 0, \"cannot be 0\")?;\n            let veclist = list.iter().map(|(&x, &y)| {\n                let weight : usize = y.into();\n                let mut vec = 
point_store.attribute_vec(x.into())?;\n                for z in vec.iter_mut() {\n                    *z *= weight as f32;\n                };\n                Ok(vec)\n            }).collect::<Result<Vec<Vec<f32>>>>()?;\n            let mut answer =  veclist[0].clone();\n            for i in 1..list.len() {\n                for (x,y) in answer.iter_mut().zip(&veclist[i]) {\n                    *x += *y;\n                }\n            }\n            Ok(answer)\n        } else {\n            Ok(self.propagated_attributes.get(&index.try_into().unwrap()).expect(\"incorrect state\").clone())\n        }\n    }\n\n    fn recompute_attribute_vec<PS: PointStore<Label, Attributes>>(&mut self, index: usize, point_store: &PS) -> Result<()>{\n        check_argument(!self.is_leaf(index), \"incorrect invocation\")?;\n        let mut left = self.attribut_vec(self.left_index[index].into(),point_store)?;\n        let right = self.attribut_vec(self.right_index[index].into(),point_store)?;\n        for (x,y) in left.iter_mut().zip(right){\n            *x += y;\n        }\n        self.propagated_attributes.insert(index.try_into().expect(\"incorrect state\"),left);\n        Ok(())\n    }\n\n    fn pointsum<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<Vec<f32>>{\n        check_argument(self.store_pointsum, \" enable store_pointsum\")?;\n        if self.is_leaf(index) {\n            let mut point = point_store.copy(self.leaf_point_index(index)?)?;\n            let mass = self.mass(index);\n            for x in point.iter_mut() {\n                *x *= mass as f32;\n            }\n            Ok(point)\n        } else {\n            Ok(Vec::from(&self.pointsum[(index * self.dimensions)..((index + 1) * self.dimensions)]))\n        }\n    }\n\n    fn recompute_pointsum<PS: PointStore<Label, Attributes>>(&mut self, index: usize, point_store: &PS) -> Result<()>{\n        check_argument(!self.is_leaf(index), \"incorrect invocation\")?;\n        let left 
= self.pointsum(self.left_index[index].into(),point_store)?;\n        let right = self.pointsum(self.right_index[index].into(),point_store)?;\n        for ((x,y),z) in self.pointsum[(index * self.dimensions)..((index + 1) * self.dimensions)].iter_mut().zip(left).zip(right) {\n            *x = y + z;\n        }\n        Ok(())\n    }\n\n    fn manage_ancestors_add<PS: PointStore<Label, Attributes>>(\n        &mut self,\n        path: &mut Vec<(usize, usize)>,\n        point: &[f32],\n        point_store: &PS,\n        box_resolved: bool,\n    ) -> Result<()>{\n        let mut resolved = box_resolved;\n        while path.len() != 0 {\n            let index = path.pop().unwrap().0;\n            let val: usize = self.mass[index].into();\n            self.mass[index] = (val + 1).try_into().unwrap();\n            if self.store_pointsum {\n                self.recompute_pointsum(index,point_store)?;\n            }\n            if self.propagate_attributes {\n                self.recompute_attribute_vec(index,point_store)?;\n            }\n            if self.bounding_box_cache_fraction > 0.0 && !resolved {\n                resolved = self.check_contains_and_add_point(index.into(), point);\n            }\n        }\n        Ok(())\n    }\n\n    fn manage_ancestors_delete<PS: PointStore<Label, Attributes>>(\n        &mut self,\n        path: &mut Vec<(usize, usize)>,\n        point: &[f32],\n        point_store: &PS,\n        box_resolved: bool,\n    ) -> Result<()>{\n        let mut resolved = box_resolved;\n        while path.len() != 0 {\n            let index = path.pop().unwrap().0;\n            let val: usize = self.mass[index].into();\n            self.mass[index] = (val - 1).try_into().unwrap();\n            if self.store_pointsum {\n                self.recompute_pointsum(index,point_store)?;\n            }\n            if self.propagate_attributes {\n                self.recompute_attribute_vec(index,point_store)?;\n            }\n            if 
self.bounding_box_cache_fraction > 0.0 && !resolved {\n                resolved = self.check_contains_and_rebuild_box(index, point, point_store)?;\n            }\n        }\n        Ok(())\n    }\n\n    fn reconstruct_box<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<BoundingBox> {\n        let mut mutated_bounding_box = self.bounding_box(self.left_index[index].into(), point_store)?;\n        self.grow_node_box(\n            &mut mutated_bounding_box,\n            point_store,\n            index,\n            self.right_index[index].into(),\n        )?;\n        Ok(mutated_bounding_box)\n    }\n\n    fn check_contains_and_rebuild_box<PS: PointStore<Label, Attributes>>(\n        &mut self,\n        index: usize,\n        point: &[f32],\n        point_store: &PS,\n    ) -> Result<bool> {\n        let idx = self.translate(index);\n        if idx != usize::MAX {\n            if !self.check_strictly_contains(index, point) {\n                let mutated_bounding_box = self.reconstruct_box(index, point_store)?;\n                self.copy_box_to_data(index, &mutated_bounding_box);\n                return Ok(false);\n            }\n            Ok(true)\n        } else {\n            Ok(false)\n        }\n    }\n\n    fn bounding_box<PS: PointStore<Label, Attributes>>(&self, index: usize, point_store: &PS) -> Result<BoundingBox> {\n        if self.is_leaf(index) {\n            return if self.using_transforms {\n                let point =\n                    &(self.project_to_tree)(point_store.copy(self.leaf_point_index(index)?)?);\n                BoundingBox::new(point, point)\n            } else {\n                let point = point_store\n                    .reference_and_offset(self.leaf_point_index(index)?)?\n                    .0;\n                BoundingBox::new(point, point)\n            };\n        } else {\n            let idx: usize = self.translate(index);\n            if idx != usize::MAX {\n                return 
Ok(self.box_from_data(idx));\n            }\n            let mut mutated_bounding_box = self.bounding_box(self.left_index[index].into(), point_store)?;\n            self.grow_node_box(\n                &mut mutated_bounding_box,\n                point_store,\n                index,\n                self.right_index[index].into(),\n            )?;\n            return Ok(mutated_bounding_box);\n        }\n    }\n\n    fn probability_of_cut<PS: PointStore<Label, Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        point_store: &PS,\n    ) -> Result<f64> {\n        let node_idx: usize = self.translate(index);\n        if node_idx != usize::MAX {\n            let base = 2 * node_idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minarray = &self.bounding_box_data[base..mid];\n            let maxarray = &self.bounding_box_data[mid..mid + self.dimensions];\n            let minsum: f32 = minarray\n                .iter()\n                .zip(point)\n                .map(|(&x, &y)| if x - y > 0.0 { x - y } else { 0.0 })\n                .sum();\n            let maxsum: f32 = point\n                .iter()\n                .zip(maxarray)\n                .map(|(&x, &y)| if x - y > 0.0 { x - y } else { 0.0 })\n                .sum();\n            let sum = maxsum + minsum;\n\n            if sum == 0.0 {\n                return Ok(0.0);\n            }\n            Ok(sum as f64 / (self.range_sum_data[node_idx] + sum as f64))\n        } else {\n            let bounding_box = self.bounding_box(index, point_store)?;\n            Ok(bounding_box.probability_of_cut(point))\n        }\n    }\n    fn probability_of_cut_with_missing_coordinates<PS: PointStore<Label, Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        point_store: &PS,\n    ) -> Result<f64> {\n        let node_idx: usize = self.translate(index);\n        if node_idx != 
usize::MAX {\n            let base = 2 * node_idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minarray = &self.bounding_box_data[base..mid];\n            let maxarray = &self.bounding_box_data[mid..mid + self.dimensions];\n            let minsum: f32 = minarray\n                .iter()\n                .zip(point)\n                .zip(missing_coordinates)\n                .map(|((&x, &y), &b)| if !b && x - y > 0.0 { x - y } else { 0.0 })\n                .sum();\n            let maxsum: f32 = point\n                .iter()\n                .zip(maxarray)\n                .zip(missing_coordinates)\n                .map(|((&x, &y), &b)| if !b && x - y > 0.0 { x - y } else { 0.0 })\n                .sum();\n            let sum = maxsum + minsum;\n\n            if sum == 0.0 {\n                return Ok(0.0);\n            }\n            Ok(sum as f64 / (self.range_sum_data[node_idx] + sum as f64))\n        } else {\n            let bounding_box = self.bounding_box(index, point_store)?;\n            Ok(bounding_box.probability_of_cut_with_missing_coordinates(point, missing_coordinates))\n        }\n    }\n    fn modify_in_place_probability_of_cut_di_vector<PS: PointStore<Label, Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        point_store: &PS,\n        di_vector: &mut DiVector,\n    ) -> Result<()>{\n        check_argument(di_vector.high.len() == point.len(), \" incorrect dimensions of bounding box\")?;\n        let node_idx: usize = self.translate(index);\n        if node_idx != usize::MAX {\n            let base = 2 * node_idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minsum: f64 = di_vector\n                .low\n                .iter_mut()\n                .zip(&self.bounding_box_data[base..mid])\n                .zip(point)\n                .map(|((x, &y), &z)| {\n                    if y - z > 0.0 {\n                        *x = (y - z) as f64;\n   
                     *x\n                    } else {\n                        *x = 0.0;\n                        *x\n                    }\n                })\n                .sum();\n            let maxsum: f64 = di_vector\n                .high\n                .iter_mut()\n                .zip(point)\n                .zip(&self.bounding_box_data[mid..mid + self.dimensions])\n                .map(|((x, &y), &z)| {\n                    if y - z > 0.0 {\n                        *x = (y - z) as f64;\n                        *x\n                    } else {\n                        *x = 0.0;\n                        *x\n                    }\n                })\n                .sum();\n            let sum = maxsum + minsum;\n            if sum > 0.0 {\n                di_vector.scale(1.0 / (self.range_sum_data[node_idx] + sum));\n            }\n        } else {\n            let bounding_box = self.bounding_box(index, point_store)?;\n            di_vector.assign_as_probability_of_cut(&bounding_box, point);\n        };\n        Ok(())\n    }\n\n    fn modify_in_place_probability_of_cut_di_vector_with_missing_coordinates<PS: PointStore<Label, Attributes>>(\n        &self,\n        index: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        point_store: &PS,\n        di_vector: &mut DiVector,\n    ) -> Result<()>{\n        check_argument(di_vector.high.len() == point.len(), \" incorrect dimensions of bounding box\")?;\n        let node_idx: usize = self.translate(index);\n        if node_idx != usize::MAX {\n            let base = 2 * node_idx * self.dimensions;\n            let mid = base + self.dimensions;\n            let minsum: f64 = di_vector\n                .low\n                .iter_mut()\n                .zip(&self.bounding_box_data[base..mid])\n                .zip(point)\n                .zip(missing_coordinates)\n                .map(|(((x, &y), &z), &b)| {\n                    if !b && y - z > 0.0 {\n                        *x 
= (y - z) as f64;\n                        *x\n                    } else {\n                        *x = 0.0;\n                        *x\n                    }\n                })\n                .sum();\n            let maxsum: f64 = di_vector\n                .high\n                .iter_mut()\n                .zip(point)\n                .zip(&self.bounding_box_data[mid..mid + self.dimensions])\n                .zip(missing_coordinates)\n                .map(|(((x, &y), &z), &b)| {\n                    if !b && y - z > 0.0 {\n                        *x = (y - z) as f64;\n                        *x\n                    } else {\n                        *x = 0.0;\n                        *x\n                    }\n                })\n                .sum();\n            let sum = maxsum + minsum;\n            if sum > 0.0 {\n                di_vector.scale(1.0 / (self.range_sum_data[node_idx] + sum));\n            }\n        } else {\n            let bounding_box = self.bounding_box(index, point_store)?;\n            di_vector.assign_as_probability_of_cut_with_missing_coordinates(\n                &bounding_box,\n                point,\n                missing_coordinates,\n            );\n        };\n        Ok(())\n    }\n\n    fn grow_node_box_pair<PS: PointStore<Label, Attributes>>(\n        &self,\n        first: &mut BoundingBox,\n        second: &mut BoundingBox,\n        point_store: &PS,\n        _node: usize,\n        sibling: usize,\n    ) -> Result<()>{\n        if self.is_leaf(sibling) {\n            if self.using_transforms {\n                let point =\n                    &(self.project_to_tree)(point_store.copy(self.leaf_point_index(sibling)?)?);\n                (*first).check_contains_and_add_point(point);\n                (*second).check_contains_and_add_point(point);\n            } else {\n                let point = point_store\n                    .reference_and_offset(self.leaf_point_index(sibling)?)?\n                    .0;\n          
      (*first).check_contains_and_add_point(point);\n                (*second).check_contains_and_add_point(point);\n            }\n        } else {\n            let idx: usize = self.translate(sibling);\n            if idx != usize::MAX {\n                let dimensions = self.dimensions;\n                let base = 2 * idx * dimensions;\n                (*first)\n                    .check_contains_and_add_point(&self.bounding_box_data[base..base + dimensions]);\n                (*second)\n                    .check_contains_and_add_point(&self.bounding_box_data[base..base + dimensions]);\n                (*first).check_contains_and_add_point(\n                    &self.bounding_box_data[base + dimensions..base + 2 * dimensions],\n                );\n                (*second).check_contains_and_add_point(\n                    &self.bounding_box_data[base + dimensions..base + 2 * dimensions],\n                );\n            } else {\n                self.grow_node_box_pair(\n                    first,\n                    second,\n                    point_store,\n                    sibling,\n                    self.left_index(sibling),\n                )?;\n                self.grow_node_box_pair(\n                    first,\n                    second,\n                    point_store,\n                    sibling,\n                    self.right_index(sibling),\n                )?;\n            }\n        }\n        Ok(())\n    }\n\n    fn grow_node_box<PS: PointStore<Label, Attributes>>(\n        &self,\n        bounding_box: &mut BoundingBox,\n        point_store: &PS,\n        _node: usize,\n        sibling: usize,\n    ) -> Result<()>{\n        if self.is_leaf(sibling) {\n            if self.using_transforms {\n                let point =\n                    &(self.project_to_tree)(point_store.copy(self.leaf_point_index(sibling)?)?);\n                (*bounding_box).check_contains_and_add_point(point);\n            } else {\n                let point = 
point_store\n                    .reference_and_offset(self.leaf_point_index(sibling)?)?\n                    .0;\n                (*bounding_box).check_contains_and_add_point(point);\n            }\n        } else {\n            let idx: usize = self.translate(sibling.into());\n            if idx != usize::MAX {\n                let dimensions = self.dimensions;\n                let base = 2 * idx * dimensions;\n                (*bounding_box)\n                    .check_contains_and_add_point(&self.bounding_box_data[base..base + dimensions]);\n                (*bounding_box).check_contains_and_add_point(\n                    &self.bounding_box_data[base + dimensions..base + 2 * dimensions],\n                );\n            } else {\n                self.grow_node_box(\n                    bounding_box,\n                    point_store,\n                    sibling,\n                    self.left_index(sibling),\n                )?;\n                self.grow_node_box(\n                    bounding_box,\n                    point_store,\n                    sibling,\n                    self.right_index(sibling),\n                )?;\n            }\n        }\n        Ok(())\n    }\n    fn check_left<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        dim: usize,\n        value: f32,\n        point_store: &PS\n    ) -> Result<bool> {\n        if self.is_leaf(index) {\n            let point = (self.project_to_tree)(point_store.copy(self.leaf_point_index(index)?)?);\n            return Ok(point[dim] < value);\n        }\n        // both are left -- we want both to be less than value\n        Ok(self.check_left(self.left_index(index), dim, value, point_store)?\n            && self.check_left(self.right_index(index), dim, value, point_store)?)\n    }\n\n    fn check_right<PS: PointStore<Label,Attributes>>(\n        &self,\n        index: usize,\n        dim: usize,\n        value: f32,\n        point_store: &PS\n    ) -> Result<bool> {\n 
       if self.is_leaf(index) {\n            let point = (self.project_to_tree)(point_store.copy(self.leaf_point_index(index)?)?);\n            return Ok(point[dim] >= value);\n        }\n        // both are right -- we want the subtree to be greater or equal value\n        Ok(self.check_right(self.left_index(index), dim, value, point_store)?\n            && self.check_right(self.right_index(index), dim, value, point_store)?)\n    }\n}\n\nimpl<C, P, N> BasicStore for VectorNodeStore<C, P, N>\n        where\n            C: Location,\n            usize: From<C>,\n            P: Location + Eq + Hash + Send,\n            usize: From<P>,\n            N: Location,\n            usize: From<N>,\n            <C as TryFrom<usize>>::Error: Debug,\n            <P as TryFrom<usize>>::Error: Debug,\n            <N as TryFrom<usize>>::Error: Debug,\n{\n    fn mass(&self, index: usize) -> usize {\n        if self.is_leaf(index) {\n            let y = index - self.capacity;\n            return if let Some(a) = self.hash_mass_leaves.get(&y) {\n                (*a).into()\n            } else {\n                1\n            };\n        }\n        let base: usize = self.mass[index].into();\n        base + 1\n    }\n\n    fn leaf_point_index(&self, index: usize) -> Result<usize> {\n        check_argument(self.is_leaf(index), \" not a leaf index\")?;\n        Ok(index - self.capacity)\n    }\n\n    fn sibling(&self, node: usize, parent: usize) -> usize {\n        let mut sibling = self.left_index[parent].into();\n        if node == sibling {\n            sibling = self.right_index[parent].into();\n        }\n        sibling\n    }\n\n\n    fn cut_dimension(&self, index: usize) -> usize {\n        self.cut_dimension[index].into()\n    }\n\n    fn left_index(&self, index: usize) -> usize {\n        self.left_index[index].try_into().unwrap()\n    }\n\n    fn right_index(&self, index: usize) -> usize {\n        self.right_index[index].try_into().unwrap()\n    }\n\n    fn cut_value(&self, 
index: usize) -> f32 {\n        self.cut_value[index]\n    }\n\n    fn is_leaf(&self, index: usize) -> bool {\n        index != self.null_node() && index >= self.capacity\n    }\n\n    fn is_left_of(&self, index: usize, point: &[f32]) -> bool {\n        let dim_idx: usize = self.cut_dimension[index].try_into().unwrap();\n        point[dim_idx] <= self.cut_value[index]\n    }\n\n    fn use_path_for_box(&self) -> bool {\n        self.bounding_box_cache_fraction < SWITCH_THRESHOLD\n    }\n\n    fn distribution(&self, index: usize) -> (usize, f32, usize, usize) {\n        (\n            self.cut_dimension[index].into(),\n            self.cut_value[index],\n            self.mass(self.left_index(index)),\n            self.mass(self.right_index(index)),\n        )\n    }\n\n    fn cut_and_children(&self, index: usize) -> (usize, f32, usize, usize) {\n        if self.is_internal(index) {\n            (\n                self.cut_dimension[index].into(),\n                self.cut_value[index],\n                self.left_index[index].into(),\n                self.right_index[index].into(),\n            )\n        } else {\n            (usize::MAX, f32::MAX, usize::MAX, usize::MAX)\n        }\n    }\n\n    fn set_path(&self, answer: &mut Vec<(usize, usize)>, root: usize, point: &[f32]) {\n        let mut node = root;\n        answer.push((root, self.null_node()));\n        while !self.is_leaf(node) {\n            if self.is_left_of(node, point) {\n                answer.push((self.left_index[node].into(), self.right_index[node].into()));\n                node = self.left_index[node].into();\n            } else {\n                answer.push((self.right_index[node].into(), self.left_index[node].into()));\n                node = self.right_index[node].into();\n            }\n        }\n    }\n\n    fn null_node(&self) -> usize {\n        Self::null_value(self.capacity)\n    }\n\n    fn attribute_at_leaf(&self,point_index: usize) -> Result<Vec<(usize,usize)>> {\n        
self.attributes.get(&point_index.try_into().expect(\"out of range\")).expect(\"should be present\").iter()\n            .map(|(&x,&y)| {\n                Ok((x.into(),y.into()))\n            }).collect()\n    }\n}\n\nimpl<C, P, N,Label,Attributes> NodeStore<Label,Attributes> for VectorNodeStore<C, P, N>\n    where\n        C: Location,\n        usize: From<C>,\n        P: Location+ Eq + Hash + Send,\n        usize: From<P>,\n        N: Location,\n        usize: From<N>,\n        <C as TryFrom<usize>>::Error: Debug,\n        <P as TryFrom<usize>>::Error: Debug,\n        <N as TryFrom<usize>>::Error: Debug,\n        Label : Sync + Copy,\n        Attributes: Sync + Copy+ Hash + Eq + Send,\n{}\n"
  },
  {
    "path": "Rust/src/samplerplustree/nodeview.rs",
    "content": "use std::hash::Hash;\nuse crate::{\n    common::divector::DiVector,\n    pointstore::PointStore,\n    samplerplustree::{boundingbox::BoundingBox, nodestore::{BasicStore, NodeStore}},\n    visitor::visitor::VisitorInfo,\n    types::Result\n};\n\n\npub trait UpdatableNodeView <Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send>{\n    fn create<NS : NodeStore<Label,Attributes>>(root: usize, node_store: &NS) -> Self;\n    fn update_at_leaf<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        visitor_info: &VisitorInfo,\n    ) -> Result<()>;\n    fn update_from_node_traversing_down< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        visitor_info: &VisitorInfo,\n    ) -> Result<()>;\n    fn update_from_node_traversing_up< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        visitor_info: &VisitorInfo,\n    ) -> Result<()>;\n    fn current_node(&self) -> usize;\n    fn set_use_shadow_box< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(&mut self, node_store: &NS, point_store: &PS) -> Result<()>;\n}\n\npub trait UpdatableMultiNodeView<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send> : UpdatableNodeView<Label, Attributes> {\n    fn create<NS: NodeStore<Label,Attributes>>(root: usize, node_store: &NS) -> Self;\n    fn set_trigger_traversing_down<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        parent: usize,\n        node_store: &NS,\n        point_store: &PS,\n        visitor_info: &VisitorInfo,\n    );\n    fn 
update_view_to_parent_with_missing_coordinates<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        parent: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        node_store: &NS,\n        point_store: &PS,\n        visitor_info: &VisitorInfo,\n    ) -> Result<()>;\n    fn set_current_node(&mut self, index: usize);\n    fn bounding_box(&self) -> Option<BoundingBox>;\n    fn merge_paths<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        parent: usize,\n        saved_box: Option<BoundingBox>,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        node_store: &NS,\n        point_store: &PS,\n    ) -> Result<()>;\n}\n\n#[repr(C)]\npub struct SmallNodeView {\n    current_node: usize,\n    probability_of_cut: f64,\n    shadow_box_probability_of_cut: f64,\n    mass: usize,\n    depth: usize,\n    leaf_index: usize,\n    leaf_duplicate: bool,\n    use_shadow_box: bool,\n    current_box: Option<BoundingBox>,\n    shadow_box: Option<BoundingBox>,\n}\n\nimpl SmallNodeView {\n    pub fn probability_of_cut(&self) -> f64 {\n        self.probability_of_cut\n    }\n    pub fn shadow_box_probability_of_cut(&self) -> f64 {\n        self.shadow_box_probability_of_cut\n    }\n    pub fn mass(&self) -> usize {\n        self.mass\n    }\n    pub fn depth(&self) -> usize {\n        self.depth\n    }\n    pub fn leaf_index(&self) -> usize {\n        self.leaf_index\n    }\n    pub fn is_duplicate(&self) -> bool {\n        self.leaf_duplicate\n    }\n    pub fn new<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send, NS: NodeStore<Label,Attributes>>(root: usize, _node_store: &NS) -> Self {\n        SmallNodeView {\n            current_node: root,\n            probability_of_cut: f64::MAX, // not feasible; but that is the point!\n            shadow_box_probability_of_cut: f64::MAX,\n            mass: 0,\n            depth: 0,\n            leaf_index: 
usize::MAX,\n            leaf_duplicate: false,\n            use_shadow_box: false,\n            current_box: None,\n            shadow_box: None,\n        }\n    }\n}\n\nimpl<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send> UpdatableNodeView<Label,Attributes> for SmallNodeView {\n\n    fn create<NS: NodeStore<Label,Attributes>>(root: usize, _node_store: &NS) -> Self {\n        SmallNodeView::new(root, _node_store)\n    }\n\n    fn update_at_leaf< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) ->Result<()>{\n        self.leaf_index = node_store.leaf_point_index(index)?;\n        self.mass = node_store.mass(index);\n        self.probability_of_cut = if point_store.is_equal(point, self.leaf_index)? {\n            self.leaf_duplicate = true;\n            0.0\n        } else {\n            self.leaf_duplicate = false;\n            1.0f64\n        };\n        if node_store.use_path_for_box() {\n            self.current_box = Some(node_store.bounding_box(self.current_node, point_store)?);\n        }\n        Ok(())\n    }\n\n    fn update_from_node_traversing_down< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        _index: usize,\n        node_store: &NS,\n        _point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        if node_store.is_left_of(self.current_node, point) {\n            self.current_node = node_store.left_index(self.current_node);\n        } else {\n            self.current_node = node_store.right_index(self.current_node);\n        }\n        self.depth += 1;\n        Ok(())\n    }\n\n    fn update_from_node_traversing_up<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n     
   node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        self.probability_of_cut = match &mut self.current_box {\n            Some(x) => {\n                let sibling = node_store.sibling(self.current_node, index);\n                if self.use_shadow_box {\n                    let z = node_store.bounding_box(sibling, point_store)?;\n                    x.add_box(&z);\n                    match &mut self.shadow_box {\n                        Some(y) => y.add_box(&z),\n                        None => self.shadow_box = Some(z),\n                    }\n                    self.shadow_box_probability_of_cut =\n                        self.shadow_box.as_ref().unwrap().probability_of_cut(point);\n                } else {\n                    node_store.grow_node_box(x, point_store, index, sibling)?;\n                };\n                x.probability_of_cut(point)\n            }\n            None => node_store.probability_of_cut(index, point, point_store)?,\n        };\n        self.current_node = index;\n        self.mass = node_store.mass(index);\n        self.depth -= 1;\n        Ok(())\n    }\n\n    fn current_node(&self) -> usize {\n        self.current_node\n    }\n\n    fn set_use_shadow_box< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(&mut self, node_store: &NS, point_store: &PS) -> Result<()>{\n        self.use_shadow_box = true;\n        // we will maintain a current box since we haver to maintain a shadow box in any case\n        // the shadow box is not set; it can only be set at the next level\n        // when update_from_node_up() is invoked\n        // we will maintain the invariant that *if the shadow is present then the current box also is present*\n        self.current_box = Some(node_store.bounding_box(self.current_node, point_store)?);\n        Ok(())\n    }\n}\n\n#[repr(C)]\npub struct MediumNodeView {\n    current_node: usize,\n    sibling: usize,\n    
probability_of_cut: f64,\n    shadow_box_probablity_of_cut: f64,\n    mass: usize,\n    depth: usize,\n    leaf_index: usize,\n    leaf_duplicate: bool,\n    use_shadow_box: bool,\n    current_box: Option<BoundingBox>,\n    shadow_box: Option<BoundingBox>,\n    cut_dimension: usize,\n    cut_value: f32,\n    point_at_leaf: Vec<f32>,\n}\n\nimpl MediumNodeView {\n    pub fn probability_of_cut(&self) -> f64 {\n        self.probability_of_cut\n    }\n    pub fn shadow_box_probability_of_cut(&self) -> f64 {\n        self.shadow_box_probablity_of_cut\n    }\n    pub fn mass(&self) -> usize {\n        self.mass\n    }\n    pub fn depth(&self) -> usize {\n        self.depth\n    }\n    pub fn leaf_index(&self) -> usize {\n        self.leaf_index\n    }\n    pub fn is_duplicate(&self) -> bool {\n        self.leaf_duplicate\n    }\n    pub fn cut_dimension(&self) -> usize {\n        self.cut_dimension\n    }\n    pub fn cut_value(&self) -> f32 {\n        self.cut_value\n    }\n    pub fn leaf_point(&self) -> Vec<f32> {\n        self.point_at_leaf.clone()\n    }\n    pub fn new<Label: Copy + Sync, Attributes : Copy + Sync>(root: usize, cut_dimension : usize, cut_value: f32, mass: usize ) -> Self {\n        Self {\n            current_node: root,\n            sibling: usize::MAX,\n            probability_of_cut: f64::MAX,\n            shadow_box_probablity_of_cut: f64::MAX,\n            mass,\n            depth: 0,\n            leaf_index: usize::MAX,\n            leaf_duplicate: false,\n            use_shadow_box: false,\n            current_box: None,\n            shadow_box: None,\n            cut_dimension,\n            cut_value,\n            point_at_leaf: Vec::new(),\n        }\n    }\n}\n\nimpl<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send> UpdatableNodeView<Label,Attributes> for MediumNodeView {\n    fn create<NS: NodeStore<Label,Attributes>>(root: usize, node_store: &NS) -> Self {\n        let (cut_dimension, cut_value, _left_child, _right_child) =\n 
           node_store.cut_and_children(root);\n        let mass = node_store.mass(root);\n        Self {\n            current_node: root,\n            sibling: usize::MAX,\n            probability_of_cut: f64::MAX,\n            shadow_box_probablity_of_cut: f64::MAX,\n            mass,\n            depth: 0,\n            leaf_index: usize::MAX,\n            leaf_duplicate: false,\n            use_shadow_box: false,\n            current_box: None,\n            shadow_box: None,\n            cut_dimension,\n            cut_value,\n            point_at_leaf: Vec::new(),\n        }\n    }\n\n    fn update_at_leaf<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        self.leaf_index = node_store.leaf_point_index(index)?;\n        self.point_at_leaf = point_store.copy(self.leaf_index)?;\n        self.mass = node_store.mass(index);\n        self.probability_of_cut = if self.point_at_leaf.eq(point) {\n            self.leaf_duplicate = true;\n            0.0\n        } else {\n            self.leaf_duplicate = false;\n            1.0f64\n        };\n        if node_store.use_path_for_box() {\n            self.current_box = Some(BoundingBox::new(&self.point_at_leaf, &self.point_at_leaf)?);\n        }\n        Ok(())\n    }\n\n    fn update_from_node_traversing_down<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        _index: usize,\n        node_store: &NS,\n        _point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        if node_store.is_left_of(self.current_node, point) {\n            self.current_node = node_store.left_index(self.current_node);\n        } else {\n            self.current_node = node_store.right_index(self.current_node);\n        }\n        self.depth += 1;\n  
      Ok(())\n    }\n\n    fn update_from_node_traversing_up<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        self.probability_of_cut = match &mut self.current_box {\n            Some(x) => {\n                self.sibling = node_store.sibling(self.current_node, index);\n                if self.use_shadow_box {\n                    let z = node_store.bounding_box(self.sibling, point_store)?;\n                    x.add_box(&z);\n                    match &mut self.shadow_box {\n                        Some(y) => y.add_box(&z),\n                        None => self.shadow_box = Some(z),\n                    }\n                    self.shadow_box_probablity_of_cut =\n                        self.shadow_box.as_ref().unwrap().probability_of_cut(point);\n                } else {\n                    node_store.grow_node_box(x, point_store, index, self.sibling)?;\n                };\n                x.probability_of_cut(point)\n            }\n            None => node_store.probability_of_cut(index, point, point_store)?,\n        };\n        self.current_node = index;\n        let (cut_dimension, cut_value, _left_child, _right_child) =\n            node_store.cut_and_children(self.current_node);\n        self.cut_dimension = cut_dimension;\n        self.cut_value = cut_value;\n        self.mass = node_store.mass(index);\n        self.depth -= 1;\n        Ok(())\n    }\n\n    fn current_node(&self) -> usize {\n        self.current_node\n    }\n\n    fn set_use_shadow_box<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(&mut self, node_store: &NS, point_store: &PS) -> Result<()>{\n        self.use_shadow_box = true;\n        // we will maintain a current box since we haver to maintain a shadow box in any case\n        // the shadow box is not set; it can 
only be set at the next level\n        // when update_from_node_up() is invoked\n        // we will maintain the invariant that *if the shadow is present then the current box also is present*\n        self.current_box = Some(node_store.bounding_box(self.current_node, point_store)?);\n        Ok(())\n    }\n}\n\nimpl<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send> UpdatableMultiNodeView<Label,Attributes> for MediumNodeView {\n    fn create<NS: NodeStore<Label,Attributes>>(root: usize, node_store: &NS) -> Self {\n        let (cut_dimension,cut_value,_left_child,_right_child) = node_store.cut_and_children(root);\n        let mass = node_store.mass(root);\n        MediumNodeView::new::<Label,Attributes>(root, cut_dimension,cut_value,mass)\n    }\n\n    fn set_trigger_traversing_down< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        _point: &[f32],\n        _parent: usize,\n        node_store: &NS,\n        _point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) {\n        let (cut_dimension, cut_value, _left_child, _right_child) =\n            node_store.cut_and_children(self.current_node);\n        self.cut_dimension = cut_dimension;\n        self.cut_value = cut_value;\n    }\n\n    fn update_view_to_parent_with_missing_coordinates< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        parent: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        if node_store.use_path_for_box() {\n            let sibling = node_store.sibling(self.current_node, parent);\n            node_store.grow_node_box(\n                self.current_box.as_mut().unwrap(),\n                point_store,\n                parent,\n                sibling,\n            )?;\n            self.probability_of_cut = self\n                .current_box\n      
          .as_ref()\n                .unwrap()\n                .probability_of_cut_with_missing_coordinates(point, missing_coordinates);\n        } else {\n            self.probability_of_cut = node_store.probability_of_cut_with_missing_coordinates(\n                parent,\n                point,\n                missing_coordinates,\n                point_store,\n            )?;\n        }\n        self.current_node = parent;\n        Ok(())\n    }\n\n    fn set_current_node(&mut self, index: usize) {\n        self.current_node = index;\n    }\n\n    fn bounding_box(&self) -> Option<BoundingBox> {\n        match &self.current_box {\n            Some(x) => Some(x.clone()),\n            None => None,\n        }\n    }\n\n    fn merge_paths<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        parent: usize,\n        saved_box: Option<BoundingBox>,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        node_store: &NS,\n        point_store: &PS,\n    ) -> Result<()>{\n        if node_store.use_path_for_box() {\n            // both boxes, current and saved, should be present as invariant\n            self.current_box\n                .as_mut()\n                .unwrap()\n                .add_box(saved_box.as_ref().unwrap());\n            self.probability_of_cut = self\n                .current_box\n                .as_ref()\n                .unwrap()\n                .probability_of_cut_with_missing_coordinates(point, missing_coordinates);\n        } else {\n            self.probability_of_cut = node_store.probability_of_cut_with_missing_coordinates(\n                parent,\n                point,\n                missing_coordinates,\n                point_store,\n            )?;\n        }\n        self.current_node = parent;\n        Ok(())\n    }\n}\n\n#[repr(C)]\npub struct LargeNodeView {\n    current_node: usize,\n    sibling: usize,\n    probability_of_cut: f64,\n    shadow_box_probablity_of_cut: 
f64,\n    mass: usize,\n    depth: usize,\n    leaf_index: usize,\n    leaf_duplicate: bool,\n    use_shadow_box: bool,\n    current_box: Option<BoundingBox>,\n    shadow_box: Option<BoundingBox>,\n    cut_dimension: usize,\n    cut_value: f32,\n    left_child: usize,\n    right_child: usize,\n    point_at_leaf: Vec<f32>,\n}\n\nimpl LargeNodeView {\n    pub fn probability_of_cut(&self) -> f64 {\n        self.probability_of_cut\n    }\n    pub fn shadow_box_probability_of_cut(&self) -> f64 {\n        self.shadow_box_probablity_of_cut\n    }\n    pub fn mass(&self) -> usize {\n        self.mass\n    }\n    pub fn depth(&self) -> usize {\n        self.depth\n    }\n    pub fn leaf_index(&self) -> usize {\n        self.leaf_index\n    }\n    pub fn is_duplicate(&self) -> bool {\n        self.leaf_duplicate\n    }\n    pub fn cut_dimension(&self) -> usize {\n        self.cut_dimension\n    }\n    pub fn cut_value(&self) -> f32 {\n        self.cut_value\n    }\n    pub fn leaf_point(&self) -> Vec<f32> {\n        self.point_at_leaf.clone()\n    }\n    pub fn bounding_box(&self) -> Option<BoundingBox> {\n        match &self.current_box {\n            Some(x) => Some(x.clone()),\n            None => None,\n        }\n    }\n\n    pub fn shadow_box(&self) -> Option<BoundingBox> {\n        match &self.shadow_box {\n            Some(x) => Some(x.clone()),\n            None => None,\n        }\n    }\n    pub fn assign_probability_of_cut(&self, di_vector: &mut DiVector, point: &[f32]) {\n        di_vector.assign_as_probability_of_cut(self.current_box.as_ref().unwrap(), point)\n    }\n    pub fn assign_probability_of_cut_shadow_box(&self, di_vector: &mut DiVector, point: &[f32]) {\n        assert!(self.use_shadow_box, \"shadow box not in use\");\n        di_vector.assign_as_probability_of_cut(self.shadow_box.as_ref().unwrap(), point)\n    }\n\n    pub fn new<Label: Copy + Sync, Attributes : Copy + Sync>(root: usize, cut_dimension : usize, cut_value : f32,\n                       
                                      left_child : usize, right_child: usize, mass: usize) -> Self {\n        Self {\n            current_node: root,\n            sibling: usize::MAX,\n            probability_of_cut: f64::MAX,\n            shadow_box_probablity_of_cut: f64::MAX,\n            mass,\n            depth: 0,\n            leaf_index: usize::MAX,\n            leaf_duplicate: false,\n            use_shadow_box: false,\n            current_box: None,\n            shadow_box: None,\n            cut_dimension,\n            cut_value,\n            left_child,\n            right_child,\n            point_at_leaf: Vec::new(),\n        }\n    }\n}\n\nimpl<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send> UpdatableNodeView<Label,Attributes> for LargeNodeView {\n    fn create<NS: NodeStore<Label,Attributes>>(root: usize, node_store: &NS) -> Self {\n        let (cut_dimension,cut_value,left_child,right_child) = node_store.cut_and_children(root);\n        let mass = node_store.mass(root);\n        LargeNodeView::new::<Label,Attributes>(root, cut_dimension,cut_value,left_child,right_child,mass)\n    }\n\n    fn update_at_leaf< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        self.leaf_index = node_store.leaf_point_index(index)?;\n        self.point_at_leaf = point_store.copy(self.leaf_index)?;\n        self.mass = node_store.mass(index);\n        self.probability_of_cut = if self.point_at_leaf.eq(point) {\n            self.leaf_duplicate = true;\n            0.0\n        } else {\n            self.leaf_duplicate = false;\n            1.0f64\n        };\n        self.current_box = Some(BoundingBox::new(&self.point_at_leaf, &self.point_at_leaf)?);\n        Ok(())\n    }\n\n    fn update_from_node_traversing_down<PS: PointStore<Label,Attributes>, NS: 
NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        _index: usize,\n        node_store: &NS,\n        _point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        if point[self.cut_dimension] <= self.cut_value {\n            self.current_node = self.left_child;\n        } else {\n            self.current_node = self.right_child;\n        }\n        let (cut_dimension, cut_value, left_child, right_child) =\n            node_store.cut_and_children(self.current_node);\n        self.cut_dimension = cut_dimension;\n        self.cut_value = cut_value;\n        self.left_child = left_child;\n        self.right_child = right_child;\n        self.depth += 1;\n        self.mass = node_store.mass(self.current_node);\n        Ok(())\n    }\n\n    fn update_from_node_traversing_up< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        point: &[f32],\n        index: usize,\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        self.sibling = node_store.sibling(self.current_node, index);\n        if self.use_shadow_box {\n            let z = node_store.bounding_box(self.sibling, point_store)?;\n            self.current_box.as_mut().unwrap().add_box(&z);\n            match &mut self.shadow_box {\n                Some(y) => y.add_box(&z),\n                None => self.shadow_box = Some(z),\n            }\n            self.shadow_box_probablity_of_cut =\n                self.shadow_box.as_ref().unwrap().probability_of_cut(point);\n        } else {\n            node_store.grow_node_box(\n                self.current_box.as_mut().unwrap(),\n                point_store,\n                index,\n                self.sibling,\n            )?;\n        };\n        self.probability_of_cut = self.current_box.as_ref().unwrap().probability_of_cut(point);\n\n        self.current_node = index;\n        let (cut_dimension, 
cut_value, left_child, right_child) =\n            node_store.cut_and_children(self.current_node);\n        self.cut_dimension = cut_dimension;\n        self.cut_value = cut_value;\n        self.left_child = left_child;\n        self.right_child = right_child;\n        self.mass = node_store.mass(index);\n        self.depth -= 1;\n        Ok(())\n    }\n\n    fn current_node(&self) -> usize {\n        self.current_node\n    }\n\n    fn set_use_shadow_box< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(&mut self, _node_store: &NS, _point_store: &PS) -> Result<()>{\n        self.use_shadow_box = true;\n        // note that the current box is always maintained\n        Ok(())\n    }\n}\n\nimpl<Label: Copy + Sync, Attributes : Copy + Sync+ Hash + Eq + Send,> UpdatableMultiNodeView<Label,Attributes> for LargeNodeView {\n    fn create<NS: NodeStore<Label,Attributes>>(root: usize, node_store: &NS) -> Self {\n        let (cut_dimension,cut_value,left_child,right_child) = node_store.cut_and_children(root);\n        let mass = node_store.mass(root);\n        LargeNodeView::new::<Label,Attributes>(root, cut_dimension,cut_value,left_child,right_child,mass)\n    }\n\n    fn set_trigger_traversing_down< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        _point: &[f32],\n        _parent: usize,\n        node_store: &NS,\n        _point_store: &PS,\n        _visitor_info: &VisitorInfo,\n    ) {\n        let (cut_dimension, cut_value, _left_child, _right_child) =\n            node_store.cut_and_children(self.current_node);\n        self.cut_dimension = cut_dimension;\n        self.cut_value = cut_value;\n    }\n\n    fn update_view_to_parent_with_missing_coordinates<PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        parent: usize,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        node_store: &NS,\n        point_store: &PS,\n        _visitor_info: 
&VisitorInfo,\n    ) -> Result<()> {\n        if node_store.use_path_for_box() {\n            let sibling = node_store.sibling(self.current_node, parent);\n            node_store.grow_node_box(\n                self.current_box.as_mut().unwrap(),\n                point_store,\n                parent,\n                sibling,\n            )?;\n            self.probability_of_cut = self\n                .current_box\n                .as_ref()\n                .unwrap()\n                .probability_of_cut_with_missing_coordinates(point, missing_coordinates);\n        } else {\n            self.probability_of_cut = node_store.probability_of_cut_with_missing_coordinates(\n                parent,\n                point,\n                missing_coordinates,\n                point_store,\n            )?;\n        }\n        self.current_node = parent;\n        Ok(())\n    }\n\n    fn set_current_node(&mut self, index: usize) {\n        self.current_node = index;\n    }\n\n    fn bounding_box(&self) -> Option<BoundingBox> {\n        match &self.current_box {\n            Some(x) => Some(x.clone()),\n            None => None,\n        }\n    }\n\n    fn merge_paths< PS: PointStore<Label,Attributes>, NS: NodeStore<Label,Attributes>>(\n        &mut self,\n        parent: usize,\n        saved_box: Option<BoundingBox>,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        node_store: &NS,\n        point_store: &PS,\n    ) -> Result <()>{\n        if node_store.use_path_for_box() {\n            // both boxes, current and saved, should be present as invariant\n            self.current_box\n                .as_mut()\n                .unwrap()\n                .add_box(saved_box.as_ref().unwrap());\n            self.probability_of_cut = self\n                .current_box\n                .as_ref()\n                .unwrap()\n                .probability_of_cut_with_missing_coordinates(point, missing_coordinates);\n        } else {\n            
self.probability_of_cut = node_store.probability_of_cut_with_missing_coordinates(\n                parent,\n                point,\n                missing_coordinates,\n                point_store,\n            )?;\n        }\n        self.current_node = parent;\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "Rust/src/samplerplustree/randomcuttree.rs",
    "content": "use std::collections::HashMap;\nuse std::fmt::Debug;\nuse std::hash::Hash;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\n\nuse crate::{\n    pointstore::PointStore,\n    samplerplustree::{\n        boundingbox::BoundingBox,\n        cut::Cut,\n        nodestore::{NodeStore, BasicStore, BoxStore, VectorNodeStore},\n        nodeview::{MediumNodeView, UpdatableMultiNodeView, UpdatableNodeView},\n    },\n    types::{Result, Location},\n    visitor::{\n        imputevisitor::ImputeVisitor,\n        visitor::{SimpleMultiVisitor, Visitor, VisitorInfo},\n    },\n};\nuse crate::errors::RCFError;\nuse crate::util::check_argument;\n\nextern crate rand;\nextern crate rand_chacha;\n\n#[repr(C)]\npub struct RCFTree<C, P, N>\nwhere\n    C: Location,\n    usize: From<C>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>\n{\n    dimensions: usize,\n    capacity: usize,\n    node_store: VectorNodeStore<C, P, N>,\n    random_seed: u64,\n    root: usize,\n    tree_mass: usize,\n    using_transforms: bool,\n    store_attributes: bool,\n    store_pointsum: bool,\n    propagate_attributes: bool,\n}\n\nimpl<C, P, N> RCFTree<C, P, N>\nwhere\n    C: Location,\n    usize: From<C>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>,\n    <C as TryFrom<usize>>::Error: Debug,\n    <P as TryFrom<usize>>::Error: Debug,\n    <N as TryFrom<usize>>::Error: Debug\n{\n    pub fn new(\n        dimensions: usize,\n        capacity: usize,\n        using_transforms: bool,\n        store_attributes: bool,\n        store_pointsum: bool,\n        propagate_attributes: bool,\n        bounding_box_cache_fraction: f64,\n        random_seed: u64,\n    ) -> Result<Self> {\n        let project_to_tree: fn(Vec<f32>) -> Vec<f32> = { |x| x };\n        let node_store = VectorNodeStore::<C, P, N>::new(\n            capacity,\n            dimensions,\n            using_transforms,\n   
         store_attributes,\n            store_pointsum,\n            propagate_attributes,\n            project_to_tree,\n            bounding_box_cache_fraction,\n        )?;\n        let root = node_store.null_node();\n        Ok(RCFTree {\n            dimensions,\n            capacity,\n            using_transforms,\n            store_attributes,\n            store_pointsum,\n            random_seed,\n            node_store,\n            root,\n            tree_mass: 0,\n            propagate_attributes\n        })\n    }\n\n    pub fn add<Label : Copy + Sync ,Attributes: Copy + Sync+ Hash + Eq + Send, PS: PointStore<Label,Attributes>>(\n        &mut self,\n        point_index: usize,\n        point_attribute: usize,\n        point_store: &PS,\n    ) -> Result<usize> {\n        if self.root == self.node_store.null_node() {\n            self.root = self.node_store.leaf_index(point_index);\n            self.tree_mass = 1;\n            if self.store_attributes {\n               self.node_store.add_attrib_at_leaf(point_index,point_attribute)?;\n            }\n            Ok(point_index)\n        } else {\n            let point = &point_store.copy(point_index)?;\n            let mut path_to_root = Vec::new();\n            self.node_store\n                .set_path(&mut path_to_root, self.root, point);\n            let (mut node, mut sibling) = path_to_root.pop().unwrap();\n\n            let leaf_point_index = self.node_store.leaf_point_index(node)?;\n            let old_point = &point_store.copy(leaf_point_index)?;\n\n            self.tree_mass += 1;\n            if point.eq(old_point) {\n                self.node_store.increase_leaf_mass(node)?;\n                if self.store_attributes {\n                    self.node_store.add_attrib_at_leaf(leaf_point_index, point_attribute)?;\n                }\n                self.node_store\n                    .manage_ancestors_add(&mut path_to_root, point, point_store, true)?;\n                return Ok(leaf_point_index);\n 
           } else {\n                let mut saved_parent = if path_to_root.len() != 0 {\n                    path_to_root.last().unwrap().0\n                } else {\n                    self.node_store.null_node()\n                };\n                let mut saved_node = node;\n                let mut current_box = BoundingBox::new(old_point, old_point)?;\n                let mut saved_box = current_box.clone();\n                let mut parent_path: Vec<(usize, usize)> = Vec::new();\n                let mut rng = ChaCha20Rng::seed_from_u64(self.random_seed);\n                self.random_seed = rng.next_u64();\n\n                let mut parent = saved_parent;\n                let mut saved_cut = Cut::new(usize::MAX, 0.0);\n                /* the loop has the execute once */\n                loop {\n                    let factor: f64 = rng.gen();\n                    let (new_cut, separation) =\n                        Cut::random_cut_and_separation(&current_box, factor, point);\n                    if separation {\n                        saved_cut = new_cut;\n                        saved_parent = parent;\n                        saved_node = node;\n                        saved_box = current_box.clone();\n                        parent_path.clear();\n                    } else {\n                        parent_path.push((node, sibling));\n                    }\n                    check_argument(saved_cut.dimension != usize::MAX, \"incorrect state\")?;\n\n                    if parent == self.node_store.null_node() {\n                        break;\n                    } else {\n                        self.node_store.grow_node_box(\n                            &mut current_box,\n                            point_store,\n                            parent,\n                            sibling,\n                        )?;\n                        let (a, b) = path_to_root.pop().unwrap();\n                        node = a;\n                        sibling = b;\n 
                       parent = if path_to_root.len() != 0 {\n                            path_to_root.last().unwrap().0\n                        } else {\n                            self.node_store.null_node()\n                        };\n                    }\n                }\n\n                if saved_parent != self.node_store.null_node() {\n                    while !parent_path.is_empty() {\n                        path_to_root.push(parent_path.pop().unwrap());\n                    }\n                    check_argument(path_to_root.last().unwrap().0 == saved_parent, \"incorrect state\")?;\n                } else {\n                    check_argument(path_to_root.len() == 0, \"incorrect state\")?;\n                }\n                let merged_node = self.node_store.add_node(\n                    saved_parent,\n                    point,\n                    saved_node,\n                    point_index,\n                    saved_cut,\n                    &saved_box,\n                )?;\n\n                if self.store_attributes {\n                    self.node_store.add_attrib_at_leaf(point_index,point_attribute)?;\n                }\n                if self.store_pointsum {\n                    self.node_store.recompute_pointsum(merged_node,point_store)?;\n                }\n                if self.propagate_attributes {\n                    self.node_store.recompute_attribute_vec(merged_node,point_store)?;\n                }\n                if saved_parent != self.node_store.null_node() {\n                    self.node_store.manage_ancestors_add(\n                        &mut path_to_root,\n                        point,\n                        point_store,\n                        false,\n                    )?;\n                } else {\n                    self.root = merged_node;\n                }\n            }\n            Ok(point_index)\n        }\n    }\n\n    pub fn delete<Label : Copy + Sync ,Attributes: Copy + Sync+ Hash + Eq + 
Send,PS:PointStore<Label,Attributes>>(\n        &mut self,\n        point_index: usize,\n        point_attribute: usize,\n        point_store: &PS\n    ) -> Result<usize> {\n        check_argument(self.root != self.node_store.null_node() ,\" deleting from an empty tree\")?;\n\n        self.tree_mass = self.tree_mass - 1;\n        let point = &point_store.copy(point_index)?;\n        let mut leaf_path = Vec::new();\n        self.node_store.set_path(&mut leaf_path, self.root, point);\n        let (leaf_node, leaf_saved_sibling) = leaf_path.pop().unwrap();\n\n        let leaf_point_index = self.node_store.leaf_point_index(leaf_node)?;\n\n        if leaf_point_index != point_index {\n            if !point_store.is_equal(point, leaf_point_index)? {\n                return Err(RCFError::InvalidArgument {\n                    msg: \" deleting wrong node \"\n                });\n            }\n        }\n\n        if self.store_attributes {\n            self.node_store.del_attrib_at_leaf(leaf_point_index, point_attribute)?;\n        }\n\n        if self.node_store.decrease_leaf_mass(leaf_node)? 
== 0 {\n            if leaf_path.len() == 0 {\n                self.root = self.node_store.null_node();\n            } else {\n                let (parent, _sibling) = leaf_path.pop().unwrap();\n                let grand_parent = if leaf_path.len() == 0 {\n                    self.node_store.null_node()\n                } else {\n                    leaf_path.last().unwrap().0\n                };\n\n                if grand_parent == self.node_store.null_node() {\n                    self.root = leaf_saved_sibling;\n                    self.node_store.set_root(self.root);\n                } else {\n                    self.node_store\n                        .replace_node(grand_parent, parent, leaf_saved_sibling);\n                    self.node_store.manage_ancestors_delete(\n                        &mut leaf_path,\n                        point,\n                        point_store,\n                        false,\n                    )?;\n                }\n\n                self.node_store.delete_internal_node(parent)?;\n                if self.store_pointsum {\n                    self.node_store.invalidate_pointsum(parent)?;\n                }\n            }\n        } else {\n            self.node_store\n                .manage_ancestors_delete(&mut leaf_path, point, point_store, true)?;\n        }\n        Ok(leaf_point_index)\n    }\n\n    pub fn conditional_field<Label : Copy + Sync ,Attributes: Copy + Sync+ Hash + Eq + Send,PS: PointStore<Label,Attributes>>(\n        &self,\n        missing: &[usize],\n        point: &[f32],\n        point_store: &PS,\n        centrality: f64,\n        seed: u64,\n        visitor_info: &VisitorInfo,\n    ) -> Result<(f64, usize, f64)> {\n        if self.root == self.node_store.null_node() {\n            return Ok((0.0, usize::MAX, 0.0));\n        }\n        let mut visitor = ImputeVisitor::new(missing, centrality, self.tree_mass, seed);\n        let (cut_dimension, cut_value, _left_child, _right_child) = 
self.node_store.cut_and_children(self.root);\n        let mass = self.node_store.mass(self.root);\n        let mut node_view = MediumNodeView::new::<Label,Attributes>(self.root, cut_dimension,cut_value,mass);\n        let mut missing_coordinates = vec![false; self.dimensions];\n        for i in missing.iter() {\n            missing_coordinates[*i] = true;\n        }\n        self.traverse_multi_with_missing_coordinates(\n            &mut node_view,\n            &mut visitor,\n            visitor_info,\n            point,\n            &missing_coordinates,\n            point_store,\n        )?;\n        visitor.result(&visitor_info)\n    }\n\n    pub fn traverse_multi_with_missing_coordinates<V, NodeView, PS, R, Label, Attributes>(\n        &self,\n        node_view: &mut NodeView,\n        visitor: &mut V,\n        visitor_info: &VisitorInfo,\n        point: &[f32],\n        missing_coordinates: &[bool],\n        point_store: &PS,\n    ) -> Result<()> where\n        V: SimpleMultiVisitor<NodeView, R>,\n        Label: Copy + Sync,\n        Attributes : Copy + Sync+ Hash + Eq + Send,\n        NodeView: UpdatableMultiNodeView<Label,Attributes>,\n        PS: PointStore<Label,Attributes>,\n    {\n        let node = node_view.current_node();\n        if self.node_store.is_leaf(node) {\n            node_view.update_at_leaf(point, node, &self.node_store, point_store, &visitor_info)?;\n            visitor.accept_leaf(point, visitor_info, node_view)?;\n        } else {\n            let parent = node;\n            node_view.set_trigger_traversing_down(\n                point,\n                parent,\n                &self.node_store,\n                point_store,\n                visitor_info,\n            );\n            if missing_coordinates[self.node_store.cut_dimension(parent)] {\n                let right = self.node_store.left_index(parent);\n                let left = self.node_store.right_index(parent);\n                node_view.set_current_node(left);\n            
    self.traverse_multi_with_missing_coordinates(\n                    node_view,\n                    visitor,\n                    visitor_info,\n                    point,\n                    missing_coordinates,\n                    point_store,\n                )?;\n                let saved_box = node_view.bounding_box();\n                node_view.set_current_node(right);\n                self.traverse_multi_with_missing_coordinates(\n                    node_view,\n                    visitor,\n                    visitor_info,\n                    point,\n                    missing_coordinates,\n                    point_store,\n                )?;\n                visitor.combine_branches(point, &node_view, visitor_info)?;\n                if !visitor.is_converged()? {\n                    node_view.merge_paths(\n                        parent,\n                        saved_box,\n                        point,\n                        missing_coordinates,\n                        &self.node_store,\n                        point_store,\n                    )?;\n                }\n            } else {\n                node_view.update_from_node_traversing_down(\n                    point,\n                    parent,\n                    &self.node_store,\n                    point_store,\n                    &visitor_info,\n                )?;\n                self.traverse_multi_with_missing_coordinates(\n                    node_view,\n                    visitor,\n                    visitor_info,\n                    point,\n                    missing_coordinates,\n                    point_store,\n                )?;\n                if !visitor.is_converged()? 
{\n                    node_view.update_view_to_parent_with_missing_coordinates(\n                        parent,\n                        point,\n                        missing_coordinates,\n                        &self.node_store,\n                        point_store,\n                        &visitor_info,\n                    )?;\n                }\n            }\n            if !visitor.is_converged()? {\n                visitor.accept(point, visitor_info, node_view)?;\n            }\n        }\n        Ok(())\n    }\n\n    pub fn get_size(&self) -> usize {\n        self.node_store.size(self.dimensions.into()) + std::mem::size_of::<RCFTree<C, P, N>>()\n    }\n\n    fn traverse_recursive<R, PS, NodeView, V,Label,Attributes>(\n        &self,\n        point: &[f32],\n        node_view: &mut NodeView,\n        visitor: &mut V,\n        visitor_info: &VisitorInfo,\n        point_store: &PS,\n    ) -> Result<()> where\n        PS: PointStore<Label,Attributes>,\n        V: Visitor<NodeView, R>,\n        R: Clone,\n        Label: Copy + Sync,\n        Attributes : Copy + Sync+ Hash + Eq + Send,\n        NodeView: UpdatableNodeView<Label,Attributes>,\n    {\n        let current_node = node_view.current_node();\n        if self.node_store.is_leaf(current_node) {\n            node_view.update_at_leaf(\n                point,\n                current_node,\n                &self.node_store,\n                point_store,\n                &visitor_info,\n            )?;\n            visitor.accept_leaf(point, visitor_info, &node_view)?;\n            if visitor.use_shadow_box() {\n                node_view.set_use_shadow_box(&self.node_store, point_store)?;\n            }\n        } else {\n            node_view.update_from_node_traversing_down(\n                point,\n                current_node,\n                &self.node_store,\n                point_store,\n                visitor_info,\n            )?;\n            self.traverse_recursive(point, node_view, visitor, 
visitor_info, point_store)?;\n            if !visitor.is_converged()? {\n                node_view.update_from_node_traversing_up(\n                    point,\n                    current_node,\n                    &self.node_store,\n                    point_store,\n                    &visitor_info,\n                )?;\n                visitor.accept(point, visitor_info, &node_view)?;\n            }\n        }\n        Ok(())\n    }\n}\n\npub trait Traversable<NodeView, V, R, Label,Attributes>\nwhere\n    V: Visitor<NodeView, R>,\n    Label: Copy + Sync,\n    Attributes: Copy + Sync+ Hash + Eq + Send,\n{\n    fn traverse<PS: PointStore<Label,Attributes>>(\n        &self,\n        point: &[f32],\n        parameters: &[usize],\n        visitor_factory: fn(usize, &[usize], &VisitorInfo) -> V,\n        visitor_info: &VisitorInfo,\n        point_store: &PS,\n        default: &R,\n    ) -> Result<R>;\n}\n\nimpl<C, P, N, NodeView, V, R, Label,Attributes> Traversable<NodeView, V, R,Label,Attributes>\nfor RCFTree<C, P, N>\nwhere\n    C: Location,\n    <C as TryFrom<usize>>::Error: Debug,\n    usize: From<C>,\n    P: Location + Eq + Hash,\n    <P as TryFrom<usize>>::Error: Debug,\n    usize: From<P>,\n    N: Location,\n    <N as TryFrom<usize>>::Error: Debug,\n    usize: From<N>,\n    Label: Copy + Sync,\n    Attributes : Copy + Sync+ Hash + Eq + Send,\n    NodeView: UpdatableNodeView<Label,Attributes>,\n    V: Visitor<NodeView, R>,\n    R: Clone,\n{\n    fn traverse<PS: PointStore<Label,Attributes>>(\n        &self,\n        point: &[f32],\n        parameters: &[usize],\n        visitor_factory: fn(usize, &[usize], &VisitorInfo) -> V,\n        visitor_info: &VisitorInfo,\n        point_store: &PS,\n        default: &R,\n    ) -> Result<R> {\n        if self.root == self.node_store.null_node() {\n            return Ok(default.clone());\n        }\n        let mut visitor = visitor_factory(self.tree_mass, parameters, &visitor_info);\n        let mut node_view = 
NodeView::create(self.root, &self.node_store);\n        self.traverse_recursive(\n            point,\n            &mut node_view,\n            &mut visitor,\n            &visitor_info,\n            point_store,\n        )?;\n        visitor.result(visitor_info)\n    }\n}\n"
  },
  {
    "path": "Rust/src/samplerplustree/sampler.rs",
    "content": "use std::fmt::Debug;\n\nuse crate::types::Max;\nuse crate::util::check_argument;\nuse crate::types::Result;\n\n#[repr(C)]\npub struct Sampler<P> {\n    capacity: usize,\n    weights: Vec<f32>,\n    points: Vec<P>,\n    store_attributes: bool,\n    point_attributes: Vec<P>,\n    current_size: usize,\n    accepted_state: (f32, usize, usize),\n}\n\n#[repr(C)]\npub struct SamplerAcceptState {\n    pub(crate) eviction_occurred: bool,\n    pub(crate) point_index: usize,\n    pub(crate) evicted_weight: f32,\n    pub(crate) point_attribute: usize,\n}\n\nimpl<P: Max + Copy + std::cmp::PartialEq> Sampler<P>\nwhere\n    P: TryFrom<usize>,\n    usize: From<P>,\n{\n    pub fn new(capacity: usize, store_attributes: bool) -> Result<Self> {\n        let attrib_vec: Vec<P> = if store_attributes {\n            vec![P::MAX; capacity]\n        } else {\n            Vec::new()\n        };\n        Ok(Sampler {\n            store_attributes,\n            capacity,\n            weights: vec![0.0; capacity],\n            points: vec![P::MAX; capacity],\n            point_attributes: attrib_vec,\n            accepted_state: (0.0, usize::MAX, usize::MAX),\n            current_size: 0,\n        })\n    }\n\n    pub fn get_references(&self) -> &[P] {\n        &self.points[0..self.current_size]\n    }\n\n    fn swap_down(&mut self, start_index: usize, validate: bool) -> Result<()>{\n        let mut current: usize = start_index;\n        while 2 * current + 1 < self.current_size {\n            let mut max_index: usize = 2 * current + 1;\n            if 2 * current + 2 < self.current_size\n                && self.weights[2 * current + 2] > self.weights[max_index]\n            {\n                max_index = 2 * current + 2;\n            }\n            if self.weights[max_index] > self.weights[current] {\n                check_argument(!validate, \"the heap property is not satisfied\")?;\n                self.swap_weights(current, max_index)?;\n                current = 
max_index;\n            } else {\n                break;\n            }\n        }\n        Ok(())\n    }\n\n    pub fn reheap(&mut self, validate: bool) -> Result<()>{\n        for i in (0..=((self.current_size + 1) / 2)).rev() {\n            self.swap_down(i, validate)?;\n        }\n        Ok(())\n    }\n\n    fn swap_weights(&mut self, a: usize, b: usize) -> Result<()>{\n        check_argument( self.points[a] != P::MAX && self.points[b] != P::MAX, \"incorrect state in sampler swap\")?;\n\n        let tmp: P = self.points[a];\n        self.points[a] = self.points[b];\n        self.points[b] = tmp;\n\n        let tmp_weight: f32 = self.weights[a];\n        self.weights[a] = self.weights[b];\n        self.weights[b] = tmp_weight;\n\n        if self.store_attributes {\n            let tmp_attrib: P = self.point_attributes[a];\n            self.point_attributes[a] = self.point_attributes[b];\n            self.point_attributes[b] = tmp_attrib;\n        }\n        Ok(())\n    }\n\n    pub fn add_point(&mut self, point_index: usize) -> Result<()>\n    where\n        <P as TryFrom<usize>>::Error: Debug,\n    {\n        if point_index != usize::MAX {\n            check_argument(self.current_size < self.capacity.into(), \"sampler full\")?;\n            check_argument(self.accepted_state.1 !=  usize::MAX,\n                \"this method should only be called after a successful call to accept_sample(long)\")?;\n\n            self.weights[self.current_size] = self.accepted_state.0;\n            self.points[self.current_size] = point_index.try_into().unwrap();\n            // note, not self.accepted_state.1, even though we want that to not\n            // P::MAX This corresponds to the change in the index value via\n            // duplicates in the trees\n            if self.store_attributes {\n                self.point_attributes[self.current_size] = if self.accepted_state.2 != usize::MAX {\n                    self.accepted_state.2.try_into().unwrap()\n                } else {\n    
                P::MAX\n                }\n            };\n\n            let mut current = self.current_size;\n            self.current_size += 1;\n\n            while current > 0 {\n                let tmp = (current - 1) / 2;\n                if self.weights[tmp] < self.weights[current] {\n                    self.swap_weights(current, tmp)?;\n                    current = tmp;\n                } else {\n                    break;\n                }\n            }\n            // resetting the state\n            self.accepted_state = (0.0, usize::MAX, usize::MAX);\n        }\n        Ok(())\n    }\n\n    pub fn accept_point(\n        &mut self,\n        initial: bool,\n        weight: f32,\n        point_index: usize,\n        attribute: usize,\n    ) -> Result<SamplerAcceptState> {\n        let mut return_val = (true, usize::MAX, weight, usize::MAX);\n        if initial || (weight < self.weights[0]) {\n            self.accepted_state = (weight, point_index, attribute);\n            if !initial {\n                let partial = self.evict_max()?;\n                return_val = (true, partial.0, partial.1, partial.2);\n            }\n        } else {\n            return_val.0 = false;\n        }\n        Ok(SamplerAcceptState {\n            eviction_occurred: return_val.0,\n            point_index: return_val.1,\n            evicted_weight: return_val.2,\n            point_attribute: return_val.3,\n        })\n    }\n\n    /**\n     * evicts the maximum weight point from the sampler. 
can be used repeatedly to\n     * change the size of the sampler and associated tree\n     */\n\n    pub fn evict_max(&mut self) -> Result<(usize, f32, usize)> {\n        let evicted_attribute_index: usize = if self.store_attributes {\n            self.point_attributes[0].into()\n        } else {\n            usize::MAX\n        };\n\n        let evicted_point = (\n            self.points[0].into(),\n            self.weights[0],\n            evicted_attribute_index,\n        );\n        self.current_size -= 1;\n        let current: usize = self.current_size.into();\n        self.weights[0] = self.weights[current];\n        self.points[0] = self.points[current];\n        if self.store_attributes {\n            self.point_attributes[0] = self.point_attributes[current];\n        }\n        self.swap_down(0, false)?;\n\n        Ok(evicted_point)\n    }\n\n    pub fn is_full(&self) -> bool {\n        self.current_size == self.capacity\n    }\n\n    pub fn get_fill_fraction(&self) -> f64 {\n        if self.current_size == self.capacity {\n            return 1.0;\n        }\n        let fill_fraction: f64 = self.current_size as f64 / self.capacity as f64;\n        fill_fraction\n    }\n\n    pub fn get_size(&self) -> usize {\n        (self.weights.len()) * std::mem::size_of::<f32>()\n            + (self.points.len()) * std::mem::size_of::<P>()\n            + std::mem::size_of::<Sampler<P>>()\n    }\n}\n"
  },
  {
    "path": "Rust/src/samplerplustree/samplerplustree.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\n\nuse std::fmt::Debug;\nuse std::hash::Hash;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\n\nuse crate::{\n    pointstore::PointStore,\n    samplerplustree::{\n        nodestore::VectorNodeStore,\n        nodeview::UpdatableNodeView,\n        randomcuttree::{RCFTree, Traversable},\n        sampler::Sampler,\n    },\n    types::{Location,Result},\n    visitor::visitor::{Visitor, VisitorInfo},\n};\nuse crate::samplerplustree::nodestore::NodeStore;\n\n#[repr(C)]\npub struct SamplerPlusTree<C, P, N>\nwhere\n    C: Location,\n    usize: From<C>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>\n{\n    tree: RCFTree<C, P, N>,\n    sampler: Sampler<P>,\n    using_transforms: bool,\n    time_decay: f64,\n    entries_seen: usize,\n    initial_accept_fraction: f64,\n    random_seed: u64,\n}\n\nimpl<C, P, N> SamplerPlusTree<C, P, N>\nwhere\n    C: Location,\n    usize: From<C>,\n    P: Location + Eq + Hash,\n    usize: From<P>,\n    N: Location,\n    usize: From<N>,\n    <C as TryFrom<usize>>::Error: Debug,\n    <P as TryFrom<usize>>::Error: Debug,\n    <N as TryFrom<usize>>::Error: Debug,\n{\n    pub fn new(\n        dimensions: usize,\n        capacity: usize,\n        using_transforms: bool,\n        random_seed: u64,\n        store_attributes: bool,\n        store_pointsum: bool,\n        propagate_attribute_vectors: bool,\n        time_decay: f64,\n        initial_accept_fraction: f64,\n        bounding_box_cache_fraction: f64,\n    ) -> Result<Self> {\n        let mut rng = ChaCha20Rng::seed_from_u64(random_seed);\n        let self_seed = rng.next_u64();\n\n        Ok(SamplerPlusTree {\n            time_decay,\n            initial_accept_fraction,\n            using_transforms,\n            tree: RCFTree::<C, P, N>::new(\n                dimensions,\n                capacity,\n                using_transforms,\n               
 store_attributes,\n                store_pointsum,\n                propagate_attribute_vectors,\n                bounding_box_cache_fraction,\n                rng.next_u64(),\n            )?,\n            sampler: Sampler::new(capacity, store_attributes)?,\n            entries_seen: 0,\n            random_seed: self_seed,\n        })\n    }\n\n    pub fn update<Label : Copy + Sync ,Attributes: Copy + Sync+ Hash + Eq + Send, PS: PointStore<Label,Attributes>>(\n        &mut self,\n        point_index: usize,\n        point_attribute: usize,\n        point_store: &PS\n    ) -> Result<((usize, usize),(usize,usize))> {\n        if point_index != usize::MAX {\n            let mut initial = false;\n            let mut rng = ChaCha20Rng::seed_from_u64(self.random_seed);\n            self.random_seed = rng.next_u64();\n            let random_number: f64 = rng.gen();\n            let weight: f64 =\n                f64::ln(-f64::ln(random_number)) - ((self.entries_seen as f64) * self.time_decay);\n            if !self.sampler.is_full() {\n                let other_random: f64 = rng.gen();\n                let fill_fraction: f64 = self.sampler.get_fill_fraction();\n                initial = other_random < self.initial_accept_probability(fill_fraction);\n            }\n            let accept_state =\n                self.sampler\n                    .accept_point(initial, weight as f32, point_index, point_attribute)?;\n\n            self.entries_seen += 1;\n            if accept_state.eviction_occurred {\n                let delete_ref = if accept_state.point_index != usize::MAX {\n                    (self.tree.delete(\n                        accept_state.point_index,\n                        accept_state.point_attribute,\n                        point_store,\n                    )?,accept_state.point_attribute)\n                } else {\n                    (usize::MAX,usize::MAX)\n                };\n\n                // the tree may choose to return a reference to an 
existing point\n                // whose value is equal to `point`\n                let added_ref = self.tree.add(point_index, point_attribute, point_store)?;\n\n                self.sampler.add_point(added_ref)?;\n                return Ok(((added_ref,point_attribute), delete_ref));\n            }\n        }\n        Ok(((usize::MAX, usize::MAX),(usize::MAX,usize::MAX)))\n    }\n\n    fn initial_accept_probability(&self, fill_fraction: f64) -> f64 {\n        return if fill_fraction < self.initial_accept_fraction {\n            1.0\n        } else if self.initial_accept_fraction >= 1.0 {\n            0.0\n        } else {\n            1.0 - (fill_fraction - self.initial_accept_fraction)\n                / (1.0 - self.initial_accept_fraction)\n        };\n    }\n\n    pub fn simple_traversal<NodeView, V, R, PS,Label,Attributes>(\n        &self,\n        point: &[f32],\n        point_store: &PS,\n        parameters: &[usize],\n        visitor_info: &VisitorInfo,\n        visitor_factory: fn(usize, &[usize], &VisitorInfo) -> V,\n        default: &R,\n    ) -> Result<R>\n    where\n        NodeView: UpdatableNodeView<Label,Attributes>,\n        V: Visitor<NodeView, R>,\n        Label : Sync + Copy,\n        Attributes: Copy + Sync+ Hash + Eq + Send,\n        PS: PointStore<Label,Attributes>,\n        R: Clone,\n    {\n        self.tree.traverse(\n            point,\n            parameters,\n            visitor_factory,\n            visitor_info,\n            point_store,\n            default,\n        )\n    }\n\n    pub fn conditional_field<Label : Copy + Sync ,Attributes: Copy + Sync+ Hash + Eq + Send, PS>(\n        &self,\n        positions: &[usize],\n        centrality: f64,\n        point: &[f32],\n        point_store: &PS,\n        visitor_info: &VisitorInfo,\n    ) -> Result<(f64, usize, f64)>\n    where\n        PS: PointStore<Label,Attributes>\n    {\n        self.tree.conditional_field(\n            positions,\n            point,\n            point_store,\n  
          centrality,\n            self.random_seed,\n            visitor_info,\n        )\n    }\n\n    pub fn get_size(&self) -> usize {\n        self.tree.get_size()\n            + self.sampler.get_size()\n            + std::mem::size_of::<SamplerPlusTree<C, P, N>>()\n    }\n}\n"
  },
  {
    "path": "Rust/src/trcf/basicthresholder.rs",
    "content": "\nuse crate::common::deviation::Deviation;\nuse crate::trcf::types::TransformMethod;\nuse crate::trcf::types::TransformMethod::{DIFFERENCE, NONE, NORMALIZE_DIFFERENCE};\nuse crate::util::{maxf32, minf32};\nuse crate::types::Result;\n\nconst DEFAULT_ELASTICITY : f32= 0.01;\nconst DEFAULT_SCORE_DIFFERENCING: f32 = 0.5;\n\nconst DEFAULT_MINIMUM_SCORES : i32 = 10;\nconst DEFAULT_ABSOLUTE_THRESHOLD : f32= 0.8;\nconst DEFAULT_ABSOLUTE_SCORE_FRACTION :f32 = 0.5;\nconst DEFAULT_LOWER_THRESHOLD :f32 = 1.0;\nconst DEFAULT_INITIAL_THRESHOLD :f32= 1.5;\nconst DEFAULT_Z_FACTOR :f32 = 3.0;\nconst MINIMUM_Z_FACTOR : f32 = 2.0;\nconst DEFAULT_FACTOR_ADJUSTMENT_THRESHOLD : f32 = 0.9;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct BasicThresholder {\n    elasticity: f32,\n    count: i32,\n    score_differencing: f32,\n    last_score: f32,\n    primary_deviation: Deviation,\n    secondary_deviation: Deviation,\n    threshold_deviation: Deviation,\n    auto_threshold: bool,\n    absolute_threshold:f32,\n    absolute_score_fraction: f32,\n    lower_threshold: f32,\n    factor_adjustment_threshold: f32,\n    initial_threshold: f32,\n    z_factor: f32,\n    minimum_scores: i32,\n}\n\nimpl BasicThresholder {\n    pub fn new_adjustible(discount : f64, adjust:bool) -> Result<Self> {\n        Ok(BasicThresholder{\n            elasticity: DEFAULT_ELASTICITY,\n            count: 0,\n            score_differencing: DEFAULT_SCORE_DIFFERENCING,\n            last_score: 0.0,\n            primary_deviation: Deviation::new(discount as f64)?,\n            secondary_deviation: Deviation::new(discount as f64)?,\n            threshold_deviation: Deviation::new(discount as f64/2.0)?,\n            auto_threshold: adjust,\n            absolute_threshold: DEFAULT_ABSOLUTE_THRESHOLD,\n            absolute_score_fraction: DEFAULT_ABSOLUTE_SCORE_FRACTION,\n            lower_threshold: DEFAULT_LOWER_THRESHOLD,\n            factor_adjustment_threshold: DEFAULT_FACTOR_ADJUSTMENT_THRESHOLD,\n          
  initial_threshold: DEFAULT_INITIAL_THRESHOLD,\n            z_factor: DEFAULT_Z_FACTOR,\n            minimum_scores: DEFAULT_MINIMUM_SCORES\n        })\n    }\n\n    pub fn new(discount : f64) -> Result<Self>{\n        BasicThresholder::new_adjustible(discount,false)\n    }\n\n    pub fn is_deviation_ready(&self) -> bool {\n        if self.count < self.minimum_scores {\n            return false;\n        }\n        if self.score_differencing != 0.0 {\n            return self.primary_deviation.count() >= self.minimum_scores;\n        }\n        return true;\n    }\n\n    fn intermediate_fraction(&self) -> f32 {\n        if self.count < self.minimum_scores {\n            return 0.0;\n        } else if self.count > 2 * self.minimum_scores {\n            return 1.0;\n        } else {\n            return (self.count - self.minimum_scores) as f32 * 1.0 / self.minimum_scores as f32;\n        }\n    }\n\n    fn adjusted_factor(&self, factor : f32, method:TransformMethod, _dimension:usize) -> f32 {\n        let base = self.primary_deviation.mean();\n        let corrected_factor = if (base as f32) < self.factor_adjustment_threshold && method != NONE {\n            (base as f32) * factor / self.factor_adjustment_threshold\n        } else {\n            factor\n        };\n        if corrected_factor < MINIMUM_Z_FACTOR {\n            MINIMUM_Z_FACTOR\n        } else {\n            corrected_factor\n        }\n    }\n\n    fn long_term_deviation(&self, method : TransformMethod, shingle_size:usize) -> f32{\n        if shingle_size == 1 && !(method == DIFFERENCE || method == NORMALIZE_DIFFERENCE) {\n            minf32((f64::sqrt(2.0)*self.threshold_deviation.mean()) as f32,self.primary_deviation.mean() as f32)\n        } else {\n            let mut first = self.primary_deviation.deviation();\n            let t = f64::sqrt(2.0) * self.threshold_deviation.deviation();\n            if t < first {\n                first = t;\n            }\n            if 
self.secondary_deviation.deviation() < first {\n                first = self.secondary_deviation.deviation();\n            }\n            self.score_differencing * (first as f32) + (1.0 - self.score_differencing) * (self.secondary_deviation.deviation() as f32)\n        }\n    }\n\n    pub fn threshold_and_grade(&self, score : f32, method: TransformMethod, dimension : usize,\n                               shingle_size : usize)  -> ( f32, f32) {\n        self.threshold_and_grade_with_factor(score,self.z_factor,method,dimension,shingle_size)\n    }\n\n    pub fn threshold_and_grade_with_factor(&self, score : f32, factor : f32, method: TransformMethod, dimension : usize,\n                                   shingle_size : usize)  -> ( f32, f32) {\n        let intermediate_fraction = self.intermediate_fraction();\n        let new_factor = self.adjusted_factor(factor, method, dimension);\n        let long_term = self.long_term_deviation(method, shingle_size);\n        let scaled_deviation = (new_factor - 1.0) * long_term + self.primary_deviation.deviation() as f32;\n\n        let mut absolute = self.absolute_threshold;\n        let t = self.primary_deviation.mean() as f32;\n        if self.auto_threshold && intermediate_fraction >= 1.0 && t <\n            self.factor_adjustment_threshold {\n            absolute = t * absolute / self.factor_adjustment_threshold;\n        }\n        let threshold = if !self.is_deviation_ready() {\n            maxf32(self.initial_threshold,absolute)\n        } else {\n            let t = intermediate_fraction * (self.primary_deviation.mean() as f32 + scaled_deviation) +\n                (1.0 - intermediate_fraction) * self.initial_threshold;\n            maxf32(t,absolute)\n        };\n\n        if   (score as f32) < threshold || threshold == 0.0 {\n            return (threshold, 0.0);\n        } else {\n            let mut t = self.surprise_index(score, threshold, new_factor, scaled_deviation / new_factor);\n            t = 
minf32(f32::floor(t * 20.0) / 16.0,1.0);\n            if t > 0.0 { (threshold, t) } else {\n                (score as f32, 0.0)\n            }\n        }\n    }\n\n    fn surprise_index(&self, score: f32, base : f32, factor: f32, deviation : f32)  -> f32 {\n        if self.is_deviation_ready() {\n            let mut t_factor = 2.0 * factor;\n            if deviation > 0.0 {\n                let z = (score as f32 - base) / deviation;\n                t_factor = minf32(z,factor);\n            }\n            t_factor = t_factor/factor;\n            maxf32(t_factor,0.0)\n        } else {\n            let t = ((score as f32) - self.absolute_threshold) / self.absolute_threshold;\n            minf32(1.0,maxf32(t,0.0))\n        }\n    }\n\n\n    pub fn threshold(&self) -> f32 {\n        self.primary_deviation.mean() as f32 + self.z_factor * self.primary_deviation.deviation() as f32\n    }\n\n    pub fn primary_grade(&self, score : f32) -> f32 {\n        if !self.is_deviation_ready() {\n            return 0.0;\n        }\n        let threshold = self.threshold();\n        let mut t = score - threshold;\n        let deviation = self.primary_deviation.deviation() as f32;\n        if t>0.0 {\n            if deviation > 0.0 {\n                t = t/(deviation);\n                return minf32(t,1.0);\n            } else {\n                return 0.1;\n            }\n        } else {\n            return 0.0;\n        }\n    }\n\n    pub fn primary_threshold_and_grade(&self, score: f32) -> (f32,f32) {\n        (self.threshold(),self.primary_grade(score))\n    }\n\n    pub fn update_threshold(&mut self, score:f32) {\n        let gap : f32 = score - self.primary_deviation.mean() as f32;\n        if gap>0.0 {\n            self.threshold_deviation.update(gap as f64);\n        }\n    }\n\n    pub fn update_primary(&mut self, score : f64) {\n        self.last_score = score as f32;\n        self.primary_deviation.update(score);\n        self.update_threshold(score as f32);\n        
self.count += 1;\n    }\n\n    pub fn update_both(&mut self, primary : f32, secondary : f32) {\n        self.last_score = primary;\n        self.primary_deviation.update(primary as f64);\n        self.secondary_deviation.update(secondary as f64);\n        self.update_threshold(primary as f32);\n        self.count += 1;\n    }\n\n    pub fn update(&mut self, primary: f32, secondary: f32, last_score: f32){\n        self.update_both(minf32(2.0,primary),secondary - last_score);\n    }\n\n\n    pub fn z_factor(&self) -> f32 {\n        self.z_factor\n    }\n\n    pub fn set_z_factor(&mut self, factor : f32){\n        self.z_factor = factor;\n    }\n\n    // the next set of functions maintain the invariant that\n    // absolute_threshold <= lower_threshold < initial_threshold <= upper_threshold\n    // absolute_threshold <= lower_threshold < 2.0 *lower_threshold <= upper_threshold\n    // to increase proceed as upper_threshold, initial_threshold, lower_threshold, absolute_threshold\n    // to decrease proceed in reverse of the above order\n\n    pub fn set_lower_threshold(&mut self, lower : f32) {\n        self.lower_threshold = lower;\n    }\n\n    pub fn set_absolute_threshold(&mut self, value:f32) {\n        self.absolute_threshold = value;\n    }\n\n    pub fn set_initial_threshold(&mut self, initial : f32) {\n        self.initial_threshold =  initial;\n    }\n\n    pub fn set_score_differencing(&mut self, horizon:f32) {\n        assert!(horizon >= 0.0 && horizon <= 1.0, \"incorrect horizon parameter\");\n        self.score_differencing = horizon;\n    }\n\n    pub fn last_score(&self) -> f32{\n        self.last_score\n    }\n\n    pub fn primary_mean(&self) -> f64 {\n        self.primary_deviation.mean()\n    }\n\n    pub fn primary_deviation(&self) -> f64 {\n        self.primary_deviation.deviation()\n    }\n\n}\n"
  },
  {
    "path": "Rust/src/trcf/basictrcf.rs",
    "content": "use std::hash::Hash;\nuse rand::Rng;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::SeedableRng;\nuse crate::common::descriptor::Descriptor;\nuse crate::common::deviation::Deviation;\nuse crate::common::rangevector::RangeVector;\nuse crate::rcf::{AugmentedRCF, RCF, RCFBuilder, RCFOptions};\nuse crate::types::{Result};\nuse crate::trcf::predictorcorrector::PredictorCorrector;\nuse crate::trcf::preprocessor::{Preprocessor, PreprocessorBuilder};\nuse crate::trcf::types::ForestMode::{STANDARD, STREAMING_IMPUTE, TIME_AUGMENTED};\nuse crate::trcf::types::{ForestMode, ImputationMethod, ScoringStrategy, TransformMethod};\nuse crate::trcf::types::TransformMethod::{NONE, NORMALIZE};\nuse crate::util::check_argument;\nuse crate::rcf::RCFOptionsBuilder;\nuse crate::trcf::types::ImputationMethod::USE_RCF;\nuse crate::trcf::types::ScoringStrategy::{DISTANCE, EXPECTED_INVERSE_HEIGHT};\n\n#[repr(C)]\n#[derive(Clone)]\npub struct Bandit {\n    id: u64,\n    current_model: usize,\n    switches: usize,\n    affirmations: usize,\n    interval: (usize, usize),\n    stats: Vec<Deviation>,\n}\n\n#[repr(C)]\n#[derive(Clone)]\npub struct State {\n    pub id: u64,\n    random_seed : u64,\n    pub scoring_strategy: ScoringStrategy,\n    pub bandit: Bandit,\n    pub predictor_corrector: PredictorCorrector,\n    pub preprocessor: Preprocessor,\n    pub last_descriptor : Descriptor,\n}\n\nimpl Bandit{\n   pub fn new(id: u64, arms:usize, interval:(usize,usize)) -> Result<Self>{\n       Ok(Bandit{\n           id,\n           switches : 0,\n           affirmations : 0,\n           current_model: if arms==1 {0} else {arms},\n           interval,\n           stats: vec![Deviation::new(0.0)?;arms]\n       })\n   }\n\n    pub fn current_model(&self) -> usize {\n        self.current_model\n    }\n\n    pub fn is_evaluating(&self,arms:usize, internal_timestamp:usize) -> bool {\n        arms == self.current_model || (internal_timestamp >= self.interval.0 && self.interval.1 >= 
internal_timestamp)\n    }\n\n    pub fn update(&mut self, scores:&[f32]) -> Result<()>{\n        check_argument(scores.len() == self.stats.len(), \"incorrect length\")?;\n        self.stats.iter_mut().zip(scores).for_each(|(x,&y)|\n            {if y>0.0 {\n                x.update(y as f64);\n            }});\n        Ok(())\n    }\n\n    // no assumptions are made other than the fact that the scores are +ve and lower is better\n    pub fn choose(&mut self, internal_timestamp: usize, shingle_size: usize, random: f32){\n        if self.current_model == self.stats.len() || self.interval.1 == internal_timestamp {\n            let mut min = self.stats.len();\n            let mut min_value = f32::MAX;\n            for i in 0..self.stats.len() {\n                if !self.stats[i].is_empty() && min_value > self.stats[i].mean() as f32 {\n                    min = i;\n                    min_value = self.stats[i].mean() as f32;\n                    self.stats[i].reset();\n                }\n            }\n            if self.current_model != self.stats.len() {\n                if self.current_model != min {\n                    self.switches += 1;\n                } else {\n                    self.affirmations += 1;\n                }\n            }\n            self.current_model = min;\n            if self.current_model != self.stats.len() {\n                let gap = ((1.0 + random) * self.interval.1 as f32) as usize + 3 * shingle_size;\n                self.interval = (self.interval.0 + gap, self.interval.1 + gap);\n            }\n        }\n    }\n    pub fn switches(&self) -> usize {\n        self.switches\n    }\n\n    pub fn affirmations(&self) -> usize {\n        self.affirmations\n    }\n}\n\nimpl State {\n    pub fn new(id: u64, arms: usize, scoring_strategy: ScoringStrategy, predictor_corrector : PredictorCorrector, preprocessor: Preprocessor) -> Result<Self>{\n        let random_seed = ChaCha20Rng::from_entropy().gen::<u64>();\n        let last_descriptor = 
Descriptor::new(id,&Vec::new(),0,preprocessor.forest_mode() == TIME_AUGMENTED,None);\n        let base = preprocessor.start_normalization();\n        let interval = (base, base + 3 * preprocessor.shingle_size());\n        Ok(State{\n            id,\n            random_seed,\n            scoring_strategy,\n            bandit: Bandit::new(id,arms,interval)?,\n            predictor_corrector,\n            preprocessor,\n            last_descriptor,\n        })\n    }\n\n    pub fn random(&mut self) -> f32 {\n    let mut rng = ChaCha20Rng::seed_from_u64( self.random_seed);\n    self.random_seed = rng.gen::<u64>();\n    rng.gen::<f32>()\n    }\n}\n\npub struct BasicTRCF {\n    rcf : Box<dyn RCF + Send + Sync>,\n    state: State\n}\n\npub fn core_process<U :?Sized, Label:Sync + Copy, Attributes: Sync + Copy>(rcf: Option<&Box<U>>, state : &mut State, point: &[f32], timestamp: u64) -> Result<Descriptor>\n    where U : AugmentedRCF<Label,Attributes> {\n    let mut result = Descriptor::new(state.id,point,timestamp,state.preprocessor.forest_mode() == TIME_AUGMENTED,None);\n    result.values_seen = state.preprocessor.values_seen();\n    // the check for input length is done in exactly this place\n    //along with Nan/finiteness\n    match state.preprocessor.shingled_point(rcf, point, timestamp)? 
{\n        Some(x) => {\n            result.rcf_point = Some(x);\n            result.shift = Some(state.preprocessor.shift());\n            result.scale = Some(state.preprocessor.scale());\n            result.difference_deviations = Some(state.preprocessor.difference_deviations());\n            result.scoring_strategy = state.scoring_strategy;\n            result.transform_method = state.preprocessor.transformation_method();\n            if state.preprocessor.is_ready() {\n                match rcf {\n                    Some(y) => state.predictor_corrector.detect_and_modify(&mut result, &state.last_descriptor, state.preprocessor.shingle_size(), y)?,\n                    _ =>{}\n                }\n            }\n        },\n        None => {}\n    }\n    state.preprocessor.post_process(&mut result, point, timestamp, &state.last_descriptor)?;\n    if result.anomaly_grade > 0.0 {\n        state.last_descriptor = result.clone();\n    } else {\n        state.last_descriptor.values_seen = result.values_seen;\n        state.last_descriptor.current_timestamp = result.current_timestamp;\n        state.last_descriptor.anomaly_grade = 0.0;\n        state.last_descriptor.rcf_point = result.rcf_point.clone();\n        state.last_descriptor.score = result.score;\n        state.last_descriptor.threshold = result.threshold;\n    }\n    Ok(result)\n}\n\nimpl BasicTRCF {\n\n    pub fn process(&mut self, point: &[f32], timestamp: u64) -> Result<Descriptor>{\n        let result = core_process(Some(&self.rcf), &mut self.state, point, timestamp)?;\n        match result.rcf_point.as_ref() {\n            // this path would be taken for all un-normalized transformations\n            // relies on internal shingling\n            Some(x) => {\n                let dimension = x.len();\n                let shingle_size = self.state.preprocessor.shingle_size();\n                self.rcf.update(&x[(dimension - (dimension / shingle_size))..dimension], timestamp as u64)?;\n            },\n         
   _ => { self.state.preprocessor.drain(Some(&mut self.rcf))?; }\n        }\n        Ok(result)\n    }\n\n    pub fn process_sequentially(&mut self, input: &[(&[f32],u64)]) -> Result<Vec<Descriptor>> {\n        let answer = input.into_iter().map(|(a, b)| self.process(*a, *b))\n           .collect::<Result<Vec<Descriptor>>>()?.into_iter().filter(|x| x.anomaly_grade>0.0).collect();\n        Ok(answer)\n    }\n\n    pub fn extrapolate(&self, look_ahead: usize) -> Result<(RangeVector<f32>,Option<RangeVector<f64>>)> {\n        self.state.preprocessor.invert_extrapolation(self.rcf.extrapolate(look_ahead)?)\n    }\n\n}\n\npub struct TRCFOptions {\n    pub(crate) transform_decay: Option<f64>,\n    pub(crate) transform_method: TransformMethod,\n    pub(crate) forest_mode : ForestMode,\n    pub(crate) verbose: bool,\n    pub(crate) weights: Option<Vec<f32>>,\n    pub(crate) default_fill : Option<Vec<f32>>,\n    pub(crate) start_normalization: usize,\n    pub(crate) stop_normalization:usize,\n    pub(crate) scoring_strategy: ScoringStrategy,\n}\n\npub trait TRCFOptionsBuilder {\n    fn get_trcf_options(&mut self) -> &mut TRCFOptions;\n    fn transform_decay(&mut self, transform_decay: f64) -> &mut Self {\n        self.get_trcf_options().transform_decay = Some(transform_decay);\n        self\n    }\n    fn forest_mode(&mut self, forest_mode: ForestMode) -> &mut Self {\n        self.get_trcf_options().forest_mode = forest_mode;\n        self\n    }\n    fn transform_method(&mut self, transform_method: TransformMethod) -> &mut Self {\n        self.get_trcf_options().transform_method = transform_method;\n        self\n    }\n    fn start_normalization(&mut self, start_normalization: usize) -> &mut Self {\n        self.get_trcf_options().start_normalization = start_normalization;\n        self\n    }\n    fn stop_normalization(&mut self, stop_normalization: usize) -> &mut Self {\n        self.get_trcf_options().stop_normalization = stop_normalization;\n        self\n    }\n    fn 
weights(&mut self, weights: &[f32]) -> &mut Self {\n        self.get_trcf_options().weights = Some(Vec::from(weights));\n        self\n    }\n    fn default_fill(&mut self, default_fill: &[f32]) -> &mut Self {\n        self.get_trcf_options().default_fill = Some(Vec::from(default_fill));\n        self\n    }\n    fn verbose(&mut self, verbose: bool) -> &mut Self {\n        self.get_trcf_options().verbose = verbose;\n        self\n    }\n    fn scoring_strategy(&mut self, scoring_strategy: ScoringStrategy) -> &mut Self {\n        self.get_trcf_options().scoring_strategy = scoring_strategy;\n        self\n    }\n}\n\nimpl TRCFOptions{\n    pub fn validate(&self, input_dimensions: usize) ->Result<()> {\n        check_argument(self.transform_decay.unwrap_or(0.0) >= 0.0, \"transform decay cannot be negative\")?;\n        // juct check -- the builder should not be modified in case it is reused\n        check_argument(self.weights.as_ref().unwrap_or(&vec![1.0; input_dimensions]).len() == input_dimensions,\n                       \" incorrect length of weight vector\")?;\n        check_argument(self.default_fill.as_ref().unwrap_or(&vec![0.0; input_dimensions]).len() == input_dimensions,\n                       \" incorrect length of default_fill vector\")?;\n        check_argument(self.start_normalization <= self.stop_normalization, \"normalization cannot start cannot be after stopping\")?;\n        Ok(())\n    }\n}\n\nimpl Default for TRCFOptions {\n    fn default() -> Self {\n       TRCFOptions {\n           transform_decay: None,\n           transform_method: NORMALIZE,\n           forest_mode: STANDARD,\n           verbose: false,\n           weights: None,\n           default_fill: None,\n           start_normalization: 10,\n           stop_normalization: usize::MAX,\n           scoring_strategy: EXPECTED_INVERSE_HEIGHT\n       }\n    }\n}\n\npub struct BasicTRCFBuilder {\n    input_dimensions: usize,\n    shingle_size: usize,\n    trcf_options: TRCFOptions,\n    
rcf_options : RCFOptions<u64,u64>\n}\n\nimpl BasicTRCFBuilder {\n    pub fn new(input_dimensions: usize, shingle_size: usize) -> Self {\n        BasicTRCFBuilder {\n            input_dimensions,\n            shingle_size,\n            rcf_options: RCFOptions::default(),\n            trcf_options: TRCFOptions::default()\n        }\n    }\n\n    pub fn build(&self) -> Result<BasicTRCF> {\n        check_argument(self.trcf_options.forest_mode!= STREAMING_IMPUTE, \"not yet supported\")?;\n        check_argument( self.input_dimensions > 0, \"input_dimensions cannot be 0\")?;\n        check_argument( self.shingle_size > 0, \"shingle size cannot be 0\")?;\n        self.rcf_options.validate()?;\n        self.trcf_options.validate(self.input_dimensions)?;\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let time_decay = self.rcf_options.time_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let transform_decay = self.trcf_options.transform_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let weights = match &self.trcf_options.weights {\n            Some(x) => x.clone(),\n            _ => vec![1.0; self.input_dimensions],\n        };\n        let random_seed = self.rcf_options.random_seed.unwrap_or( ChaCha20Rng::from_entropy().gen::<u64>());\n        let rcf = RCFBuilder::<u64,u64>::new(self.input_dimensions,self.shingle_size)\n            .tree_capacity(self.rcf_options.capacity).number_of_trees(self.rcf_options.number_of_trees)\n            .random_seed(random_seed)\n            .parallel_enabled(self.rcf_options.parallel_enabled).time_decay(time_decay)\n            .bounding_box_cache_fraction(self.rcf_options.bounding_box_cache_fraction)\n            .output_after(output_after)\n            .initial_accept_fraction(self.rcf_options.initial_accept_fraction).build_default()?;\n        let preprocessor = PreprocessorBuilder::new(self.input_dimensions,self.shingle_size)\n            
.transform_decay(transform_decay)\n            .transform_method(self.trcf_options.transform_method)\n            .forest_mode(self.trcf_options.forest_mode)\n            .random_seed(random_seed+1)\n            .weights(&weights)\n            .start_normalization(self.trcf_options.start_normalization)\n            .stop_normalization(self.trcf_options.stop_normalization).build()?;\n        let predictor_corrector = PredictorCorrector::new(transform_decay,!self.trcf_options.verbose,self.input_dimensions)?;\n\n        Ok(BasicTRCF {\n            rcf,\n            state: State::new(0, 1, self.trcf_options.scoring_strategy,predictor_corrector, preprocessor)?\n        })\n    }\n}\n\nimpl RCFOptionsBuilder<u64, u64> for BasicTRCFBuilder {\n    fn get_rcf_options(&mut self) -> &mut RCFOptions<u64, u64> {\n        &mut self.rcf_options\n    }\n}\n\nimpl TRCFOptionsBuilder for BasicTRCFBuilder {\n    fn get_trcf_options(&mut self) -> &mut TRCFOptions {\n        &mut self.trcf_options\n    }\n}"
  },
  {
    "path": "Rust/src/trcf/errorhandler.rs",
    "content": "\nuse crate::common::descriptor::{Descriptor, ErrorInformation};\nuse crate::common::divector::DiVector;\nuse crate::common::rangevector::RangeVector;\nuse crate::trcf::types::Calibration;\nuse crate::util::{check_argument, maxf32, minf32};\nuse crate::types::{Result};\n\n#[repr(C)]\n#[derive(Clone)]\npub struct ErrorHandler {\n    add_error : bool,\n    input_length: usize,\n    sequence_index: usize,\n    percentile: f32,\n    forecast_horizon: usize,\n    error_horizon: usize,\n    past_forecasts: Vec<RangeVector<f32>>,\n    actuals: Vec<Vec<f32>>,\n    error_distribution: RangeVector<f32>,\n    error_rmse: DiVector,\n    error_mean: Vec<f32>,\n    interval_precision: Vec<f32>,\n    last_deviations: Vec<f32>\n}\n\nimpl ErrorHandler {\n    pub fn new(add_error: bool, input_length: usize, forecast_horizon: usize, error_horizon: usize, percentile: f32) -> Self {\n        ErrorHandler {\n            add_error,\n            input_length,\n            sequence_index: 0,\n            percentile,\n            forecast_horizon,\n            error_horizon,\n            past_forecasts: Vec::new(),\n            actuals: Vec::new(),\n            error_distribution: RangeVector::<f32>::new(input_length*forecast_horizon),\n            error_rmse: DiVector::empty(input_length*forecast_horizon),\n            error_mean: vec![0.0;input_length*forecast_horizon],\n            interval_precision: vec![0.0;input_length*forecast_horizon],\n            last_deviations: vec![0.0;input_length]\n        }\n    }\n\n    pub fn update_actuals(&mut self, input : &[f32], deviations : &[f32]) -> Result<()>{\n        let array_length = self.past_forecasts.len();\n        let input_length = input.len();\n        check_argument(self.input_length == input_length, \"incorrect input\")?;\n        if self.sequence_index > 0 {\n            let input_index = (self.sequence_index + array_length - 1) % array_length;\n            if self.sequence_index < array_length + 1 {\n                
check_argument(self.actuals.len() == input_index, \"incorrect accounting\")?;\n                self.actuals.push(Vec::from(input));\n            } else {\n                for (x,y) in self.actuals[input_index].iter_mut().zip(input) {\n                    *x = *y;\n                }\n            }\n        }\n\n        self.sequence_index += 1;\n        self.recompute_errors()?;\n        for (x,y) in self.last_deviations.iter_mut().zip(deviations){\n            *x = *y;\n        };\n        Ok(())\n    }\n\n    pub fn augment_descriptor(&self,descriptor : &mut Descriptor) {\n        descriptor.error_information = Some(\n            ErrorInformation{\n                interval_precision: self.interval_precision.clone(),\n                error_distribution: self.error_distribution.clone(),\n                error_rmse: self.error_rmse.clone(),\n                error_mean: self.error_mean.clone()\n            }\n        )\n    }\n\n    pub fn update_forecasts(&mut self, range_vector: &RangeVector<f32>) -> Result<()> {\n        check_argument(range_vector.values.len() == self.input_length *self.forecast_horizon, \"incorrect input\")?;\n        let array_length = self.past_forecasts.len();\n        let stored_forecast_index = (self.sequence_index + array_length - 1) % (array_length);\n        if stored_forecast_index < array_length + 1 {\n            check_argument(self.past_forecasts.len() == stored_forecast_index, \"incorrect accounting\")?;\n            self.past_forecasts.push(range_vector.clone());\n        } else {\n            for (x,y) in self.past_forecasts[stored_forecast_index].values.iter_mut().zip(&range_vector.values) {\n                *x = *y;\n            }\n            for (x,y) in self.past_forecasts[stored_forecast_index].lower.iter_mut().zip(&range_vector.lower) {\n                *x = *y;\n            }\n            for (x,y) in self.past_forecasts[stored_forecast_index].upper.iter_mut().zip(&range_vector.upper) {\n                *x = *y;\n           
 }\n        };\n        Ok(())\n    }\n\n\n    fn length(sequence_index : usize, error_horizon : usize, index : usize) -> usize {\n        if sequence_index > error_horizon + index + 1 {error_horizon}\n        else  if sequence_index < index + 1 {0}\n        else {sequence_index - index - 1}\n    }\n\n    fn recompute_errors(&mut self) -> Result<()>{\n        let array_length = self.past_forecasts.len();\n        let input_index = (self.sequence_index - 2 + array_length) % array_length;\n        let mut median_error = vec![0.0f32;self.error_horizon];\n\n        for x in self.interval_precision.iter_mut(){\n            *x =0.0;\n        }\n\n        for i in 0..self.forecast_horizon {\n            let len = Self::length(self.sequence_index, self.error_horizon, i);\n            for j in 0..self.input_length {\n                let pos = i * self.input_length + j;\n                if len > 0 {\n                    let mut positive_sum = 0.0f64;\n                    let mut positive_count = 0;\n                    let mut negative_sum = 0.0f64;\n                    let mut positive_sq_sum = 0.0f64;\n                    let mut negative_sq_sum = 0.0f64;\n                    for k in 0..len  {\n                        let past_index = (input_index - i - k + array_length) % array_length;\n                        let index = (input_index - k + array_length) % array_length;\n                        let error = (self.actuals[index][j] - self.past_forecasts[past_index].values[pos]) as f64;\n                        median_error[k] = error as f32;\n                        let within = self.past_forecasts[past_index].upper[pos] >= self.actuals[index][j]\n                            && self.actuals[index][j] >= self.past_forecasts[past_index].lower[pos];\n                        self.interval_precision[pos] += if within { 1.0 } else { 0.0 };\n\n                        if error >= 0.0 {\n                            positive_sum += error;\n                            positive_sq_sum 
+= error * error;\n                            positive_count += 1;\n                        } else {\n                            negative_sum += error;\n                            negative_sq_sum += error * error;\n                        }\n                    }\n                    self.error_mean[pos] = (positive_sum + negative_sum) as f32 / len as f32;\n                    self.error_rmse.high[pos] = if positive_count == 0 { 0.0 } else {\n                        f64::sqrt(positive_sq_sum / positive_count as f64)\n                    };\n                    self.error_rmse.low[pos] = if positive_count == len { 0.0 } else {\n                        - f64::sqrt(negative_sq_sum / (len - positive_count) as f64)\n                    };\n\n                    if len as f32 * self.percentile >= 1.0 {\n                        median_error[0..(len as usize)].sort_by(|o1, o2| o1.partial_cmp(&o2).unwrap());\n                        self.error_distribution.values[pos] = Self::interpolated_median(&median_error, len)?;\n                        self.error_distribution.upper[pos] = Self::interpolated_upper_rank(&median_error, len, len as f32 * self.percentile);\n                        self.error_distribution.lower[pos] = Self::interpolated_lower_rank(&median_error, len as f32 * self.percentile);\n                    }\n                    self.interval_precision[pos] = self.interval_precision[pos] / len as f32;\n                } else {\n                    self.error_mean[pos] = 0.0;\n                    self.error_rmse.high[pos] = 0.0;\n                    self.error_rmse.low[pos] = 0.0;\n                    self.error_distribution.values[pos] = 0.0;\n                    self.error_distribution.upper[pos] = 0.0;\n                    self.error_distribution.lower[pos] = 0.0;\n                    self.interval_precision[pos] = 0.0;\n                }\n            }\n        };\n        Ok(())\n    }\n\n    pub fn calibrate(&self,calibration : Calibration, ranges : &mut 
RangeVector<f32>) -> Result<()>{\n        check_argument(self.input_length * self.forecast_horizon == ranges.values.len(), \"mismatched lengths\")?;\n        for i in 0..self.forecast_horizon {\n            let len = Self::length(self.sequence_index, self.error_horizon, i);\n            for j in 0..self.input_length {\n                let pos = i * self.input_length + j;\n                if len > 0 {\n                    if calibration != Calibration::NONE {\n                        if len as f32 * self.percentile < 1.0 {\n                            let deviation = self.last_deviations[j];\n                            ranges.upper[pos] = maxf32(ranges.upper[pos], ranges.values[pos] + (1.3 * deviation));\n                            ranges.lower[pos] = minf32(ranges.lower[pos], ranges.values[pos] - (1.3 * deviation));\n                        } else {\n                            match calibration {\n                                Calibration::SIMPLE => { Self::adjust(pos, ranges, &self.error_distribution)?; },\n                                Calibration::MINIMAL => { Self::adjust_minimal(pos, ranges, &self.error_distribution)?; },\n                                _ => {}\n                            }\n                        }\n                    }\n                }\n            }\n        };\n        Ok(())\n    }\n\n    fn interpolated_median(ascending_array : &[f32], len : usize) -> Result<f32>{\n        check_argument(ascending_array.len() >= len, \"incorrect length parameter\")?;\n        let lower = if len % 2 == 0 { ascending_array[len / 2 - 1] }\n                else { (ascending_array[len / 2] + ascending_array[len / 2 - 1]) / 2.0 };\n        let upper = if len % 2 == 0 { ascending_array[len / 2] }\n                else { (ascending_array[len / 2] + ascending_array[len / 2 - 1]) / 2.0 };\n\n        if lower <= 0.0 && 0.0 <= upper {\n            return Ok(0.0);\n        } else {\n            return Ok((upper + lower) / 2.0);\n        }\n    }\n\n    
fn interpolated_lower_rank(ascending_array: &[f32], frac_rank : f32) -> f32{\n        let rank = f32::floor(frac_rank) as usize;\n        return ascending_array[rank - 1]\n                + (frac_rank - rank as f32) * (ascending_array[rank] - ascending_array[rank - 1]);\n    }\n\n    fn interpolated_upper_rank(ascending_array : &[f32], len : usize, frac_rank : f32) -> f32 {\n        let rank = f32::floor(frac_rank) as usize;\n        return ascending_array[len - rank]\n                + (frac_rank - rank as f32) * (ascending_array[len - rank - 1] - ascending_array[len - rank]);\n    }\n\n    fn adjust(pos: usize, range_vector : &mut RangeVector<f32>, other : &RangeVector<f32>) -> Result<()>{\n        check_argument(other.values.len() == range_vector.values.len(), \" mismatch in lengths\")?;\n        check_argument(pos < other.values.len(), \" cannot be this large\")?;\n        range_vector.values[pos] += other.values[pos];\n        range_vector.upper[pos] = maxf32(range_vector.values[pos], range_vector.upper[pos] + other.upper[pos]);\n        range_vector.lower[pos] = minf32(range_vector.values[pos], range_vector.lower[pos] + other.lower[pos]);\n        Ok(())\n    }\n\n   fn adjust_minimal(pos: usize, range_vector : &mut RangeVector<f32>, other : &RangeVector<f32>)  -> Result<()> {\n       check_argument(other.values.len() == range_vector.values.len(), \" mismatch in lengths\")?;\n       check_argument(pos < other.values.len(), \"cannot be this large\")?;\n       let old_val = range_vector.values[pos];\n       range_vector.values[pos] += other.values[pos];\n       range_vector.upper[pos] = maxf32(range_vector.values[pos], old_val + other.upper[pos]);\n       range_vector.lower[pos] = minf32(range_vector.values[pos], old_val + other.lower[pos]);\n       Ok(())\n   }\n}\n"
  },
  {
    "path": "Rust/src/trcf/mod.rs",
    "content": "mod predictorcorrector;\npub(crate) mod basicthresholder;\npub mod basictrcf;\nmod preprocessor;\nmod transformer;\npub mod types;\npub mod errorhandler;\npub mod rcfcaster;\npub mod multitrcf;\n"
  },
  {
    "path": "Rust/src/trcf/multitrcf.rs",
    "content": "use std::collections::HashMap;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::{RngCore, SeedableRng};\nuse rayon::prelude::*;\nuse crate::common::descriptor::Descriptor;\nuse crate::rcf::{AugmentedRCF, RCF, RCFBuilder, RCFOptions};\nuse crate::types::{Result};\nuse crate::trcf::predictorcorrector::PredictorCorrector;\nuse crate::trcf::preprocessor::{PreprocessorBuilder};\nuse crate::trcf::types::ForestMode::{STANDARD};\nuse crate::trcf::types::{ScoringStrategy, TransformMethod};\nuse crate::util::{check_argument, maxf32, minf32};\nuse crate::rcf::RCFOptionsBuilder;\nuse crate::trcf::basictrcf::{core_process, State, TRCFOptions, TRCFOptionsBuilder};\nuse crate::trcf::types::ScoringStrategy::EXPECTED_INVERSE_HEIGHT;\n\n\npub struct MultiTRCF {\n    arms : usize,\n    rcfs : Vec<Box<dyn AugmentedRCF<(u64,u64),u64> + Send + Sync>>,\n    states: HashMap<u64,State>,\n    input_dimensions: usize,\n    shingle_size: usize,\n    transform_decay: f64,\n    transform_method: TransformMethod,\n    scoring_strategy: ScoringStrategy,\n    random_seed: u64,\n    probability: f32,\n    parallel_enabled: bool,\n    selector : fn(&Descriptor) -> bool\n}\n\nfn is_anomaly(a:&Descriptor) -> bool {\n   a.anomaly_grade>0.0\n}\n\nimpl MultiTRCF {\n\n    fn select_for_update(&self, _a: u64, _b: usize, _score: f32, probability : f32) -> bool {\n        probability < self.probability\n    }\n\n    fn core_multi(&self, state:& mut State, point: &[f32],timestamp : u64)\n        -> Result<((usize,(u64,u64),Option<Vec<f32>>),Option<Descriptor>)> {\n        let internal_timestamp = state.preprocessor.internal_timestamp();\n        if self.arms > 1 && state.bandit.is_evaluating(self.arms, internal_timestamp) {\n            let shingled_point = state.preprocessor.shingled_point(None as Option<&Box<dyn RCF>>, &point, timestamp)?;\n            match shingled_point.as_ref() {\n                Some(x) => {\n                    let mut scores = vec![-1.0f32; self.arms];\n\n               
     for i in 0..self.arms {\n                        if self.rcfs[i].is_output_ready() {\n                            scores[i] = self.rcfs[i].score(x).unwrap() as f32;\n                        }\n                    }\n                    state.bandit.update(&scores)?;\n                    let random = state.random();\n                    state.bandit.choose(internal_timestamp, state.preprocessor.shingle_size(), random);\n                },\n                _ => {}\n            }\n        }\n        let rcf = if state.bandit.current_model() < self.arms {\n            Some(&self.rcfs[state.bandit.current_model()])\n        } else {\n            None\n        };\n        let t = core_process(rcf, state, point, timestamp).unwrap();\n        let probability = state.random();\n        let point = if state.preprocessor.is_ready()\n            && self.select_for_update(state.id, state.preprocessor.internal_timestamp(), t.score,probability) {\n            t.rcf_point.clone()\n        } else {\n            None\n        };\n        if (self.selector)(&t) {\n            Ok(((state.bandit.current_model(),(state.id,timestamp), point) , Some(t)))\n        } else {\n            Ok(((state.bandit.current_model(),(state.id,timestamp), point), None))\n        }\n    }\n\n    pub fn process(&mut self, input: HashMap<u64,(&[f32],u64)>) -> Result<Vec<Descriptor>>{\n        let mut join : Vec<(u64,(State,&[f32],u64))> = input.into_iter().map(|e| {\n             if !self.states.contains_key(&e.0) {\n                let preprocessor = PreprocessorBuilder::new(self.input_dimensions, self.shingle_size)\n                    .transform_method(self.transform_method).transform_decay(self.transform_decay)\n                    .start_normalization(3 * self.shingle_size).build().unwrap();\n                let predictor_corrector = PredictorCorrector::new(self.transform_decay, true, self.input_dimensions)?;\n                let state = 
State::new(e.0,self.arms,self.scoring_strategy,predictor_corrector,preprocessor)?;\n                 Ok((e.0,(state,e.1.0,e.1.1)))\n            } else {\n                Ok((e.0,(self.states.remove(&e.0).unwrap(),e.1.0,e.1.1)))\n            }}).collect::<Result<Vec<(u64,(State,&[f32],u64))>>>()?;\n\n        let collection = if self.parallel_enabled {\n            join.par_iter_mut().map(|(_x,(state,point,timestamp))| {\n                self.core_multi(state,*point,*timestamp)\n            }).collect::<Result<Vec<((usize,(u64,u64),Option<Vec<f32>>),Option<Descriptor>)>>>()?\n        } else {\n            join.iter_mut().map(|(_x,(state,point,timestamp))|  {\n                self.core_multi(state,*point,*timestamp)\n            }).collect::<Result<Vec<((usize,(u64,u64),Option<Vec<f32>>),Option<Descriptor>)>>>()?\n        };\n\n        join.into_iter().for_each(|(id,(state,_,_))| { self.states.insert(id,state);});\n\n        let (updates,proto) : (Vec<(usize,(u64,u64),Option<Vec<f32>>)>,Vec<_>) = collection.into_iter().unzip();\n\n        updates.into_iter().map(|y|  -> Result<()> {\n            match y.2.as_ref() {\n                Some(x) => {\n                    // if there is only one arm then the following will be equivalent -- first branch would\n                    // be taken since y.0 == 1 in such a case\n                    if y.0 != self.arms {\n                        self.rcfs[y.0].update(x, y.1)\n                    } else {\n                        self.rcfs.iter_mut().map(|z| {\n                            z.update(x, y.1)\n                        }).collect()\n                    }\n                },\n                None => { Ok(())}\n            }}).collect::<Result<Vec<()>>>()?;\n\n        let answer: Vec<Descriptor> = proto.into_iter().filter_map(|x| x).collect();\n\n        Ok(answer)\n    }\n\n    pub fn switches(&self) -> usize {\n        self.states.iter().map(|x| x.1.bandit.switches()).sum()\n    }\n\n    pub fn affirmations(&self) -> usize 
{\n        self.states.iter().map(|x| x.1.bandit.affirmations()).sum()\n    }\n\n    pub fn states(&self) -> Vec<State> {\n        self.states.iter().map(|x| x.1.clone()).collect::<Vec<State>>()\n    }\n\n    pub fn updates(&self) -> Vec<(usize,u64)>{\n        self.rcfs.iter().zip(0..self.rcfs.len()).map(|x| (x.1,x.0.entries_seen())).collect()\n    }\n}\n\npub struct MultiTRCFBuilder {\n    input_dimensions: usize,\n    shingle_size: usize,\n    arms : usize,\n    probability : f32,\n    scoring_strategy:ScoringStrategy,\n    rcf_options: RCFOptions<(u64,u64),u64>,\n    trcf_options: TRCFOptions\n}\n\nimpl Default for MultiTRCFBuilder {\n    // parallel_enabled will apply to MultiTRCF instead of RCF\n    fn default() -> Self {\n        MultiTRCFBuilder {\n            input_dimensions: 1,\n            shingle_size: 10,\n            arms: 1,\n            probability: 0.01,\n            scoring_strategy: EXPECTED_INVERSE_HEIGHT,\n            rcf_options: RCFOptions { parallel_enabled: true, internal_shingling: false, ..Default::default()},\n            trcf_options: Default::default()\n        }\n    }\n}\n\n//just picking time stamp\nfn attribute_creator(_a:&[(u64,u64)],label: (u64,u64)) -> Result<u64> {\n    Ok(label.1)\n}\n\nimpl MultiTRCFBuilder {\n    pub fn new(input_dimensions:usize, shingle_size: usize, number_of_models: usize, approx_cardinality: usize) -> Self {\n        MultiTRCFBuilder{\n            input_dimensions,\n            shingle_size,\n            arms: number_of_models,\n            probability : maxf32(0.01,1.0/maxf32(number_of_models as f32, (1+approx_cardinality) as f32)),\n            ..Default::default()\n        }\n    }\n\n    pub fn probability(&mut self, probability:f32) -> &mut Self {\n        self.probability = probability;\n        self\n    }\n\n    pub fn build(&self) -> Result<MultiTRCF>{\n        check_argument(self.arms >0,\" cannot be zero\")?;\n        check_argument(!self.rcf_options.internal_shingling, \"internal shingling is 
not feasible\")?;\n        check_argument( self.rcf_options.bounding_box_cache_fraction == 1.0, \" bounding box fraction should be 1\")?;\n        check_argument(self.trcf_options.forest_mode==STANDARD, \"forest mode not supported\")?;\n        let mut random_seed = self.rcf_options.random_seed.unwrap_or(ChaCha20Rng::from_entropy().next_u64());\n        let time_decay = self.rcf_options.time_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let transform_decay = self.trcf_options.transform_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let mut rcfs= Vec::new();\n        for _i in 0..self.arms {\n            rcfs.push(RCFBuilder::<(u64,u64),u64>::new(self.input_dimensions, self.shingle_size)\n                .tree_capacity(self.rcf_options.capacity)\n                .number_of_trees(self.rcf_options.number_of_trees)\n                .time_decay(time_decay)\n                .store_attributes(self.rcf_options.store_attributes)\n                .output_after(output_after)\n                .initial_accept_fraction(self.rcf_options.initial_accept_fraction)\n                .internal_shingling(false).random_seed(random_seed)\n                .build_to_u64(attribute_creator).unwrap());\n            random_seed += 1;\n        }\n        Ok(MultiTRCF {\n            arms: self.arms,\n            parallel_enabled : self.rcf_options.parallel_enabled,\n            rcfs,\n            states: Default::default(),\n            input_dimensions : self.input_dimensions,\n            shingle_size : self.shingle_size,\n            transform_decay,\n            transform_method: self.trcf_options.transform_method,\n            scoring_strategy: self.trcf_options.scoring_strategy,\n            random_seed,\n            probability: self.probability,\n            selector: is_anomaly\n        })\n    }\n}\n\nimpl RCFOptionsBuilder<(u64,u64),u64> for 
MultiTRCFBuilder {\n    fn get_rcf_options(&mut self) -> &mut RCFOptions<(u64,u64),u64> {\n        &mut self.rcf_options\n    }\n}\n\nimpl TRCFOptionsBuilder for MultiTRCFBuilder {\n    fn get_trcf_options(&mut self) -> &mut TRCFOptions {\n        &mut self.trcf_options\n    }\n}"
  },
  {
    "path": "Rust/src/trcf/predictorcorrector.rs",
    "content": "use std::f32::consts::E;\nuse num::abs;\nuse crate::common::divector::DiVector;\nuse crate::rcf::{AugmentedRCF, RCF};\nuse crate::types::{Result};\nuse crate::trcf::basicthresholder::BasicThresholder;\nuse crate::common::descriptor::Descriptor;\nuse crate::common::deviation::Deviation;\nuse crate::trcf::types::CorrectionMode::{ANOMALY_IN_SHINGLE, CONDITIONAL_FORECAST, DATA_DRIFT, FORECAST, NOISE};\nuse crate::trcf::types::ScoringStrategy::EXPECTED_INVERSE_HEIGHT;\nuse crate::trcf::types::TransformMethod::{DIFFERENCE, NORMALIZE_DIFFERENCE};\nuse crate::util::{absf32, maxf32, minf32};\n\nconst DEFAULT_NORMALIZATION_PRECISION:f32 = 1e-3;\nconst DEFAULT_NUMBER_OF_MAX_ATTRIBUTORS: usize = 5;\nconst NOISE_FACTOR: f32 = 1.0;\nconst DEFAULT_SAMPLING_SUPPORT : f32 = 0.1;\nconst DEFAULT_DIFFERENTIAL_FACTOR : f32 = 0.3;\nconst DEFAULT_RUN_ALLOWED : usize = 2;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct PredictorCorrector {\n    basic_thresholder: BasicThresholder,\n    auto_adjust : bool,\n    run_length : usize,\n    deviations_actual : Vec<Deviation>,\n    deviations_expected: Vec<Deviation>,\n    max_attributors: usize\n}\n\nimpl PredictorCorrector {\n    // for mappers\n    pub fn new(discount: f64, auto_adjust:bool, base_dimension : usize) -> Result<Self> {\n        let mut a = Vec::new();\n        let mut b =Vec::new();\n        if auto_adjust {\n            for _ in 0..base_dimension {\n                a.push(Deviation::new(discount)?);\n                b.push(Deviation::new(discount)?);\n            }\n        }\n        Ok(PredictorCorrector {\n            basic_thresholder: BasicThresholder::new(discount)?,\n            auto_adjust,\n            run_length : 0,\n            deviations_actual: a,\n            deviations_expected: b,\n            max_attributors: DEFAULT_NUMBER_OF_MAX_ATTRIBUTORS\n        })\n    }\n\n    pub fn expected_point<U :?Sized,Label : Sync + Copy, Attributes: Sync + Copy>(di_vector: &DiVector, max_attributors: usize, 
position: usize, base_dimension: usize, point: &[f32],\n                          forest: &Box<U>) -> Result<Vec<f32>>  where U: AugmentedRCF<Label,Attributes> {\n        let mut likely_missing_indices: Vec<usize>;\n\n        if base_dimension == 1 {\n            likely_missing_indices = vec![position; 1];\n        } else {\n            let mut sum = 0.0;\n            let mut values = vec![0.0; base_dimension];\n            for i in 0..base_dimension {\n                values[i] = di_vector.high_low_sum(i + position);\n                sum += values[i];\n            }\n            // sort decreasing\n            values.sort_by(|a, b| b.partial_cmp(a).unwrap());\n\n            let mut pick = 0;\n            while pick < base_dimension && values[pick] >= (sum * 0.5 / max_attributors as f64) {\n                pick += 1;\n            }\n            likely_missing_indices = Vec::new();\n\n            if pick != 0 && pick <= max_attributors {\n                let cutoff = values[pick - 1];\n                for i in 0..base_dimension {\n                    if di_vector.high_low_sum(i + position) >= cutoff && likely_missing_indices.len() < max_attributors {\n                        likely_missing_indices.push(position + i);\n                    }\n                }\n            }\n        }\n\n        let mut answer = Vec::from(point);\n        if likely_missing_indices.len() != 0 && (2 * likely_missing_indices.len()  < forest.dimensions()) {\n            let prediction = forest.conditional_field(&likely_missing_indices, point, 1.0, false, 0)?.median;\n            for i in likely_missing_indices{\n                answer[i] = prediction[i];\n            }\n        }\n        Ok(answer)\n    }\n\n    fn trigger(&self, candidate: &DiVector, gap: usize, base_dimension: usize, ideal: &DiVector,\n               last_descriptor: &Descriptor, threshold: f32) -> bool {\n        match &last_descriptor.last_anomaly {\n            None => { return true; },\n            Some(y) => {\n  
              match &y.attribution {\n                    None => { return true; },\n                    Some(_x) => {\n                        let last_score = y.score;\n                        let dimensions = candidate.dimensions();\n                        let difference = gap * base_dimension;\n                        if difference < dimensions {\n                            let mut differential_remainder = 0.0;\n                            for i in (dimensions - difference)..dimensions {\n                                let low_diff = candidate.low[i] - ideal.low[i];\n                                differential_remainder += absf32(low_diff as f32);\n                                let high_diff = candidate.high[i] - ideal.high[i];\n                                differential_remainder += absf32(high_diff as f32)\n                            }\n                            return differential_remainder > DEFAULT_DIFFERENTIAL_FACTOR * last_score\n                                && differential_remainder as f32 * (dimensions as f32) / difference as f32 > threshold;\n                        } else {\n                            return true;\n                        }\n                    }\n                }\n            }\n        }\n    }\n\n    pub fn apply_basic_corrector(point: &[f32], gap: usize, shingle_size: usize, base_dimension: usize,\n                                 last_descriptor: &Descriptor, use_difference: bool, time_augmented: bool) -> Vec<f32> {\n\n        let mut corrected_point = Vec::from(point);\n        if gap > shingle_size || last_descriptor.last_anomaly.is_none() {\n            return corrected_point;\n        }\n        let last_expected_point = &last_descriptor.last_anomaly.as_ref().unwrap().expected_rcf_point;\n        let last_anomaly_point = &last_descriptor.last_anomaly.as_ref().unwrap().anomalous_rcf_point;\n        let last_relative_index = last_descriptor.last_anomaly.as_ref().unwrap().relative_index;\n        if gap < 
shingle_size {\n            for i in gap * base_dimension..point.len() {\n                corrected_point[i - gap * base_dimension] = last_expected_point[i];\n            }\n        }\n        if last_relative_index == 0 { // is is possible to fix other cases, but is more complicated\n            if use_difference {\n                for y in 0..base_dimension {\n                    corrected_point[point.len() - gap * base_dimension + y] +=\n                        last_anomaly_point[point.len() - base_dimension + y] -\n                            last_expected_point[point.len() - base_dimension + y];\n                }\n            } else if time_augmented {\n                // definitely correct the time dimension which is always differenced\n                // this applies to the non-differenced cases\n                corrected_point[point.len() - (gap - 1) * base_dimension - 1] +=\n                    last_anomaly_point[point.len() - 1]\n                        - last_expected_point[point.len() - 1];\n            }\n        }\n        return corrected_point;\n    }\n\n    fn centered_transform_pass(base_dimensions: usize, result: &Descriptor, point : &[f32]) -> f32 {\n        let mut max_factor = 0.0f32;\n        let scale = result.scale.as_ref().unwrap();\n        let shift = result.shift.as_ref().unwrap();\n        let deviations = result.difference_deviations.as_ref().unwrap();\n        for i in 0..point.len() {\n            if absf32(point[i]) * scale[i%base_dimensions] > DEFAULT_NORMALIZATION_PRECISION * (1.0+absf32(shift[i%base_dimensions])) {\n                max_factor = 1.0;\n            }\n        }\n\n        if max_factor>0.0 {\n            for i in 0..base_dimensions {\n                let z = absf32(point[point.len() - base_dimensions + i])*scale[i];\n                let dev = maxf32(0.0,deviations[i]);\n                if z > NOISE_FACTOR * dev {\n                    max_factor = minf32(1.0, maxf32(max_factor, z / (3.0 * dev)));\n                
}\n            }\n        }\n        max_factor\n    }\n\n    fn calculate_path_deviation(point : &[f32], start_position: usize, index: usize, base_dimension : usize, differenced : bool) -> f32 {\n        let mut position = start_position;\n        let mut variation = 0.0;\n        let mut observation: usize = 0;\n        while position + index + base_dimension < point.len() {\n            variation += if differenced { absf32(point[position + index]) } else { absf32(point[position + index] - point[position + base_dimension + index]) };\n            position += base_dimension;\n            observation += 1;\n        }\n        if observation == 0 {\n            0.0\n        } else {\n            variation / observation as f32\n        }\n    }\n\n    fn construct_uncertainty_box(point :&[f32], start_position : usize, base_dimension : usize, result: &Descriptor) -> Vec<f32>{\n        let method = result.transform_method;\n        let differenced = (method == DIFFERENCE)  || (method == NORMALIZE_DIFFERENCE);\n        let scale = result.scale.as_ref().unwrap();\n        let shift = result.shift.as_ref().unwrap();\n        let mut answer = vec![0.0f32;base_dimension];\n        for y in 0..base_dimension {\n            let shift_amount = DEFAULT_NORMALIZATION_PRECISION * scale[y] * absf32(shift[y]);\n            let path_gap = Self::calculate_path_deviation(point, start_position, y, base_dimension, differenced);\n            let noise_gap = NOISE_FACTOR * result.difference_deviations.as_ref().unwrap()[y];\n            answer[y] = maxf32(scale[y] * path_gap,noise_gap) + shift_amount;\n        }\n        answer\n    }\n\n    fn within_unertainty_box(uncertainty_box: &[f32], start_position : usize, scale: &[f32], point: &[f32], other_point : &[f32]) -> bool {\n        let mut answer = false;\n        for y in 0..uncertainty_box.len() {\n            let a = scale[y] * point[start_position + y];\n            let b = scale[y] * other_point[start_position + y];\n            
answer = answer || a < b - uncertainty_box[y] || a > b + uncertainty_box[y];\n        }\n        return !(answer);\n    }\n\n    fn explained_by_conditional_field<U :?Sized,Label : Sync + Copy, Attributes: Sync + Copy>(uncertainty_box: &[f32], point: &[f32], corrected_point : &[f32], start_position : usize,\n                                      result :&Descriptor, forest :&Box<U>) -> Result<bool>\n        where U: AugmentedRCF<Label,Attributes> {\n        let list = forest.near_neighbor_list(corrected_point, 50)?;\n        let mut weight = 0;\n        let total = list.len();\n        for e in list {\n            if Self::within_unertainty_box(uncertainty_box, start_position, result.scale.as_ref().unwrap(), point, &e.1) {\n                weight += 1;\n            }\n        }\n        return Ok(weight as f32 >= DEFAULT_SAMPLING_SUPPORT * total as f32);\n    }\n\n\n    pub fn update_auto_adjust(&mut self, point : &[f32]){\n        if self.auto_adjust && self.run_length > 0 {\n            for y in 0..self.deviations_actual.len() {\n                self.deviations_actual[y].update(point[y] as f64);\n            }\n            self.run_length +=1;\n        }\n    }\n\n    fn update_score(&mut self, score: f32, corrected_score:f32, result: &Descriptor, last_descriptor: &Descriptor){\n        if result.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n            let last_score = if last_descriptor.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n                last_descriptor.score\n            } else {\n                0.0\n            };\n            self.basic_thresholder.update(score,corrected_score,last_score);\n        } else {\n            self.basic_thresholder.update_primary(score as f64);\n        }\n    }\n\n    pub fn detect_and_modify<U :?Sized,Label : Sync + Copy, Attributes: Sync + Copy>(&mut self, result: &mut Descriptor, last_descriptor : &Descriptor, shingle_size: usize, forest : &Box<U>)\n        -> Result<()>\n    where U : 
AugmentedRCF<Label,Attributes> {\n        match &result.rcf_point {\n            None => return Ok(()),\n            Some(point) => {\n                let score = if result.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n                    forest.score(point)? as f32\n                } else {\n                    forest.density_interpolant(point)?.distance.total() as f32\n                };\n                let method = result.transform_method;\n                result.score = score;\n                let internal_timestamp = result.values_seen;\n                let gap = internal_timestamp - if last_descriptor.last_anomaly.is_none() {0}\n                                          else {last_descriptor.last_anomaly.as_ref().unwrap().values_seen };\n                if score == 0.0 {\n                    return Ok(());\n                }\n\n                let dimension = forest.dimensions();\n                let base_dimension = dimension / shingle_size;\n                let (threshold, grade) = if result.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n                    self.basic_thresholder.threshold_and_grade(score, method, dimension, shingle_size)\n                } else {\n                    self.basic_thresholder.primary_threshold_and_grade(score)\n                };\n                result.threshold = threshold;\n\n                if grade == 0.0 {\n                    if self.auto_adjust {\n                        self.run_length = 0;\n                        for y in 0..base_dimension {\n                            self.deviations_actual[y].reset();\n                            self.deviations_expected[y].reset();\n                        }\n                    }\n                    result.anomaly_grade = 0.0;\n                    self.update_score(score, score, result,last_descriptor);\n                    return Ok(());\n                }\n\n                let candidate = result.scoring_strategy == last_descriptor.scoring_strategy &&\n                    
(score > last_descriptor.score\n                    || last_descriptor.score - last_descriptor.threshold > score\n                    - maxf32(threshold, last_descriptor.threshold)\n                    * (1.0 + maxf32(0.2, self.run_length as f32\n                    / (2.0 * maxf32(10.0, shingle_size as f32)))));\n\n\n                let use_difference = method == DIFFERENCE || method == NORMALIZE_DIFFERENCE;\n                let corrected_point = Self::apply_basic_corrector(point, gap, shingle_size, base_dimension,\n                                                                  last_descriptor, use_difference, result.time_augmented);\n                let mut corrected_score = score;\n                if gap > 0 && gap <= shingle_size && last_descriptor.last_anomaly.is_some() {\n                    corrected_score = if result.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n                        forest.score(&corrected_point)? as f32\n                    } else {\n                        forest.density_interpolant(&corrected_point)?.distance.total() as f32\n                    };\n                    let (_,newgrade) = if result.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n                        self.basic_thresholder.threshold_and_grade(corrected_score, method, dimension, shingle_size)\n                    } else {\n                        self.basic_thresholder.primary_threshold_and_grade(corrected_score)\n                    };\n                    // we know we are looking previous anomalies\n                    if newgrade == 0.0 {\n                        self.update_auto_adjust(point);\n                        result.correction_mode = ANOMALY_IN_SHINGLE;\n                        self.update_score(score, corrected_score, result,last_descriptor);\n                        result.anomaly_grade = 0.0;\n                        return Ok(());\n                    }\n                }\n\n                let working_grade = grade * 
Self::centered_transform_pass(base_dimension, result, &corrected_point);\n                if working_grade == 0.0 {\n                    self.update_auto_adjust(point);\n                    result.correction_mode = NOISE;\n                    result.anomaly_grade = 0.0;\n                    self.update_score(score, corrected_score, result, last_descriptor);\n                    return Ok(());\n                }\n\n\n                let mut attribution = if result.scoring_strategy == EXPECTED_INVERSE_HEIGHT {\n                    forest.attribution(&corrected_point)?\n                } else {\n                    forest.density_interpolant(&corrected_point)?.distance\n                };\n\n                let index = attribution.max_gap_contribution(base_dimension, gap)?;\n                let start_position = index * point.len() / shingle_size;\n                let uncertainty_box = Self::construct_uncertainty_box(point, start_position, base_dimension, result);\n\n                if self.auto_adjust &&\n                    Self::explained_by_conditional_field(&uncertainty_box, point, &corrected_point, start_position,\n                                                        result, forest)? 
{\n                    self.update_auto_adjust(point);\n                    result.correction_mode = CONDITIONAL_FORECAST;\n                    result.anomaly_grade = 0.0;\n                    self.update_score(score, corrected_score, result, last_descriptor);\n                    return Ok(());\n                }\n\n                let expected_point = Self::expected_point(&attribution, self.max_attributors, start_position, base_dimension, point, forest)?;\n                if gap < shingle_size {\n                    let new_attribution = forest.attribution(&expected_point)?;\n                    if !self.trigger(&attribution, gap, base_dimension, &new_attribution, last_descriptor, threshold) {\n                        result.correction_mode = ANOMALY_IN_SHINGLE;\n                        self.update_auto_adjust(point);\n                        result.anomaly_grade = 0.0;\n                        self.update_score(score, corrected_score, result,last_descriptor);\n                        return Ok(());\n                    }\n                }\n\n                if Self::within_unertainty_box(&uncertainty_box, start_position, result.scale.as_ref().unwrap(), point,\n                                    &expected_point) {\n                    result.correction_mode = FORECAST;\n                    self.update_auto_adjust(point);\n                    result.anomaly_grade = 0.0;\n                    self.update_score(score, corrected_score, result,last_descriptor);\n                    return Ok(());\n                }\n\n\n                if candidate {\n                    if self.auto_adjust {\n                        for y in 0..base_dimension {\n                            self.deviations_actual[y].update(point[dimension - base_dimension + y] as f64);\n                            self.deviations_expected[y].update(expected_point[dimension - base_dimension + y] as f64);\n                        }\n                        if self.run_length > DEFAULT_RUN_ALLOWED {\n   
                         let mut within = true;\n                            for y in 0..base_dimension {\n                                within =\n                                    absf32(self.deviations_actual[y].mean() as f32 - point[dimension - base_dimension + y]) <\n                                        maxf32(2.0 * self.deviations_actual[y].deviation() as f32,\n                                               NOISE_FACTOR * result.difference_deviations.as_ref().unwrap()[y]);\n                                within = within && absf32(self.deviations_expected[y].mean() as f32\n                                    - expected_point[dimension - base_dimension + y]) < 2.0\n                                    * maxf32(self.deviations_expected[y].deviation() as f32,\n                                             self.deviations_actual[y].deviation() as f32)\n                                    + 0.1 * absf32(\n                                    (self.deviations_actual[y].mean() - self.deviations_expected[y].mean()) as f32);\n                            }\n                            if within { // already adjusted\n                                result.correction_mode = DATA_DRIFT;\n                                result.anomaly_grade = 0.0;\n                                self.update_score(score, corrected_score,result, last_descriptor);\n                                return Ok(());\n                            }\n                        }\n                    }\n                }\n\n\n                self.run_length += 1;\n                result.expected_rcf_point=Some(expected_point);\n                result.anomaly_grade = working_grade;\n                self.update_score(score, corrected_score, result,last_descriptor);\n                result.relative_index = index as i32 - shingle_size as i32 + 1;\n                attribution.normalize(score as f64);\n                result.attribution = Some(attribution);\n                return Ok(());\n            }\n 
       }\n    }\n\n\n    pub fn set_z_factor(&mut self, factor: f32) {\n        self.basic_thresholder.set_z_factor(factor);\n    }\n\n    pub fn set_lower_threshold(&mut self, lower :f32) {\n        self.basic_thresholder.set_absolute_threshold(lower);\n    }\n\n    pub fn set_horizon(&mut self, horizon : f32) {\n        self.basic_thresholder.set_score_differencing(horizon);\n    }\n\n    pub fn set_initial_threshold(&mut self, initial : f32) {\n        self.basic_thresholder.set_initial_threshold(initial);\n    }\n}\n"
  },
  {
    "path": "Rust/src/trcf/preprocessor.rs",
    "content": "use std::cmp::min;\nuse rand::Rng;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::SeedableRng;\nuse crate::common::descriptor::{AnomalyInformation, Descriptor};\nuse crate::common::deviation::Deviation;\nuse crate::common::rangevector::RangeVector;\nuse crate::rcf::{AugmentedRCF, RCF};\nuse crate::trcf::basictrcf::BasicTRCFBuilder;\nuse crate::trcf::transformer::WeightedTransformer;\nuse crate::trcf::types::{ForestMode, ImputationMethod, TransformMethod};\nuse crate::trcf::types::ForestMode::{STANDARD, STREAMING_IMPUTE, TIME_AUGMENTED};\nuse crate::trcf::types::ImputationMethod::USE_RCF;\nuse crate::trcf::types::TransformMethod::{NORMALIZE, NORMALIZE_DIFFERENCE};\nuse crate::util::check_argument;\nuse crate::types::Result;\n\nconst DEFAULT_START_NORMALIZATION: usize = 10;\n\nconst DEFAULT_STOP_NORMALIZATION : usize = usize::MAX;\n\nconst DEFAULT_CLIP_FACTOR : f32 = 100.0;\n\nconst DEFAULT_NORMALIZATION : bool = false;\n\npub const DEFAULT_DEVIATION_STATES : usize = 5;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct Preprocessor {\n    timestamp_deviations : Vec<Deviation>,\n    normalize_time : bool,\n    weight_time : f32,\n    transform_decay : f64,\n    previous_timestamps : Vec<u64>,\n    internal_timestamp : usize,\n    initial_values : Vec<Vec<f32>>,\n    initial_timestamps : Vec<u64>,\n    start_normalization : usize,\n    stop_normalization : usize,\n    values_seen : usize,\n    default_fill : Vec<f32>,\n    use_imputed_fraction : f32,\n    number_of_imputed : usize,\n    clip_factor : f32,\n    shingle_size : usize,\n    input_dimensions : usize,\n    last_shingled_input: Vec<f32>,\n    last_shingled_point: Vec<f32>,\n    data_quality : Vec<Deviation>,\n    imputation_method : ImputationMethod,\n    transform_method : TransformMethod,\n    forest_mode : ForestMode,\n    transformer : WeightedTransformer\n}\n\nimpl Preprocessor {\n\n    fn past_initial(&self) -> bool{\n        self.values_seen + 1> self.start_normalization ||\n            
(self.transform_method != NORMALIZE && self.transform_method != NORMALIZE_DIFFERENCE)\n    }\n\n    pub fn shingled_point<U :?Sized,Label : Sync + Copy, Attributes: Sync + Copy>(&mut self, _rcf: Option<&Box<U>>, input : &[f32], timestamp: u64) -> Result<Option<Vec<f32>>> where\n    U : AugmentedRCF<Label,Attributes> {\n        check_argument(input.len() == self.input_dimensions, \"incorrect length\")?;\n        for x in input {\n            check_argument(f32::is_finite(*x), \" numbers should be finite\")?;\n        }\n\n        // the shingle will always be created, possibly wih leading 0's\n        if self.past_initial() {\n            if self.initial_values.len() > 0 {\n                // corresponds to external shingling in MultiRCF\n                // For anomaly detection it is not relevant when we update the state\n                // it can be on seeing the next input (here) or having seen the last input\n                // but forecasting for a single time series is better served\n                // having seen all available data specially when there are a few values\n                self.drain(None as Option<&mut Box<dyn RCF>>)?;\n            }\n            let input = &self.transform(input,timestamp);\n            let mut copy = self.last_shingled_point.clone();\n            Self::shift_vector(&mut copy,input);\n            Ok(Some(copy))\n        } else {\n            Ok(None)\n        }\n    }\n\n    fn shift_vector<T:Copy>(shingle: &mut Vec<T>, point: &[T]) {\n        let dimension = shingle.len();\n        for i in 0..(dimension-point.len()) {\n            shingle[i] = shingle[i + point.len()];\n        }\n        for i in 0..point.len() {\n            shingle[dimension - point.len() + i] = point[i];\n        }\n    }\n\n    fn update_timestamps(&mut self, timestamp:u64,previous: u64){\n        self.timestamp_deviations[0].update(timestamp as f64);\n        self.timestamp_deviations[1].update((timestamp - previous) as f64);\n        let deviation = 
self.timestamp_deviations[0].deviation();\n        self.timestamp_deviations[2].update(deviation);\n        let difference_mean = self.timestamp_deviations[1].mean();\n        let difference_deviation = self.timestamp_deviations[1].deviation();\n        self.timestamp_deviations[3].update( difference_mean);\n        self.timestamp_deviations[4].update(difference_deviation);\n    }\n\n    fn update(&mut self, input : &[f32], last_shingled_point: &[f32], timestamp: u64) -> Result<()>{\n        let dimension = self.last_shingled_input.len();\n        if self.values_seen < self.stop_normalization {\n            self.transformer.update(input, &self.last_shingled_input[(dimension - self.input_dimensions)..dimension])?;\n            self.update_timestamps(timestamp, self.previous_timestamps[self.shingle_size - 1]);\n        }\n\n        Self::shift_vector(&mut self.last_shingled_input,&input);\n        for (x,y) in self.last_shingled_point.iter_mut().zip(last_shingled_point) {\n            *x = *y;\n        }\n        Self::shift_vector(&mut self.previous_timestamps, &vec![timestamp]);\n        self.internal_timestamp += 1; // will count number of updates\n        self.values_seen += 1;\n        Ok(())\n    }\n\n    pub fn post_process(&mut self, result: &mut Descriptor, point:&[f32],timestamp:u64, _last_descriptor:&Descriptor) -> Result<()> {\n        if self.past_initial() {\n            if let Some(y) = &result.rcf_point {\n                if result.anomaly_grade > 0.0 {\n                    let base_dimension = self.input_dimensions;\n                    let block = (self.shingle_size as i32 + result.relative_index - 1) as usize;\n                    let start: usize = block * base_dimension;\n                    let past_values = if result.relative_index == 0 {\n                        Vec::from(point)\n                    } else {\n                        Vec::from(&self.last_shingled_input[(start + base_dimension)..(start + 2 * base_dimension)])\n                   
 };\n                    if let Some(x) = &result.expected_rcf_point {\n                        let expected_values_list = vec![self.transformer.invert(&x[start..start + base_dimension],\n                                                                                &self.last_shingled_input[start..start + base_dimension])];\n                        let likelihood_of_values = vec![1.0f32];\n                        result.last_anomaly = Some(\n                            AnomalyInformation {\n                                expected_rcf_point: x.clone(),\n                                anomalous_rcf_point: y.clone(),\n                                relative_index: result.relative_index,\n                                values_seen: result.values_seen,\n                                attribution: result.attribution.clone(),\n                                score: result.score,\n                                grade: result.anomaly_grade,\n                                expected_timestamp: timestamp, // changes not implemented\n                                relevant_attribution: None,\n                                time_attribution: 0.0, // not implemented\n                                past_values,\n                                past_timestamp: self.previous_timestamps[block],\n                                expected_values_list,\n                                likelihood_of_values\n                            }\n                        )\n                    }\n                }\n                self.update(point, y, timestamp)?;\n                result.deviations_post = Some(self.difference_deviations());\n            }\n        } else {\n            self.initial_values.push(Vec::from(point));\n            self.initial_timestamps.push(timestamp);\n            self.values_seen += 1;\n            if self.values_seen == self.start_normalization {\n                result.deviations_post = Some(self.difference_deviations());\n            }\n        }\n    
    Ok(())\n    }\n\n    pub fn drain<U:?Sized>(&mut self, rcf: Option<&mut Box<U>>) -> Result<()> where U:RCF {\n        if self.values_seen == self.start_normalization {\n            let mut previous = &self.initial_values[0];\n            let mut previous_timestamp = self.initial_timestamps[0];\n            for i in 0..self.initial_values.len() {\n                self.transformer.update(&self.initial_values[i],&previous)?;\n                self.update_timestamps(self.initial_timestamps[i],previous_timestamp);\n                previous_timestamp = self.initial_timestamps[i];\n                previous = &self.initial_values[i];\n            }\n\n            self.previous_timestamps[self.shingle_size - 1] = self.initial_timestamps[0];\n            let dimension = self.shingle_size * self.input_dimensions;\n            for i in 0..self.input_dimensions {\n                self.last_shingled_input[dimension - self.input_dimensions + i] = self.initial_values[0][i];\n            }\n\n            match rcf {\n                None => {\n                    // transformations will work at this point\n                    for (x, &y) in self.initial_values.iter().zip(&self.initial_timestamps) {\n                        let z = &self.transform(x, y);\n                        self.internal_timestamp += 1;\n                        Self::shift_vector(&mut self.last_shingled_input, x);\n                        Self::shift_vector(&mut self.last_shingled_point, z);\n                        Self::shift_vector(&mut self.previous_timestamps, &vec![y]);\n                    }\n                },\n                Some(f) => {\n                    for (x, &y) in self.initial_values.iter().zip(&self.initial_timestamps) {\n                        let z = &self.transform(x, y);\n                        self.internal_timestamp += 1;\n                        Self::shift_vector(&mut self.last_shingled_input, x);\n                        Self::shift_vector(&mut self.last_shingled_point, z);\n   
                     Self::shift_vector(&mut self.previous_timestamps, &vec![y]);\n                        f.update(z, y)?;\n                    }\n                }\n            }\n            // block deallocation\n            self.initial_timestamps = Vec::new();\n            self.initial_values = Vec::new();\n        }\n        Ok(())\n    }\n\n    pub fn invert_extrapolation(&self, mut range_vector: RangeVector<f32>) -> Result<(RangeVector<f32>,Option<RangeVector<f64>>)> {\n        if self.forest_mode != TIME_AUGMENTED {\n            let dimension = self.input_dimensions * self.shingle_size;\n            self.transformer.invert_forecast(&mut range_vector,&self.last_shingled_input[(dimension - self.input_dimensions)..dimension])?;\n            return Ok((range_vector,None))\n        } else {\n            let augmented = self.get_dimension();\n            let lookahead = range_vector.upper.len()/augmented;\n            let dimension = self.input_dimensions * self.shingle_size;\n            let mut sub_range_vector : RangeVector<f32> = RangeVector::<f32>::new(lookahead*dimension);\n            let mut time_range_vector : RangeVector<f64> = RangeVector::<f64>::new(lookahead);\n            for i in 0..lookahead {\n                for j in 0..dimension {\n                    sub_range_vector.upper[i * dimension + j] = range_vector.upper[i * augmented + j];\n                    sub_range_vector.values[i * dimension + j] = range_vector.values[i * augmented + j];\n                    sub_range_vector.lower[i * dimension + j] = range_vector.lower[i * augmented + j];\n                }\n                time_range_vector.upper[i] = self.invert_time(range_vector.upper[i*augmented + dimension]);\n                time_range_vector.values[i] = self.invert_time(range_vector.values[i*augmented + dimension]);\n                time_range_vector.lower[i] = self.invert_time(range_vector.lower[i*augmented + dimension]);\n                if time_range_vector.upper[i] < 
time_range_vector.values[i] {\n                    time_range_vector.upper[i] = time_range_vector.values[i];\n                }\n                if time_range_vector.lower[i] > time_range_vector.values[i] {\n                    time_range_vector.lower[i] = time_range_vector.values[i];\n                }\n            }\n            self.transformer.invert_forecast(&mut sub_range_vector,&self.last_shingled_input[(dimension - self.input_dimensions)..dimension])?;\n            time_range_vector.cascaded_add(&vec![self.previous_timestamps[self.shingle_size-1] as f64])?;\n            return Ok((sub_range_vector,Some(time_range_vector)));\n        }\n    }\n\n    fn invert_time(&self,value:f32) -> f64{\n        let factor = if self.weight_time == 0.0 { 0.0 } else { 1.0 / self.weight_time as f64};\n        (value as f64)*factor*self.timescale() + self.timedrift()\n    }\n\n    pub fn internal_timestamp(&self) -> usize {\n        self.internal_timestamp\n    }\n\n    pub fn values_seen(&self) -> usize {\n        self.values_seen\n    }\n\n    pub fn input_dimensions(&self) -> usize {\n        self.input_dimensions\n    }\n\n    pub fn shingle_size(&self) -> usize {\n        self.shingle_size\n    }\n\n    pub fn get_dimension(&self) -> usize {\n        if self.forest_mode == TIME_AUGMENTED {\n            (self.input_dimensions + 1)*self.shingle_size\n        } else {\n            self.input_dimensions * self.shingle_size\n        }\n    }\n\n    pub fn transformation_method(&self) -> TransformMethod {\n           self.transform_method\n    }\n\n    pub fn forest_mode(&self) -> ForestMode {\n        self.forest_mode\n    }\n\n    pub fn shift(&self) -> Vec<f32> {\n        if self.forest_mode != TIME_AUGMENTED {\n            self.transformer.shift()\n        } else {\n            let mut answer = self.transformer.shift();\n             answer.push( self.previous_timestamps[self.shingle_size-1] as f32 + self.timedrift() as f32);\n            answer\n        }\n    }\n\n    pub 
fn scale(&self) -> Vec<f32> {\n        if self.forest_mode != TIME_AUGMENTED {\n            self.transformer.scale()\n        } else {\n            let mut answer = self.transformer.scale();\n            let factor = if self.weight_time == 0.0 { 0.0 } else { 1.0 / self.weight_time };\n            answer.push(factor * (self.timescale()) as f32);\n            answer\n        }\n    }\n\n    pub fn transform(&self,input:&[f32],timestamp: u64) -> Vec<f32> {\n        let dimension = self.input_dimensions * self.shingle_size;\n        let mut answer = self.transformer.transform\n        (input, &self.last_shingled_input[(dimension - self.input_dimensions)..dimension]);\n\n        if self.forest_mode == TIME_AUGMENTED {\n            let previous = if self.values_seen > 0 {\n                self.previous_timestamps[self.shingle_size - 1]\n            } else {\n                timestamp\n            };\n            answer.push((timestamp as f64 - previous as f64 * (self.weight_time as f64) /\n                self.timescale()) as f32);\n        }\n        answer\n    }\n\n    pub fn difference_deviations(&self) -> Vec<f32> {\n        if self.forest_mode != TIME_AUGMENTED {\n            self.transformer.difference_deviations()\n        } else {\n            let mut answer = self.transformer.difference_deviations();\n            answer.push(self.weight_time as f32 * (self.timestamp_deviations[1].deviation() as f32));\n            answer\n        }\n    }\n\n    fn timescale(&self) -> f64 {\n        self.timestamp_deviations[4].mean() + 1.0\n    }\n\n    fn timedrift(&self) -> f64 {\n        self.timestamp_deviations[3].mean()\n    }\n\n    pub fn is_ready(&self) -> bool {\n        self.internal_timestamp >= self.shingle_size\n    }\n\n    pub fn start_normalization(&self) -> usize {\n        self.start_normalization\n    }\n\n }\n\n\npub struct PreprocessorBuilder {\n    normalize_time: bool,\n    input_dimensions: usize ,\n    transform_decay:Option<f64>,\n    weights: 
Option<Vec<f32>>,\n    weight_time: f32,\n    imputation_method: ImputationMethod,\n    number_of_imputed: usize,\n    clip_factor: f32,\n    use_imputed_fraction: f32,\n    transform_method: TransformMethod,\n    forest_mode : ForestMode,\n    default_fill: Option<Vec<f32>>,\n    shingle_size: usize,\n    random_seed: Option<u64>,\n    start_normalization: usize,\n    stop_normalization : usize\n}\n\nimpl Default for PreprocessorBuilder {\n    fn default() -> Self {\n        PreprocessorBuilder{\n            normalize_time: DEFAULT_NORMALIZATION,\n            input_dimensions: 1,\n            transform_decay: Some(0.001),\n            weights: None,\n            weight_time: 1.0,\n            imputation_method: USE_RCF,\n            number_of_imputed: 0,\n            clip_factor: DEFAULT_CLIP_FACTOR,\n            use_imputed_fraction: 0.0,\n            transform_method: NORMALIZE,\n            forest_mode: STANDARD,\n            default_fill: None,\n            shingle_size: 8,\n            random_seed: None,\n            start_normalization: DEFAULT_START_NORMALIZATION,\n            stop_normalization: DEFAULT_STOP_NORMALIZATION,\n        }\n    }\n}\n\nimpl PreprocessorBuilder {\n    pub fn new(input_dimensions: usize, shingle_size: usize) -> Self {\n        PreprocessorBuilder { input_dimensions, shingle_size, ..Default::default() }\n    }\n\n    pub fn transform_decay(&mut self, transform_decay: f64) -> &mut PreprocessorBuilder {\n        self.transform_decay = Some(transform_decay);\n        self\n    }\n\n    pub fn forest_mode(&mut self, forest_mode: ForestMode) -> &mut PreprocessorBuilder {\n        self.forest_mode = forest_mode;\n        self\n    }\n\n    pub fn transform_method(&mut self, transform_method: TransformMethod) -> &mut PreprocessorBuilder {\n        self.transform_method = transform_method;\n        self\n    }\n\n    pub fn imputation_method(&mut self, imputation_method: ImputationMethod) -> &mut PreprocessorBuilder {\n        
self.imputation_method = imputation_method;\n        self\n    }\n\n    pub fn start_normalization(&mut self, start_normalization: usize) -> &mut PreprocessorBuilder {\n        self.start_normalization = start_normalization;\n        self\n    }\n\n    pub fn stop_normalization(&mut self, stop_normalization: usize) -> &mut PreprocessorBuilder {\n        self.stop_normalization = stop_normalization;\n        self\n    }\n\n    pub fn initial_accept_fraction(&mut self, use_imputed_fraction: f32) -> &mut PreprocessorBuilder {\n        self.use_imputed_fraction = use_imputed_fraction;\n        self\n    }\n\n    pub fn default_fill(&mut self, default_fill: &[f32]) -> &mut PreprocessorBuilder {\n        self.default_fill = Some(Vec::from(default_fill));\n        self\n    }\n\n    pub fn random_seed(&mut self, random_seed: u64) -> &mut PreprocessorBuilder {\n        self.random_seed = Some(random_seed);\n        self\n    }\n\n    pub fn weights(&mut self, weights: &[f32]) -> &mut PreprocessorBuilder {\n        self.weights = Some(Vec::from(weights));\n        self\n    }\n\n    pub fn build(&self) -> Result<Preprocessor> {\n        check_argument(self.forest_mode != STREAMING_IMPUTE, \"not yet supported\")?;\n        check_argument(self.input_dimensions > 0, \"input_dimensions cannot be 0\")?;\n        check_argument(self.shingle_size > 0, \"shingle size cannot be 0\")?;\n        let transform_decay = self.transform_decay.unwrap_or(0.001);\n        let weights = match &self.weights {\n            Some(x) => x.clone(),\n            _ => vec![1.0; self.input_dimensions],\n        };\n        check_argument(self.input_dimensions == weights.len(), \"incorrect length of weights\")?;\n        check_argument(transform_decay >=0.0 && transform_decay<=1.0, \"transform decay must be in [0,1]\")?;\n        let mut timestamp_deviations = Vec::new();\n        timestamp_deviations.push(Deviation::new(transform_decay)?);\n        
timestamp_deviations.push(Deviation::new(transform_decay)?);\n        for _ in 0..(DEFAULT_DEVIATION_STATES - 2) {\n            timestamp_deviations.push(Deviation::new(0.1*transform_decay)?);\n        }\n        let dimension= self.input_dimensions * self.shingle_size;\n        let augmented = dimension + if self.forest_mode == TIME_AUGMENTED {self.shingle_size} else {0};\n        check_argument(self.start_normalization <= self.stop_normalization, \" cannot stop normalization before starting\")?;\n        check_argument(self.start_normalization < 2000, \"can cause delays, large memory usage\")?;\n        let random_seed = self.random_seed.unwrap_or(ChaCha20Rng::from_entropy().gen::<u64>());\n        let default_fill = match &self.default_fill {\n            Some(x) => x.clone(),\n            _ => vec![0.0; self.input_dimensions],\n        };\n        let preprocessor = Preprocessor {\n            timestamp_deviations,\n            normalize_time: self.normalize_time,\n            weight_time: self.weight_time,\n            transform_decay,\n            previous_timestamps: vec![0;self.shingle_size],\n            internal_timestamp: 0,\n            initial_values: vec![],\n            initial_timestamps: vec![],\n            start_normalization: self.start_normalization,\n            stop_normalization: self.stop_normalization,\n            values_seen: 0,\n            default_fill,\n            use_imputed_fraction: self.use_imputed_fraction,\n            number_of_imputed: 0,\n            clip_factor: self.clip_factor,\n            shingle_size: self.shingle_size,\n            input_dimensions: self.input_dimensions,\n            last_shingled_input: vec![0.0;dimension],\n            last_shingled_point: vec![0.0;augmented],\n            data_quality: vec![],\n            imputation_method: self.imputation_method,\n            transform_method: self.transform_method,\n            forest_mode: self.forest_mode,\n            transformer: 
WeightedTransformer::new(self.transform_method, self.input_dimensions, transform_decay, &weights)?\n        };\n        Ok(preprocessor)\n    }\n}"
  },
  {
    "path": "Rust/src/trcf/rcfcaster.rs",
    "content": "use rand::Rng;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::SeedableRng;\nuse crate::common::descriptor::Descriptor;\nuse crate::common::rangevector::RangeVector;\nuse crate::rcf::{RCF, RCFBuilder, RCFOptions};\nuse crate::trcf::errorhandler::ErrorHandler;\nuse crate::trcf::predictorcorrector::PredictorCorrector;\nuse crate::trcf::preprocessor::{Preprocessor, PreprocessorBuilder};\nuse crate::trcf::types::{Calibration, ForestMode, TransformMethod};\nuse crate::trcf::types::Calibration::{MINIMAL, NONE};\nuse crate::trcf::types::ForestMode::{STANDARD, STREAMING_IMPUTE, TIME_AUGMENTED};\nuse crate::trcf::types::TransformMethod::NORMALIZE;\nuse crate::types::{Result};\nuse crate::rcf::RCFOptionsBuilder;\nuse crate::trcf::basictrcf::{BasicTRCF, core_process, State, TRCFOptions, TRCFOptionsBuilder};\nuse crate::util::check_argument;\n\npub const DEFAULT_ERROR_PERCENTILE : f32 = 0.1;\n\npub const MAX_ERROR_HORIZON : usize = 1024;\n\npub struct RCFCaster {\n    forecast_horizon : usize,\n    rcf : Box<dyn RCF + Send + Sync>,\n    state: State,\n    error_handler : ErrorHandler,\n    calibration_method : Calibration,\n}\n\nimpl RCFCaster {\n\n    pub fn process(&mut self, point: &[f32], timestamp: u64) -> Result<Descriptor>{\n        let mut result = core_process(Some(&self.rcf), &mut self.state, point, timestamp)?;\n        match result.rcf_point.as_ref() {\n            // this path would be taken for all un-normalized transformations\n            // relies on internal shingling\n            Some(x) => {\n                let dimension = x.len();\n                let shingle_size = self.state.preprocessor.shingle_size();\n                self.rcf.update(&x[(dimension - (dimension / shingle_size))..dimension], timestamp as u64)?;\n            },\n            _ => { self.state.preprocessor.drain(Some(&mut self.rcf))?; }\n        }\n\n        if self.rcf.is_output_ready() {\n            self.error_handler.update_actuals(&result.current_input, 
result.deviations_post.as_ref().expect(\"should be present\"))?;\n            self.error_handler.augment_descriptor(&mut result);\n\n            let mut forecast = self.extrapolate(self.forecast_horizon)?;\n            self.error_handler.update_forecasts(&mut forecast.0)?;\n            result.forecast = Some(forecast.0);\n        }\n        Ok(result)\n    }\n\n    pub fn extrapolate(&self, look_ahead: usize) -> Result<(RangeVector<f32>,Option<RangeVector<f64>>)> {\n        let mut a = self.state.preprocessor.invert_extrapolation(self.rcf.extrapolate(look_ahead)?)?;\n        self.error_handler.calibrate(self.calibration_method, &mut a.0)?;\n        Ok(a)\n    }\n\n    pub fn process_sequentially(&mut self, input: &[(&[f32],u64)]) -> Result<Vec<Descriptor>> {\n       input.iter().map(|(a, b)| self.process(*a, *b))\n            .into_iter().collect()\n    }\n}\n\n\npub struct RCFCasterOptions {\n    add_error: bool,\n    calibration : Calibration,\n    forecast_horizon: usize,\n    error_horizon: Option<usize>,\n}\n\npub trait RCFCasterOptionsBuilder: TRCFOptionsBuilder {\n    fn get_rcf_caster_options(&mut self) -> &mut RCFCasterOptions;\n    fn transform_decay(&mut self, transform_decay: f64) -> &mut Self {\n        self.get_trcf_options().transform_decay = Some(transform_decay);\n        self\n    }\n    fn calibration(&mut self, calibration: Calibration) -> &mut Self {\n        self.get_rcf_caster_options().calibration = calibration;\n        self\n    }\n    fn forecast_horizon(&mut self, forecast_horizon: usize) -> &mut Self {\n        self.get_rcf_caster_options().forecast_horizon = forecast_horizon;\n        self\n    }\n    fn error_horizon(&mut self, error_horizon: usize) -> &mut Self {\n        self.get_rcf_caster_options().error_horizon = Some(error_horizon);\n        self\n    }\n}\n\n\nimpl Default for RCFCasterOptions {\n    fn default() -> Self {\n        RCFCasterOptions{\n            add_error: false,\n            calibration: MINIMAL,\n            
forecast_horizon: 10,\n            error_horizon: None\n        }\n    }\n}\n\npub struct RCFCasterBuilder {\n    id: u64,\n    input_dimensions: usize,\n    shingle_size: usize,\n    trcf_options: TRCFOptions,\n    rcf_options : RCFOptions<u64,u64>,\n    rcf_caster_options: RCFCasterOptions,\n}\n\nimpl RCFOptionsBuilder<u64,u64> for RCFCasterBuilder {\n    fn get_rcf_options(&mut self) -> &mut RCFOptions<u64,u64> {\n        &mut self.rcf_options\n    }\n}\n\nimpl TRCFOptionsBuilder for RCFCasterBuilder {\n    fn get_trcf_options(&mut self) -> &mut TRCFOptions {\n        &mut self.trcf_options\n    }\n}\n\nimpl RCFCasterOptionsBuilder for RCFCasterBuilder {\n    fn get_rcf_caster_options(&mut self) -> &mut RCFCasterOptions {\n        &mut self.rcf_caster_options\n    }\n}\n\nimpl RCFCasterBuilder {\n    pub fn new(id : u64,input_dimensions: usize, shingle_size: usize, forecast_horizon: usize) -> Self {\n        RCFCasterBuilder {\n            id,\n            input_dimensions,\n            shingle_size,\n            trcf_options: Default::default(),\n            rcf_options : Default::default(),\n            rcf_caster_options: RCFCasterOptions {forecast_horizon, ..Default::default()}\n        }\n    }\n\n    pub fn build(&self) -> Result<RCFCaster> {\n        check_argument(self.trcf_options.forest_mode!= STREAMING_IMPUTE, \"not yet supported\")?;\n        check_argument( self.input_dimensions > 0, \"input_dimensions cannot be 0\")?;\n        check_argument( self.shingle_size > 0, \"shingle size cannot be 0\")?;\n        self.rcf_options.validate()?;\n        self.trcf_options.validate(self.input_dimensions)?;\n        let output_after = self.rcf_options.output_after.unwrap_or(1 + self.rcf_options.capacity / 4);\n        let time_decay = self.rcf_options.time_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let transform_decay = self.trcf_options.transform_decay.unwrap_or(0.1/self.rcf_options.capacity as f64);\n        let weights = match 
&self.trcf_options.weights {\n            Some(x) => x.clone(),\n            _ => vec![1.0; self.input_dimensions]\n        };\n        let random_seed = self.rcf_options.random_seed.unwrap_or( ChaCha20Rng::from_entropy().gen::<u64>());\n        let rcf = RCFBuilder::<u64,u64>::new(self.input_dimensions,self.shingle_size)\n            .tree_capacity(self.rcf_options.capacity).number_of_trees(self.rcf_options.number_of_trees)\n            .random_seed(random_seed)\n            .parallel_enabled(self.rcf_options.parallel_enabled).time_decay(time_decay)\n            .bounding_box_cache_fraction(self.rcf_options.bounding_box_cache_fraction)\n            .output_after(output_after)\n            .initial_accept_fraction(self.rcf_options.initial_accept_fraction).build_default()?;\n        let preprocessor = PreprocessorBuilder::new(self.input_dimensions,self.shingle_size)\n            .transform_decay(transform_decay)\n            .transform_method(self.trcf_options.transform_method)\n            .forest_mode(self.trcf_options.forest_mode)\n            .random_seed(random_seed+1)\n            .weights(&weights)\n            .start_normalization(self.trcf_options.start_normalization)\n            .stop_normalization(self.trcf_options.stop_normalization).build()?;\n        let predictor_corrector = PredictorCorrector::new(transform_decay,!self.trcf_options.verbose,self.input_dimensions)?;\n\n        let error_horizon= self.rcf_caster_options.error_horizon.unwrap_or(MAX_ERROR_HORIZON);\n        check_argument(error_horizon<=MAX_ERROR_HORIZON, \"calibration horizon should be smaller\")?;\n        let error_handler = ErrorHandler::new(self.rcf_caster_options.add_error,self.input_dimensions,self.rcf_caster_options.forecast_horizon,error_horizon,DEFAULT_ERROR_PERCENTILE);\n        Ok(RCFCaster{\n            forecast_horizon: self.rcf_caster_options.forecast_horizon,\n            rcf,\n            state: 
State::new(self.id,1,self.trcf_options.scoring_strategy,predictor_corrector,preprocessor)?,\n            error_handler,\n            calibration_method: self.rcf_caster_options.calibration\n        })\n    }\n}\n"
  },
  {
    "path": "Rust/src/trcf/transformer.rs",
    "content": "use crate::common::deviation::Deviation;\nuse crate::common::rangevector::RangeVector;\nuse crate::trcf;\nuse crate::trcf::preprocessor::Preprocessor;\nuse crate::trcf::types::TransformMethod;\nuse crate::trcf::types::TransformMethod::{DIFFERENCE, NONE, NORMALIZE, NORMALIZE_DIFFERENCE, SUBTRACT_MA,WEIGHTED};\nuse crate::util::check_argument;\nuse crate::trcf::preprocessor::DEFAULT_DEVIATION_STATES;\nuse crate::types::Result;\n\n#[repr(C)]\n#[derive(Clone)]\npub struct WeightedTransformer {\n    transform_method : TransformMethod,\n    deviations : Vec<Deviation>,\n    input_length : usize,\n    weights : Vec<f32>,\n}\n\nimpl WeightedTransformer {\n\n    pub fn new(transform_method : TransformMethod, input_length : usize, transform_decay: f64, weights : &[f32]) -> Result<Self> {\n        check_argument(input_length == weights.len(), \"incorrect lengths\")?;\n        let mut deviations : Vec<Deviation> = Vec::new();\n        for _i in 0..2*input_length {\n            deviations.push(Deviation::new(transform_decay)?);\n        }\n        for _i in 0..(DEFAULT_DEVIATION_STATES - 2) * input_length {\n            deviations.push(Deviation::new(0.1 * transform_decay)?);\n        }\n        if transform_method == NONE{\n            for w in weights {\n                check_argument( *w ==1.0, \"incorrect setting for NONE transformation\")?;\n            }\n        }\n        Ok(WeightedTransformer{\n            transform_method,\n            deviations,\n            input_length,\n            weights: Vec::from(weights)\n        })\n    }\n\n     pub fn update(&mut self, input: &[f32], previous: &[f32]) -> Result<()> {\n         check_argument(input.len() == self.input_length, \" incorrect length\")?;\n         check_argument(previous.len() == self.input_length, \" incorrect length\")?;\n         for i in 0..self.input_length {\n             self.deviations[i].update(input[i] as f64);\n             let deviation = self.deviations[i].deviation();\n           
  self.deviations[i + self.input_length].update((input[i] - previous[i]) as f64);\n             let difference_mean = self.deviations[i + self.input_length].mean();\n             let difference_deviation = self.deviations[i + self.input_length].deviation();\n             self.deviations[i + 2 * self.input_length].update(deviation);\n             self.deviations[i + 3 * self.input_length].update(difference_mean);\n             self.deviations[i + 4 * self.input_length].update(difference_deviation);\n         }\n         Ok(())\n     }\n\n    fn normalized_scale(&self, i: usize) -> f32 {\n        (self.deviations[ i + 2 * self.input_length].mean() + 1.0) as f32\n    }\n\n    fn basic_shift(&self, i: usize) -> f32 {\n        self.deviations[i].mean() as f32\n    }\n\n    fn shift_difference(&self, i: usize) -> f32 {\n        self.deviations[ i + self.input_length].mean()  as f32\n    }\n\n    fn basic_drift(&self, i: usize) -> f32 {\n        self.deviations[ i + 3*self.input_length].mean()  as f32\n    }\n\n    fn difference(&self, input : &mut [f32], previous : &[f32]) {\n         for (x,y) in input.iter_mut().zip(previous){\n             *x -= y;\n         }\n    }\n\n    fn add(&self, input : &mut [f32], previous : &[f32]) {\n        for (x,y) in input.iter_mut().zip(previous){\n            *x += y;\n        }\n    }\n\n    fn add_ma(&self, input : &mut [f32]) {\n        for i in 0..input.len() {\n            input[i] += self.basic_shift(i);\n        }\n    }\n\n    fn subtract_ma(&self, input : &mut [f32]) {\n        for i in 0..input.len() {\n            input[i] -=  self.basic_shift(i);\n        }\n    }\n\n    fn weight(&self, input : &mut[f32]){\n        for (x,y) in input.iter_mut().zip(&self.weights){\n            *x = *x * (*y);\n        }\n    }\n\n    fn weight_invert(&self, input : &mut[f32]){\n        for (x,y) in input.iter_mut().zip(&self.weights){\n            *x = if *y == 0.0 {0.0} else {*x/y};\n        }\n    }\n\n    fn normalize(&self, input : 
&mut [f32]){\n        for i in 0..input.len() {\n                input[i] -= self.basic_shift(i);\n                input[i] = input[i] /self.normalized_scale(i);\n        }\n    }\n\n    fn normalize_invert(&self, input : &mut [f32]){\n        for i in 0..input.len() {\n            input[i] = input[i]*self.normalized_scale(i);\n            input[i] += self.basic_shift(i);\n        }\n    }\n\n    fn normalize_difference(&self, input : &mut [f32], previous : &[f32]){\n        for i in 0..input.len() {\n            input[i] -= previous[i];\n            input[i] = input[i]/ self.normalized_scale(i);\n        }\n    }\n\n    fn normalize_difference_invert(&self, input : &mut [f32], previous : &[f32]) {\n        for i in 0..input.len() {\n            input[i] = input[i]* self.normalized_scale(i);\n            input[i] +=  previous[i];\n        }\n    }\n\n    pub fn transform(&self, input : &[f32], previous : &[f32]) -> Vec<f32> {\n        let mut answer: Vec<f32> = Vec::from(input);\n        match &self.transform_method {\n            NONE => {},\n            DIFFERENCE=> {self.difference(&mut answer,previous)},\n            SUBTRACT_MA=> {self.subtract_ma(&mut answer)},\n            NORMALIZE=> {self.normalize(&mut answer)},\n            NORMALIZE_DIFFERENCE => {self.normalize_difference(&mut answer, previous)},\n            WEIGHTED => {self.weight(&mut answer)},\n        };\n        answer\n    }\n\n    pub fn invert(&self, input : &[f32], previous : &[f32]) -> Vec<f32> {\n        let mut answer: Vec<f32> = Vec::from(input);\n        for (x,y) in answer.iter_mut().zip(&self.weights){\n            *x = if *y ==0.0 {0.0} else {*x/y};\n        }\n        match &self.transform_method {\n            NONE => {},\n            DIFFERENCE=> {self.add(&mut answer,previous)},\n            SUBTRACT_MA=> {self.add_ma(&mut answer)},\n            NORMALIZE=> {self.normalize_invert(&mut answer)},\n            NORMALIZE_DIFFERENCE => {self.normalize_difference_invert(&mut 
answer,previous)},\n            WEIGHTED => {self.weight_invert(&mut answer)},\n        };\n        answer\n    }\n\n    pub fn invert_forecast(&self, forecast :&mut RangeVector<f32>, previous : &[f32]) -> Result<()>{\n        let horizon = forecast.values.len() / self.input_length;\n        for i in 0..horizon {\n            for j in 0..self.input_length {\n                let factor = if self.weights[j] == 0.0 { 0.0 } else { 1.0 / self.weights[j] };\n                if self.transform_method != NONE {\n                    forecast.scale(i * self.input_length + j, factor as f32);\n                }\n\n                if self.transform_method == NORMALIZE || self.transform_method == NORMALIZE_DIFFERENCE {\n                    forecast.scale(i * self.input_length + j, self.normalized_scale(j));\n                }\n\n                forecast.shift(i * self.input_length + j, i as f32 * self.basic_drift(j));\n\n                if self.transform_method == NORMALIZE || self.transform_method == SUBTRACT_MA {\n                    forecast.shift(i * self.input_length + j, self.basic_shift(j));\n                }\n            }\n        }\n        if self.transform_method == DIFFERENCE || self.transform_method == NORMALIZE_DIFFERENCE {\n            forecast.cascaded_add(previous)?;\n        }\n        Ok(())\n    }\n\n    pub fn scale(&self) -> Vec<f32> {\n        let mut answer = self.weights.clone();\n        if self.transform_method == NORMALIZE || self.transform_method == NORMALIZE_DIFFERENCE {\n            for i in 0..self.input_length {\n                answer[i] *= self.normalized_scale(i);\n            }\n        }\n        answer\n    }\n\n    pub fn shift(&self) -> Vec<f32> {\n        let mut answer = vec![0.0;self.input_length];\n        if self.transform_method == NORMALIZE || self.transform_method == SUBTRACT_MA {\n            for i in 0..self.input_length {\n                answer[i] += self.basic_shift(i);\n            }\n        }\n        answer\n    }\n\n    
pub fn difference_deviations(&self) -> Vec<f32> {\n        let mut answer = vec![0.0f32;self.input_length];\n        for i in 0..self.input_length {\n            answer[i] = self.deviations[ i + 4*self.input_length].mean() as f32;\n        }\n        answer\n    }\n\n}\n"
  },
  {
    "path": "Rust/src/trcf/types.rs",
    "content": "use std::fmt;\n\n#[derive(Clone, Copy)]\n#[derive(PartialEq)]\npub enum TransformMethod {\n    NONE,\n    WEIGHTED,\n    DIFFERENCE,\n    SUBTRACT_MA,\n    NORMALIZE,\n    NORMALIZE_DIFFERENCE\n}\n\nimpl fmt::Display for TransformMethod {\n    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n        let printable = match *self {\n            TransformMethod::NONE => \"NONE\",\n            TransformMethod::NORMALIZE => \"NORMALIZE\" ,\n            TransformMethod::NORMALIZE_DIFFERENCE=> \"NORMALIZE_DIFFERENCE\",\n            TransformMethod::SUBTRACT_MA=> \"SUBTRACT_MA\",\n            TransformMethod::DIFFERENCE => \"DIFFERENCE\",\n            TransformMethod::WEIGHTED=> \"WEIGHTED\",\n        };\n        write!(f, \"{}\", printable)\n    }\n}\n\n#[derive(Clone, Copy)]\n#[derive(PartialEq)]\npub enum ImputationMethod {\n\n    //use a fixed set of specified values (same as input dimension)\n    FIXED,\n    // last known value in each input dimension\n    PREVIOUS,\n    //next seen value in each input dimension\n    NEXT,\n    // linear interpolation\n    LINEAR,\n    // use the RCF imputation; but would often require a minimum number of\n    // observations and would use defaults (often LINEAR) till that point\n    USE_RCF\n}\n\n#[derive(Clone, Copy)]\n#[derive(PartialEq)]\npub enum ForestMode {\n    /**\n     * a standard mode that uses shingling and most known applications; it uses the\n     * last K data points where K=1 would correspond to non time series (population)\n     * analysis\n     */\n    STANDARD,\n    /**\n     * time stamp is added automatically to data to correlate within RCF itself;\n     * this is useful for event streaams and for modeling sparse events. Option is\n     * provided to normalize the time gaps.\n     */\n    TIME_AUGMENTED,\n    /**\n     * uses various Fill-In strageies for data with gaps but not really sparse. 
Must\n     * have shingleSize greater than 1, typically larger shingle size is better, and\n     * so is fewer input dimensions\n     */\n    STREAMING_IMPUTE\n}\n\n// alternate scoring that can be thresholded differently\n\n#[derive(Clone, Copy)]\n#[derive(PartialEq)]\npub enum ScoringStrategy{\n    EXPECTED_INVERSE_HEIGHT,\n    /**\n     * This is the same as STANDARD mode where the scoring function is switched to\n     * distances between the vectors. Since RCFs build a multiresolution tree, and\n     * in the aggregate, preserves distances to some approximation, this provides an\n     * alternate anomaly detection mechanism which can be useful for shingleSize = 1\n     * and (dynamic) population analysis via RCFs. Specifially it switches the\n     * scoring to be based on the distance computation in the Density Estimation\n     * (interpolation). This allows for a direct comparison of clustering based\n     * outlier detection and RCFs over numeric vectors. All transformations\n     * available to the STANDARD mode in the ThresholdedRCF are available for this\n     * mode as well; this does not affect RandomCutForest core in any way. 
For\n     * timeseries analysis the STANDARD mode is recommended, but this does provide\n     * another option in combination with the TransformMethods.\n     */\n    DISTANCE\n}\n\n#[derive(Clone, Copy)]\n#[derive(PartialEq)]\npub enum CorrectionMode {\n\n    /**\n     * default behavior, no correction\n     */\n    NONE,\n\n    /**\n     * due to transforms, or due to input noise\n     */\n    NOISE,\n\n    /**\n     * elimination due to multi mode operation, not in use currently\n     */\n\n    MULTI_MODE,\n\n    /**\n     * effect of an anomaly in shingle\n     */\n\n    ANOMALY_IN_SHINGLE,\n\n    /**\n     * conditional forecast, using conditional fields\n     */\n\n    CONDITIONAL_FORECAST,\n\n    /**\n     * forecasted value was not very different\n     */\n\n    FORECAST,\n\n    /**\n     * data drifts and level shifts, will not be corrected unless level shifts are\n     * turned on\n     */\n\n    DATA_DRIFT\n\n}\n\n#[derive(Clone, Copy)]\n#[derive(PartialEq)]\npub enum Calibration {\n\n    NONE,\n\n    /**\n     * a basic staring point where the intervals are adjusted to be the minimal\n     * necessary based on past error the intervals are smaller -- but the interval\n     * precision will likely be close to 1 - 2 * percentile\n     */\n    MINIMAL,\n\n    /**\n     * a Markov inequality based interval, where the past error and model errors are\n     * additive. The interval precision is likely higher than MINIMAL but so are the\n     * intervals.\n     */\n    SIMPLE,\n\n}"
  },
  {
    "path": "Rust/src/types.rs",
    "content": "use std::hash::Hash;\nuse crate::errors;\n\n/// A trait that defines a maximum value constant.\npub trait Max {\n    const MAX: Self;\n}\n\nimpl Max for u8 {\n    const MAX: u8 = u8::MAX;\n}\n\nimpl Max for u16 {\n    const MAX: u16 = u16::MAX;\n}\n\nimpl Max for usize {\n    const MAX: usize = usize::MAX;\n}\n\n/// The Location trait is used as a shorthand for the various traits needed by store (e.g., point\n/// store, node store) locations. These are the values vended by stores to reference a stored\n/// value.\npub trait Location:\n    Copy + Max + Eq + Hash + PartialEq + TryFrom<usize> + Send + Sync\n{\n}\n\nimpl Location for u8 {}\nimpl Location for u16 {}\nimpl Location for usize {}\n\npub type Result<R> = std::result::Result<R, errors::RCFError>;\n"
  },
  {
    "path": "Rust/src/util.rs",
    "content": "use crate::{errors::RCFError, types::Result};\n\npub(crate) fn add_to(a: &f64, b: &mut f64) {\n    *b += *a;\n}\npub(crate) fn divide(a: &mut f64, b: usize) {\n    *a /= b as f64;\n}\n\npub(crate) fn maxf32(a : f32, b:f32) -> f32 {\n    if a<b {b} else {a}\n}\n\npub(crate) fn minf32(a : f32, b:f32) -> f32 {\n    if a<b {a} else {b}\n}\n\npub(crate) fn absf32(a : f32) -> f32 {\n    if a<0.0 {-a} else {a}\n}\n\npub(crate) fn add_nbr(a: &(f64, usize, f64), b: &mut Vec<(f64, usize, f64)>) {\n    b.push(*a)\n}\n\npub(crate) fn nbr_finish(_a: &mut Vec<(f64, usize, f64)>, _b: usize) {}\n\n/// If the test condition is false, return an InvalidArgument error with\n/// the given error message. Otherwise return Ok.\npub(crate) fn check_argument(test: bool, msg: &'static str) -> Result<()> {\n    if test {\n        Ok(())\n    } else {\n        Err(RCFError::InvalidArgument { msg: msg })\n    }\n}\n"
  },
  {
    "path": "Rust/src/visitor/attributionvisitor.rs",
    "content": "use num::abs;\n\nuse crate::{\n    common::divector::DiVector,\n    samplerplustree::nodeview::LargeNodeView,\n    visitor::visitor::{Visitor, VisitorInfo},\n    types::Result,\n};\n\n#[repr(C)]\npub struct AttributionVisitor {\n    converged: bool,\n    leaf_index: usize,\n    score: f64,\n    tree_mass: usize,\n    hit_duplicate: bool,\n    use_shadow_box: bool,\n    attribution: DiVector,\n    probability: DiVector,\n}\n\nimpl AttributionVisitor {\n    pub fn new(tree_mass: usize, dimension: usize, _visitor_info: &VisitorInfo) -> Self {\n        AttributionVisitor {\n            tree_mass,\n            leaf_index: usize::MAX,\n            converged: false,\n            score: 0.0,\n            hit_duplicate: false,\n            use_shadow_box: false,\n            attribution: DiVector::empty(dimension),\n            probability: DiVector::empty(dimension),\n        }\n    }\n\n    pub fn create_visitor(\n        tree_mass: usize,\n        parameters: &[usize],\n        visitor_info: &VisitorInfo,\n    ) -> AttributionVisitor {\n        let dimension = parameters[0];\n        AttributionVisitor::new(tree_mass, dimension, visitor_info)\n    }\n}\n\nimpl Visitor<LargeNodeView, DiVector> for AttributionVisitor {\n    fn accept_leaf(\n        &mut self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n        node_view: &LargeNodeView,\n    ) -> Result<()> {\n        let mass = node_view.mass();\n        self.leaf_index = node_view.leaf_index();\n        if mass > visitor_info.ignore_mass {\n            if node_view.is_duplicate() {\n                self.score = (visitor_info.damp)(mass, self.tree_mass)\n                    * (visitor_info.score_seen)(node_view.depth(), mass);\n                self.hit_duplicate = true;\n                self.use_shadow_box = true;\n            } else {\n                self.score = (visitor_info.score_unseen)(node_view.depth(), mass);\n                node_view.assign_probability_of_cut(&mut 
self.probability, point);\n                assert!(abs(self.probability.total() - 1.0) < 1e-6);\n                self.attribution.add_from(&self.probability, self.score);\n            }\n        } else {\n            self.score = (visitor_info.score_unseen)(node_view.depth(), mass);\n            self.use_shadow_box = true;\n        }\n        Ok(())\n    }\n\n    fn accept(&mut self, point: &[f32], visitor_info: &VisitorInfo, node_view: &LargeNodeView) -> Result<()>{\n        if !self.converged {\n            if !self.use_shadow_box {\n                node_view.assign_probability_of_cut(&mut self.probability, point);\n            } else {\n                node_view.assign_probability_of_cut_shadow_box(&mut self.probability, point);\n            };\n            let prob = self.probability.total();\n            if prob == 0.0 {\n                self.converged = true;\n            } else {\n                let new_value =\n                    (visitor_info.score_unseen)(node_view.depth(), node_view.mass());\n                if !self.hit_duplicate {\n                    self.score = (1.0 - prob) * self.score + prob * new_value;\n                }\n                self.attribution.scale(1.0 - prob);\n                self.attribution.add_from(&self.probability, new_value);\n            }\n        }\n        Ok(())\n    }\n\n    fn result(&self, visitor_info: &VisitorInfo) -> Result<DiVector> {\n        let t = (visitor_info.normalizer)(self.score, self.tree_mass);\n        let mut answer = self.attribution.clone();\n        answer.normalize(t);\n        Ok(answer)\n    }\n\n    fn is_converged(&self) -> Result<bool> {\n        Ok(self.converged)\n    }\n\n    fn use_shadow_box(&self) -> bool {\n        self.use_shadow_box\n    }\n}\n"
  },
  {
    "path": "Rust/src/visitor/imputevisitor.rs",
    "content": "use num::abs;\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\n\nuse crate::{\n    samplerplustree::nodeview::MediumNodeView,\n    visitor::visitor::{SimpleMultiVisitor, Visitor, VisitorInfo},\n    types::Result\n};\nuse crate::util::check_argument;\n\n#[repr(C)]\npub struct ImputeVisitor {\n    centrality: f64,\n    tree_mass: usize,\n    rng: ChaCha20Rng,\n    missing: Vec<usize>,\n    stack: Vec<ImputeVisitorStackElement>,\n    use_shadow_box: bool,\n}\n\n#[repr(C)]\nstruct ImputeVisitorStackElement {\n    converged: bool,\n    score: f64,\n    random: f32,\n    index: usize,\n    distance: f64,\n}\n\nimpl ImputeVisitor {\n    pub fn new(missing: &[usize], centrality: f64, tree_mass: usize, seed: u64) -> Self {\n        ImputeVisitor {\n            tree_mass,\n            centrality,\n            rng: ChaCha20Rng::seed_from_u64(seed),\n            missing: Vec::from(missing),\n            stack: Vec::new(),\n            use_shadow_box: false,\n        }\n    }\n\n    pub fn create_nbr_visitor(\n        tree_mass: usize,\n        parameters: &[usize],\n        _visitor_info: &VisitorInfo,\n    ) -> Self {\n        let percentile = if parameters.len() > 0 {\n            parameters[0]\n        } else {\n            50\n        };\n        let seed = if parameters.len() > 1 {\n            parameters[1]\n        } else {\n            0\n        };\n        let centrality = if percentile < 5 || percentile > 95 {\n            0.0\n        } else {\n            1.0 - abs(1.0 - percentile as f64 / 50.0)\n        };\n        ImputeVisitor::new(&Vec::new(), centrality, tree_mass, seed as u64)\n    }\n\n    /// the following function allows the score to vary between the score used in\n    /// anomaly detection and fully random sample based on the parameter centrality\n    /// these two cases correspond to centrality = 1 and centrality = 0 respectively\n\n    fn adjusted_score(&self, e: &ImputeVisitorStackElement, visitor_info: &VisitorInfo) -> 
f64 {\n        self.centrality * (visitor_info.normalizer)(e.score, self.tree_mass)\n            + (1.0 - self.centrality) * e.random as f64\n    }\n}\n\nimpl Visitor<MediumNodeView, (f64, usize, f64)> for ImputeVisitor {\n    fn accept_leaf(\n        &mut self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n        node_view: &MediumNodeView,\n    ) -> Result<()>{\n        let mass = node_view.mass();\n        let leaf_point = node_view.leaf_point();\n        let mut new_point = Vec::from(point);\n        for i in self.missing.iter() {\n            new_point[*i] = leaf_point[*i];\n        }\n\n        let mut converged = false;\n        let score: f64;\n        if mass > visitor_info.ignore_mass || self.missing.len() != 0 {\n            if node_view.is_duplicate() {\n                score = (visitor_info.damp)(mass, self.tree_mass)\n                    * (visitor_info.score_seen)(node_view.depth(), mass);\n                converged = true;\n            } else {\n                score = (visitor_info.score_unseen)(node_view.depth(), mass);\n            }\n        } else {\n            // shadow box is undefined for missing values\n            // for not missing values, this block corresponds to exact same evaluation\n            // in score and attribution visitor\n            // note that multi-visitors ignore the shadow box anyways\n            score = (visitor_info.score_unseen)(node_view.depth(), mass);\n            self.use_shadow_box = true;\n        }\n        let dist = (visitor_info.distance)(&new_point, &leaf_point);\n        self.stack.push(ImputeVisitorStackElement {\n            converged,\n            score,\n            index: node_view.leaf_index(),\n            random: self.rng.gen::<f32>(),\n            distance: dist,\n        });\n        Ok(())\n    }\n\n    fn accept(&mut self, _point: &[f32], visitor_info: &VisitorInfo, node_view: &MediumNodeView) -> Result<()>{\n        let mut top_of_stack = self.stack.pop().expect(\" stack 
cannot be empty\");\n        if !top_of_stack.converged {\n            let prob = if !self.use_shadow_box {\n                // note that this probability ignores any missing coordinates\n                // which would be accurate since the value used is inside the box\n                node_view.probability_of_cut()\n            } else {\n                node_view.shadow_box_probability_of_cut()\n            };\n            if prob == 0.0 {\n                top_of_stack.converged = true;\n            } else {\n                let new_score = (1.0 - prob) * top_of_stack.score\n                    + prob\n                        * (visitor_info.score_unseen)(node_view.depth(), node_view.mass());\n                top_of_stack.converged = false;\n                top_of_stack.score = new_score;\n            }\n            self.stack.push(top_of_stack);\n        }\n        Ok(())\n    }\n\n    fn result(&self, visitor_info: &VisitorInfo) -> Result<(f64, usize, f64)> {\n        check_argument(self.stack.len() == 1,\n            \"incorrect state, stack length should be 1\"\n        )?;\n        let top_of_stack = self.stack.last().expect(\"should be length 1\");\n        let t = (visitor_info.normalizer)(top_of_stack.score, self.tree_mass);\n        Ok((t, top_of_stack.index, top_of_stack.distance))\n    }\n\n    fn is_converged(&self) -> Result<bool> {\n        Ok(self.stack.len() != 0 && self.stack.last().expect(\"cannot be empty\").converged)\n    }\n\n    fn use_shadow_box(&self) -> bool {\n        self.use_shadow_box\n    }\n}\n\nimpl SimpleMultiVisitor<MediumNodeView, (f64, usize, f64)> for ImputeVisitor {\n    fn combine_branches(\n        &mut self,\n        _point: &[f32],\n        _node_view: &MediumNodeView,\n        visitor_info: &VisitorInfo,\n    ) -> Result<()>{\n        let mut top_of_stack = self.stack.pop().expect(\"has to be size 1, corrupt state\");\n        let mut next_of_stack = self.stack.pop().expect(\"has to be size 2, corrupt state\");\n\n       
 if self.adjusted_score(&top_of_stack, &visitor_info)\n            < self.adjusted_score(&next_of_stack, &visitor_info)\n        {\n            top_of_stack.converged = top_of_stack.converged || next_of_stack.converged;\n            self.stack.push(top_of_stack);\n        } else {\n            next_of_stack.converged = top_of_stack.converged || next_of_stack.converged;\n            self.stack.push(next_of_stack);\n        }\n        Ok(())\n    }\n}\n"
  },
  {
    "path": "Rust/src/visitor/interpolationvisitor.rs",
    "content": "use crate::{\n    common::{directionaldensity::InterpolationMeasure},\n    samplerplustree::nodeview::LargeNodeView,\n    visitor::visitor::{Visitor, VisitorInfo},\n    types::Result,\n};\nuse crate::errors::RCFError;\n\n#[repr(C)]\npub struct InterpolationVisitor {\n    converged: bool,\n    leaf_index: usize,\n    score: f64,\n    tree_mass: usize,\n    hit_duplicate: bool,\n    use_shadow_box: bool,\n    interpolation_measure: InterpolationMeasure,\n}\n\nimpl InterpolationVisitor {\n    pub fn new(tree_mass: usize, dimension: usize, visitor_info: &VisitorInfo) -> Self {\n        InterpolationVisitor {\n            tree_mass,\n            leaf_index: usize::MAX,\n            converged: false,\n            score: 0.0,\n            hit_duplicate: false,\n            use_shadow_box: false,\n            interpolation_measure: InterpolationMeasure::empty(dimension, tree_mass as f32),\n        }\n    }\n\n    pub fn create_visitor(\n        tree_mass: usize,\n        parameters: &[usize],\n        visitor_info: &VisitorInfo,\n    ) -> InterpolationVisitor {\n        let dimension = parameters[0];\n        InterpolationVisitor::new(tree_mass, dimension, visitor_info)\n    }\n}\n\nimpl Visitor<LargeNodeView, InterpolationMeasure> for InterpolationVisitor {\n    fn accept_leaf(\n        &mut self,\n        point: &[f32],\n        visitor_info: &VisitorInfo,\n        node_view: &LargeNodeView,\n    ) ->Result<()>{\n        let mass = node_view.mass();\n        self.leaf_index = node_view.leaf_index();\n        if mass > visitor_info.ignore_mass {\n            if node_view.is_duplicate() {\n                self.score = (visitor_info.damp)(mass, self.tree_mass)\n                    * (visitor_info.score_seen)(node_view.depth(), mass);\n                self.hit_duplicate = true;\n                self.use_shadow_box = true;\n            } else {\n                let t = (visitor_info.score_unseen)(node_view.depth(), mass);\n                self.score = t;\n     
           match &node_view.bounding_box() {\n                    Some(x) => {self.interpolation_measure.update(point, x, t); Ok(())},\n                    _ => Err(RCFError::InvalidArgument {msg :\" incorrect state\"})\n                }?;\n            }\n        } else {\n            self.score = (visitor_info.score_unseen)(node_view.depth(), mass);\n            self.use_shadow_box = true;\n        }\n        Ok(())\n    }\n\n    fn accept(&mut self, point: &[f32], visitor_info: &VisitorInfo, node_view: &LargeNodeView) -> Result<()>{\n        if !self.converged {\n            let bounding_box = if !self.use_shadow_box {\n                node_view.bounding_box()\n            } else {\n                node_view.shadow_box()\n            };\n            let new_value =\n                (visitor_info.score_unseen)(node_view.depth(), node_view.mass());\n            let prob = match &bounding_box {\n                Some(x) => Ok(self.interpolation_measure.update(point, &x, new_value)),\n                _ => Err(RCFError::InvalidArgument {msg: \"incorrect state\"})\n            }?;\n            if prob == 0.0 {\n                self.converged = true;\n            } else {\n                if !self.hit_duplicate {\n                    self.score = (1.0 - prob) * self.score + prob * new_value;\n                }\n            }\n        }\n        Ok(())\n    }\n\n    fn result(&self, visitor_info: &VisitorInfo) -> Result<InterpolationMeasure> {\n        let t = (visitor_info.normalizer)(self.score, self.tree_mass);\n        let mut answer = self.interpolation_measure.clone();\n        answer.measure.normalize(t);\n        Ok(answer)\n    }\n\n    fn is_converged(&self) -> Result<bool> {\n        Ok(self.converged)\n    }\n\n    fn use_shadow_box(&self) -> bool {\n        self.use_shadow_box\n    }\n}\n"
  },
  {
    "path": "Rust/src/visitor/mod.rs",
    "content": "pub mod attributionvisitor;\npub mod imputevisitor;\npub mod interpolationvisitor;\npub mod scalarscorevisitor;\npub mod visitor;\n"
  },
  {
    "path": "Rust/src/visitor/scalarscorevisitor.rs",
    "content": "use crate::{\n    samplerplustree::nodeview::SmallNodeView,\n    visitor::visitor::{Visitor, VisitorInfo},\n    types::Result,\n};\n\n#[repr(C)]\npub struct ScalarScoreVisitor {\n    converged: bool,\n    leaf_index: usize,\n    score: f64,\n    tree_mass: usize,\n    use_shadow_box: bool,\n}\n\nimpl ScalarScoreVisitor {\n    pub fn default(tree_mass: usize, _parameters: &[usize], _visitor_info: &VisitorInfo) -> Self {\n        ScalarScoreVisitor {\n            tree_mass,\n            leaf_index: usize::MAX,\n            converged: false,\n            score: 0.0,\n            use_shadow_box: false,\n        }\n    }\n}\n\nimpl Visitor<SmallNodeView, f64> for ScalarScoreVisitor {\n    fn accept_leaf(\n        &mut self,\n        _point: &[f32],\n        visitor_info: &VisitorInfo,\n        node_view: &SmallNodeView,\n    ) ->Result<()> {\n        let mass = node_view.mass();\n        self.leaf_index = node_view.leaf_index();\n        if mass > visitor_info.ignore_mass {\n            if node_view.is_duplicate() {\n                self.score = (visitor_info.damp)(mass, self.tree_mass)\n                    * (visitor_info.score_seen)(node_view.depth(), mass);\n                self.converged = true;\n            } else {\n                self.score = (visitor_info.score_unseen)(node_view.depth(), mass);\n            }\n        } else {\n            self.score = (visitor_info.score_unseen)(node_view.depth(), mass);\n            self.use_shadow_box = true;\n        }\n        Ok(())\n    }\n\n    fn accept(&mut self, _point: &[f32], visitor_info: &VisitorInfo, node_view: &SmallNodeView) -> Result<()>{\n        if !self.converged {\n            let prob = if !self.use_shadow_box {\n                node_view.probability_of_cut()\n            } else {\n                node_view.shadow_box_probability_of_cut()\n            };\n            if prob == 0.0 {\n                self.converged = true;\n            } else {\n                self.score = (1.0 - prob) * 
self.score\n                    + prob\n                        * (visitor_info.score_unseen)(node_view.depth(), node_view.mass());\n            }\n        }\n        Ok(())\n    }\n\n    fn result(&self, visitor_info: &VisitorInfo) -> Result<f64> {\n        Ok((visitor_info.normalizer)(self.score, self.tree_mass))\n    }\n\n    fn is_converged(&self) -> Result<bool> {\n        Ok(self.converged)\n    }\n\n    fn use_shadow_box(&self) -> bool {\n        self.use_shadow_box\n    }\n}\n"
  },
  {
    "path": "Rust/src/visitor/visitor.rs",
    "content": "use crate::{\n    l1distance,\n    rcf::{\n        damp, displacement_normalizer, identity, normalizer, score_seen, score_seen_displacement,\n        score_unseen, score_unseen_displacement,\n    },\n    types::Result,\n};\n\n#[repr(C)]\npub struct VisitorInfo {\n    pub ignore_mass: usize,\n    pub score_seen: fn(usize, usize) -> f64,\n    pub score_unseen: fn(usize, usize) -> f64,\n    pub damp: fn(usize, usize) -> f64,\n    pub normalizer: fn(f64, usize) -> f64,\n    pub distance: fn(&[f32], &[f32]) -> f64,\n}\n\npub trait Visitor<NodeView, R> {\n    fn accept(&mut self, point: &[f32], visitor_info: &VisitorInfo, node_view: &NodeView) -> Result<()>;\n    fn accept_leaf(&mut self, point: &[f32], visitor_info: &VisitorInfo, node_view: &NodeView) -> Result<()>;\n    fn is_converged(&self) -> Result<bool>;\n    fn result(&self, visitor_info: &VisitorInfo) -> Result<R>;\n    fn use_shadow_box(&self) -> bool;\n}\n\npub trait SimpleMultiVisitor<NodeView, R>: Visitor<NodeView, R> {\n    fn combine_branches(\n        &mut self,\n        point: &[f32],\n        _node_view: &NodeView,\n        visitor_info: &VisitorInfo,\n    ) -> Result<()>;\n}\n\npub trait UniqueMultiVisitor<NodeView, R>: SimpleMultiVisitor<NodeView, R> {\n    fn trigger(&self, point: &[f32], node_view: &NodeView, visitor_info: &VisitorInfo) -> bool;\n    fn unique_answer(&self, visitor_info: &VisitorInfo) -> Vec<f32>;\n}\n\npub trait StreamingMultiVisitor<NodeView, R>: UniqueMultiVisitor<NodeView, R> {\n    fn initialize_branch_split(\n        &mut self,\n        point: &[f32],\n        node_view: &NodeView,\n        visitor_info: &VisitorInfo,\n    );\n    fn second_branch(&mut self, point: &[f32], node_view: &NodeView, visitor_info: &VisitorInfo);\n}\n\nimpl VisitorInfo {\n    pub fn default() -> Self {\n        VisitorInfo {\n            ignore_mass: 0,\n            score_seen,\n            score_unseen,\n            damp,\n            normalizer,\n            distance: l1distance,\n  
      }\n    }\n    pub fn displacement() -> Self {\n        VisitorInfo {\n            ignore_mass: 0,\n            score_seen: score_seen_displacement,\n            score_unseen: score_unseen_displacement,\n            damp,\n            normalizer: displacement_normalizer,\n            distance: l1distance,\n        }\n    }\n    pub fn density() -> Self {\n        VisitorInfo {\n            ignore_mass: 0,\n            score_seen: score_unseen_displacement,\n            score_unseen: score_unseen_displacement,\n            damp,\n            normalizer: identity,\n            distance: l1distance,\n        }\n    }\n    pub fn use_score(\n        ignore_mass: usize,\n        score_seen: fn(usize, usize) -> f64,\n        score_unseen: fn(usize, usize) -> f64,\n        damp: fn(usize, usize) -> f64,\n        normalizer: fn(f64, usize) -> f64,\n    ) -> Self {\n        VisitorInfo {\n            ignore_mass,\n            score_seen,\n            score_unseen,\n            damp,\n            normalizer,\n            distance: l1distance,\n        }\n    }\n    pub fn use_distance(distance: fn(&[f32], &[f32]) -> f64) -> Self {\n        VisitorInfo {\n            ignore_mass: 0,\n            score_seen,\n            score_unseen,\n            damp,\n            normalizer,\n            distance,\n        }\n    }\n}\n"
  },
  {
    "path": "Rust/tests/anomalydetectionattributionupdate.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse num::abs;\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn anomalydetection_attribution_and_update() {\n    let shingle_size = 8;\n    let base_dimension = 5;\n    let data_size = 100000;\n    let number_of_trees = 30;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = false;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = true;\n    let internal_rotation = false;\n    let noise = 5.0;\n\n    let mut forest = RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n            .tree_capacity(capacity)\n            .number_of_trees(number_of_trees)\n            .random_seed(random_seed)\n            .store_attributes(store_attributes)\n            .parallel_enabled(parallel_enabled)\n            .internal_shingling(internal_shingling)\n            .internal_rotation(internal_rotation)\n            .time_decay(time_decay)\n            .initial_accept_fraction(initial_accept_fraction)\n            .bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 100.0);\n    }\n    let data_with_key = MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        0,\n        base_dimension.into(),\n    ).unwrap();\n\n    
let mut score: f64 = 0.0;\n    let _next_index = 0;\n\n    for i in 0..data_with_key.data.len() {\n        let attribution = forest.attribution(&data_with_key.data[i]).unwrap();\n        let new_score = forest.score(&data_with_key.data[i]).unwrap();\n        assert!(abs(new_score - attribution.total()) < 1e-6);\n\n        /*\n        if next_index < data_with_key.change_indices.len() && data_with_key.change_indices[next_index] == i {\n            println!(\" score at change {} position {} \", new_score, i);\n            next_index += 1;\n        }\n        */\n\n        score += attribution.total();\n        forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    println!(\n        \"Average score {} \",\n        (score / data_with_key.data.len() as f64)\n    );\n    assert!(\n        score < data_with_key.data.len() as f64,\n        \" average score is above 1\"\n    );\n    println!(\"Success! {}\", forest.entries_seen());\n    println!(\"PointStore Size {} \", forest.point_store_size());\n    println!(\"Total size {} bytes (approx)\", forest.size());\n}\n"
  },
  {
    "path": "Rust/tests/anomalydetectionimputescoreupdate.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn anomalydetection_impute_score_and_update() {\n    let shingle_size = 8;\n    let base_dimension = 5;\n    let data_size = 100000;\n    let number_of_trees = 30;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = true;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = true;\n    let internal_rotation = false;\n    let noise = 5.0;\n\n    let mut forest: Box<dyn RCF> = RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n        .tree_capacity(capacity)\n        .number_of_trees(number_of_trees)\n        .random_seed(random_seed)\n        .store_attributes(store_attributes)\n        .parallel_enabled(parallel_enabled)\n        .internal_shingling(internal_shingling)\n        .internal_rotation(internal_rotation)\n        .time_decay(time_decay)\n        .initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build_default().unwrap();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 100.0);\n    }\n    let data_with_key = MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        0,\n        base_dimension.into(),\n    ).unwrap();\n\n    let mut score: f64 = 0.0;\n    let 
_next_index = 0;\n    let mut error = 0.0;\n    let mut count = 0;\n\n    for i in 0..data_with_key.data.len() {\n        if i > 200 {\n            let next_values = forest.extrapolate(1).unwrap().values;\n            assert_eq!(next_values.len(), base_dimension);\n            error += next_values\n                .iter()\n                .zip(&data_with_key.data[i])\n                .map(|(x, y)| ((x - y) as f64 * (x - y) as f64))\n                .sum::<f64>();\n            count += base_dimension;\n        }\n\n        let new_score = forest.score(&data_with_key.data[i]).unwrap();\n        //println!(\"{} {} score {}\",y,i,new_score);\n        /*\n        if next_index < data_with_key.change_indices.len() && data_with_key.change_indices[next_index] == i {\n            println!(\" score at change {} position {} \", new_score, i);\n            next_index += 1;\n        }\n        */\n\n        score += new_score;\n        forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    println!(\n        \"Average score {} \",\n        (score / data_with_key.data.len() as f64)\n    );\n    println!(\"Success! {}\", forest.entries_seen());\n    println!(\"PointStore Size {} \", forest.point_store_size());\n    println!(\"Total size {} bytes (approx)\", forest.size());\n    println!(\n        \" RMSE {},  noise {} \",\n        f64::sqrt(error / count as f64),\n        noise\n    );\n}\n"
  },
  {
    "path": "Rust/tests/anomalydetectionscoreupdate.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn anomalydetection_score_and_update() {\n    let shingle_size = 8;\n    let base_dimension = 5;\n    let data_size = 100000;\n    let number_of_trees = 30;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = false;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = true;\n    let internal_rotation = false;\n    let noise = 5.0;\n\n    let mut forest: Box<dyn RCF> = RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n        .tree_capacity(capacity)\n        .number_of_trees(number_of_trees)\n        .random_seed(random_seed)\n        .store_attributes(store_attributes)\n        .parallel_enabled(parallel_enabled)\n        .internal_shingling(internal_shingling)\n        .internal_rotation(internal_rotation)\n        .time_decay(time_decay)\n        .initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build_default().unwrap();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 100.0);\n    }\n    let data_with_key = MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        0,\n        base_dimension.into(),\n    ).unwrap();\n\n    let mut score: f64 = 0.0;\n    let 
_next_index = 0;\n\n    for i in 0..data_with_key.data.len() {\n        let new_score = forest.score(&data_with_key.data[i]).unwrap();\n        //println!(\"{} {} score {}\",y,i,new_score);\n        /*\n        if next_index < data_with_key.change_indices.len() && data_with_key.change_indices[next_index] == i {\n            println!(\" score at change {} position {} \", new_score, i);\n            next_index += 1;\n        }\n        */\n\n        score += new_score;\n        forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    println!(\n        \"Average score {} \",\n        (score / data_with_key.data.len() as f64)\n    );\n    assert!(\n        score < data_with_key.data.len() as f64,\n        \" average score is above 1\"\n    );\n    println!(\"Success! {}\", forest.entries_seen());\n    println!(\"PointStore Size {} \", forest.point_store_size());\n    println!(\"Total size {} bytes (approx)\", forest.size());\n}\n"
  },
  {
    "path": "Rust/tests/basicrcftest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse num::abs;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{RCF},\n    visitor::visitor::VisitorInfo,\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn two_distribution_test_static() {\n    let data_size = 1000;\n    let dimensions = 20;\n    let yard_stick = 5.0;\n    let mut vec1 = vec![0.0f32; dimensions];\n    let mut vec2 = vec![0.0f32; dimensions];\n    vec1[0] = yard_stick;\n    vec2[0] = -yard_stick;\n    let scale = vec![vec![0.1f32; dimensions], vec![0.1f32; dimensions]];\n    let mean = vec![vec1.clone(), vec2.clone()].clone();\n    let data_with_key =\n        MultiDimDataWithKey::mixture(data_size, &mean, &scale, &vec![0.5f32, 0.5f32], 0).unwrap();\n\n    let shingle_size = 1;\n    let number_of_trees = 50;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = true;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = false;\n    let internal_rotation = false;\n\n    let mut forest = RCFBuilder::<u64,u64>::new(dimensions,shingle_size)\n        .tree_capacity(capacity)\n        .number_of_trees(number_of_trees)\n        .random_seed(random_seed)\n        .store_attributes(store_attributes)\n        .parallel_enabled(parallel_enabled)\n        .internal_shingling(internal_shingling)\n        .internal_rotation(internal_rotation)\n        .time_decay(time_decay)\n        .initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n\n    let mut another_forest =\n        
RCFBuilder::<u64,u64>::new(dimensions,shingle_size)\n            .tree_capacity(2*capacity)\n            .number_of_trees(number_of_trees)\n            .random_seed(random_seed)\n            .store_attributes(store_attributes)\n            .parallel_enabled(parallel_enabled)\n            .internal_shingling(internal_shingling)\n            .internal_rotation(internal_rotation)\n            .time_decay(time_decay)\n            .initial_accept_fraction(initial_accept_fraction)\n            .bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n\n    for i in 0..data_with_key.data.len() {\n        forest.update(&data_with_key.data[i], 0).unwrap();\n        another_forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    let anomaly = vec![0.0f32; dimensions];\n\n    assert!(forest.score(&anomaly).unwrap() > 1.5);\n    assert!(forest.displacement_score(&anomaly).unwrap() * f64::log2(capacity as f64) > 1.5);\n    let interpolant = forest\n        .interpolation_visitor_traversal(&anomaly, &VisitorInfo::default())\n        .unwrap();\n    let attribution = forest.attribution(&anomaly).unwrap();\n    assert!(attribution.high[0] > 0.75);\n    assert!(attribution.low[0] > 0.75);\n    for i in 1..dimensions {\n        assert!(attribution.high[i] < 0.1);\n        assert!(attribution.low[i] < 0.1);\n        assert!(abs(attribution.low[i] - interpolant.measure.low[i]) < 1e-6);\n        assert!(abs(attribution.high[i] - interpolant.measure.high[i]) < 1e-6);\n    }\n    assert!(abs(attribution.low[0] - interpolant.measure.low[0]) < 1e-6);\n    assert!(abs(attribution.high[0] - interpolant.measure.high[0]) < 1e-6);\n\n    // a three sigma radius\n    assert!(\n        abs(interpolant.distance.high[0] - yard_stick as f64 * interpolant.probability_mass.high[0])\n            < 0.3\n    );\n    assert!(\n        abs(interpolant.distance.low[0] - yard_stick as f64 * interpolant.probability_mass.low[0])\n            < 0.3\n    );\n    
assert!(interpolant.distance.high[1] < 0.1);\n    assert!(interpolant.distance.low[1] < 0.1);\n    assert!(interpolant.probability_mass.high[1] < 0.1);\n    assert!(interpolant.probability_mass.low[1] < 0.1);\n    assert!(interpolant.probability_mass.high[0] > 0.4);\n    assert!(interpolant.probability_mass.low[0] > 0.4);\n    let score = forest.score(&anomaly).unwrap();\n\n    assert!(abs(score - attribution.total()) < 1e-6);\n    // score is calibrated for clear cut anomalies, even if sample size doubles ...\n    assert!(abs(score - another_forest.score(&anomaly).unwrap()) < 0.1 * score);\n\n    // scores of non-anomalies are not calibrated to be the same but\n    // are below 1 and should be close\n\n    assert!(abs(forest.score(&vec1).unwrap() - another_forest.score(&vec1).unwrap()) < 0.1);\n    assert!(abs(forest.score(&vec2).unwrap() - another_forest.score(&vec2).unwrap()) < 0.1);\n    assert!(forest.score(&vec1).unwrap() < 0.8);\n    assert!(forest.score(&vec2).unwrap() < 0.8);\n\n    let displacement_score = forest.displacement_score(&anomaly).unwrap();\n    // displacement is calibrated for clear cut anomalies\n    // samplesize did not matter for such\n    assert!(\n        abs(displacement_score - another_forest.displacement_score(&anomaly).unwrap())\n            < 0.1 * displacement_score\n    );\n\n    // displacement is NOT the same for dense regions; larger samplesize\n    // leads to lower score; in fact the gap is close to the ratio of samplesize\n    // due to normalization\n    assert!(\n        forest.displacement_score(&vec1).unwrap()\n            > 1.5 * another_forest.displacement_score(&vec1).unwrap()\n    );\n    assert!(\n        forest.displacement_score(&vec2).unwrap()\n            > 1.5 * another_forest.displacement_score(&vec2).unwrap()\n    );\n\n    // multiplied by log_2 ; the displacement score is in the same numeric\n    // range [0..log_2(sample size)] as the regular score\n    assert!(displacement_score * f64::log2(capacity as 
f64) > 2.0);\n\n    // in contrast to displacement, density is calibrated at the dense points\n    assert!(\n        abs(forest.density(&vec1).unwrap() - another_forest.density(&vec1).unwrap())\n            < 0.1 * forest.density(&vec1).unwrap()\n    );\n    assert!(\n        abs(forest.density(&vec2).unwrap() - another_forest.density(&vec2).unwrap())\n            < 0.1 * forest.density(&vec2).unwrap()\n    );\n    // and much more than at anomalous points\n    assert!(forest.density(&vec1).unwrap() > capacity as f64 * forest.density(&anomaly).unwrap());\n    assert!(forest.density(&vec2).unwrap() > capacity as f64 * forest.density(&anomaly).unwrap());\n\n    // but now, unlike displacement, the  calibration is awry at potential anomalies\n    // and moreover is in the other direction; larger samplesize gives larger densities because\n    // of spurious points coming closer. This is a core intuition of observations/observability; the\n    // calibration of central tendency (often used in forecast, also densities of dense regions)\n    // has different requirements compared to callibration at extremeties (anomalies, sparse regions)\n    assert!(another_forest.density(&anomaly).unwrap() > 1.5 * forest.density(&anomaly).unwrap());\n}\n"
  },
  {
    "path": "Rust/tests/basictrcftest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey,\n    trcf::basictrcf::BasicTRCF,\n    trcf::types::TransformMethod::NONE\n};\nuse rcflib::trcf::basictrcf::BasicTRCFBuilder;\nuse rcflib::trcf::types::ForestMode::STANDARD;\nuse rcflib::trcf::types::{TransformMethod};\nuse rcflib::trcf::types::TransformMethod::{DIFFERENCE, NORMALIZE, NORMALIZE_DIFFERENCE, SUBTRACT_MA, WEIGHTED};\nuse crate::rcflib::rcf::RCFOptionsBuilder;\nuse crate::rcflib::trcf::basictrcf::TRCFOptionsBuilder;\n#[cfg(test)]\nparameterized_test::create! { test_basic_trcf, (method), {\nbasic_trcf(method);\n}}\n\n#[cfg(test)]\nparameterized_test::create! { basic_trcf_scale, (method,base_dimension,verbose), {\n    trcf_scale(method,base_dimension,verbose,false);\n}}\n\n#[cfg(test)]\nparameterized_test::create! { trcf_scale_spikes, (method,base_dimension,verbose), {\n    trcf_scale(method,base_dimension,verbose,true);\n}}\n\nfn basic_trcf(transform_method : TransformMethod) {\n    let shingle_size = 8;\n    let base_dimension = 5;\n    let data_size = 1000;\n    let number_of_trees = 50;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = false;\n    let noise = 5.0;\n\n    let mut trcf = BasicTRCFBuilder::new(base_dimension,shingle_size)\n        .tree_capacity(capacity).number_of_trees(number_of_trees).random_seed(random_seed)\n        .transform_method(transform_method)\n        .forest_mode(STANDARD).parallel_enabled(parallel_enabled).verbose(true)\n        .time_decay(time_decay).transform_decay(time_decay).initial_accept_fraction(initial_accept_fraction)\n        
.bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 60.0);\n    }\n\n    let data_with_key = multidimdatawithkey::MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        42,\n        base_dimension.into(),\n    ).unwrap();\n\n    let mut next_index = 0;\n\n    println!(\"{}\", transform_method);\n    for i in 0..data_with_key.data.len() {\n        if next_index < data_with_key.change_indices.len() && data_with_key.change_indices[next_index] == i {\n            print!(\"timestamp {} INJECT [ {}\", i, data_with_key.changes[next_index][0]);\n            for j in 1..base_dimension {\n                print!(\", {}\", data_with_key.changes[next_index][j]);\n            }\n            println!(\"]\");\n            next_index += 1;\n        }\n        let result = trcf.process(&data_with_key.data[i], i as u64).unwrap();\n        if result.anomaly_grade > 0.0 {\n            print!(\"timestamp {} \", i);\n            let gap = -result.last_anomaly.as_ref().unwrap().relative_index;\n            if gap != 0 {\n                if gap == 1 {\n                    print!(\"1 step ago, \");\n                } else {\n                    print!(\"{} steps ago, \", gap);\n                }\n            }\n\n            let expected = &result.last_anomaly.as_ref().unwrap().expected_values_list[0];\n            let past = &result.last_anomaly.as_ref().unwrap().past_values;\n            print!(\"DETECT [ {}\", (past[0] - expected[0]));\n            for j in 1..base_dimension {\n                print!(\", {}\", (past[j] - expected[j]));\n            }\n            print!(\"]\");\n\n            println!(\" score {}, grade {}\", result.score, result.anomaly_grade);\n        }\n    }\n}\n\ntest_basic_trcf! 
{\n    a1: NONE,\n    a2: NORMALIZE,\n    a3: SUBTRACT_MA,\n    a4: NORMALIZE_DIFFERENCE,\n    a5: DIFFERENCE,\n    a6: WEIGHTED,\n}\n\nbasic_trcf_scale! {\n    b1: (NONE,3,false),\n    b2: (NONE,3,true),\n    b3: (NORMALIZE,3,false),\n    b4: (NORMALIZE,3,true),\n    b5: (SUBTRACT_MA,3,false),\n    b6: (SUBTRACT_MA,3,true),\n    b7: (NORMALIZE_DIFFERENCE,3,false),\n    b8: (NORMALIZE_DIFFERENCE,3,true),\n    b9: (DIFFERENCE,3,false),\n    b10: (DIFFERENCE,3,true),\n    b11: (WEIGHTED,3,false),\n    b12: (WEIGHTED,3,true),\n}\n\ntrcf_scale_spikes! {\n    c1: (NONE,1,false),\n    c2: (NONE,1,true),\n    c3: (NORMALIZE,1,false),\n    c4: (NORMALIZE,1,true),\n    c5: (SUBTRACT_MA,1,false),\n    c6: (SUBTRACT_MA,1,true),\n    c7: (NORMALIZE_DIFFERENCE,1,false),\n    c8: (NORMALIZE_DIFFERENCE,1,true),\n    c9: (DIFFERENCE,1,false),\n    c10: (DIFFERENCE,1,true),\n    c11: (WEIGHTED,1,false),\n    c12: (WEIGHTED,1,true),\n}\n\nfn trcf_scale(transform_method:TransformMethod, base_dimension: usize, verbose:bool, add_spikes: bool) {\n    let shingle_size = 8;\n    let data_size = 100000;\n    let number_of_trees = 50;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = false;\n    let noise = 5.0;\n\n    println!(\"At scale {}, add spikes? 
{} verbose = {} \", transform_method, add_spikes, verbose);\n    let mut trcf : BasicTRCF = BasicTRCFBuilder::new(base_dimension,shingle_size)\n        .tree_capacity(capacity).number_of_trees(number_of_trees).random_seed(random_seed)\n        .transform_method(transform_method)\n        .forest_mode(STANDARD).parallel_enabled(parallel_enabled).verbose(verbose)\n        .time_decay(time_decay).transform_decay(time_decay).initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n    let mut rng = ChaCha20Rng::from_entropy();\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 60.0);\n    }\n    let mut data_with_key = multidimdatawithkey::MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        rng.gen::<u64>(),\n        base_dimension.into(),\n    ).unwrap();\n\n    let mut potential_anomalies:Vec<usize> = Vec::new();\n\n    let mut next = 100 + (rng.gen::<f32>()*100.0) as usize;\n    for i in 0..data_with_key.data.len() {\n        if add_spikes && i == next {\n            data_with_key.data[i][0] += 100.0 * ( 1.0 + 0.05*rng.gen::<f32>());\n            next = 100 + (rng.gen::<f32>()*100.0) as usize;\n        }\n\n        let result = trcf.process(&data_with_key.data[i], 0).unwrap();\n        if result.anomaly_grade > 0.0 {\n            // some anomalies will be detected late\n            // we will keep the vector unsorted, so out of order detection will be penalized\n            potential_anomalies.push((i as i32 + result.last_anomaly.as_ref().unwrap().relative_index) as usize);\n        }\n    }\n\n    println!(\"{} anomalies injected in {} points\", data_with_key.changes.len(), data_size);\n    let mut common =0;\n    let mut i:usize =0;\n    let mut j:usize =0;\n    while i<potential_anomalies.len() && 
j<data_with_key.change_indices.len() {\n        if potential_anomalies[i] == data_with_key.change_indices[j] {\n            i += 1;\n            j += 1;\n            common += 1;\n        } else if potential_anomalies[i] < data_with_key.change_indices[j] {\n            i += 1;\n        } else {\n            j += 1;\n        }\n    }\n\n    println!(\"{} detected, precision {}, recall {}\",potential_anomalies.len(),\n             common as f32/potential_anomalies.len() as f32,\n             common as f32/data_with_key.change_indices.len() as f32);\n}\n"
  },
  {
    "path": "Rust/tests/clustertest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse num::abs;\n/// try cargo test --release\n/// these tests are designed to be longish\nuse rand::{prelude::ThreadRng, Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\nuse rcflib::{\n    common::{multidimdatawithkey::MultiDimDataWithKey},\n    l1distance, l2distance,\n};\nuse rcflib::common::cluster::{Center, multi_cluster_as_object_with_weight_array, multi_cluster_as_ref, multi_cluster_as_weighted_ref, multi_cluster_obj, persist, single_centroid_cluster_slice_with_weight_arrays, single_centroid_cluster_weighted_vec, single_centroid_cluster_weighted_vec_with_distance_over_slices, single_centroid_unweighted_cluster_slice};\n\n\nfn gen_data(data_size:usize, test_dimension:usize, seed:u64,yard_stick : f32) -> MultiDimDataWithKey {\n    let mut mean = Vec::new();\n    let mut scale = Vec::new();\n    for i in 0..test_dimension {\n        let mut vec1 = vec![0.0f32; test_dimension];\n        let mut vec2 = vec![0.0f32; test_dimension];\n        vec1[i] = 2.0 * yard_stick;\n        vec2[i] = -2.0 * yard_stick;\n        mean.push(vec1);\n        mean.push(vec2);\n        scale.push(vec![0.1f32; test_dimension]);\n        scale.push(vec![0.1f32; test_dimension]);\n    }\n    MultiDimDataWithKey::mixture(\n        data_size,\n        &mean,\n        &scale,\n        &vec![0.5 / test_dimension as f32; 2 * test_dimension],\n        seed,\n    ).unwrap()\n}\n\nfn test_center(result: &mut Vec<Center>,test_dimension:usize, yard_stick : f32) -> bool {\n    let mut answer = true;\n    for i in 0..test_dimension {\n        result.sort_by(|a, b| a.representative()[i].partial_cmp(&b.representative()[i]).unwrap());\n        answer = answer && abs(result[0].representative()[i] + 2.0 * yard_stick) < 0.2;\n        answer = answer\n            && abs(result[2 * test_dimension - 1].representative()[i] - 2.0 * yard_stick) < 0.2;\n        for j in 1..(2 * test_dimension - 1) {\n 
           answer = answer && abs(result[j].representative()[i]) < 0.2;\n        }\n    }\n    answer\n}\n\nfn bad_distance<T :?Sized>(_a : &T, _b:&T) -> f64{\n    -1.0\n}\n\n#[test]\nfn test_config() {\n    let test_dimension = 3;\n    let yard_stick = l1distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(1000,test_dimension,0u64,yard_stick);\n    let mut input = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push((data_with_key.data[i].clone(), 1.0f32));\n    }\n    let result = single_centroid_cluster_weighted_vec_with_distance_over_slices(&input, bad_distance, 2 * test_dimension + 3, false);\n\n    match &result {\n        Ok(_x) => assert!(false),\n        Err(_y) => assert!(true),\n    };\n\n    let result = single_centroid_cluster_weighted_vec_with_distance_over_slices(&input, l2distance, 0, false);\n\n    match &result {\n        Ok(_x) => assert!(false),\n        Err(_y) => assert!(true),\n    };\n\n    let result = single_centroid_cluster_weighted_vec_with_distance_over_slices(&input, l2distance, 200, false);\n\n    match &result {\n        Ok(_x) => assert!(false),\n        Err(_y) => assert!(true),\n    };\n\n    let result = single_centroid_cluster_weighted_vec_with_distance_over_slices(&input, l2distance, 20, false);\n\n    match &result {\n        Ok(_x) => assert!(true),\n        Err(_y) => assert!(false),\n    };\n}\n\nfn core(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push((data_with_key.data[i].clone(), 1.0f32));\n    }\n    let mut result = 
single_centroid_cluster_weighted_vec_with_distance_over_slices(&input, distance, 2 * test_dimension + 3, false).unwrap();\n    let answer = (result.len() == 2 * test_dimension) && test_center(&mut result,test_dimension,yard_stick);\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_cluster() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (core(200000, d, seed, l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\nfn core_as_slice_uniform(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n\n    let mut input:Vec<&[f32]> = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push(&data_with_key.data[i]);\n    }\n    let mut result = single_centroid_unweighted_cluster_slice(&input, distance, 2 * test_dimension + 3, false).unwrap();\n    let answer = (result.len() == 2 * test_dimension) && test_center(&mut result,test_dimension,yard_stick);\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_slice_uniform() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (core_as_slice_uniform(200000, d, seed, 
l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\n\nfn core_as_slice_weighted(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input:Vec<&[f32]> = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push(&data_with_key.data[i]);\n    }\n    let weights = vec![1.0f32;data_with_key.data.len()];\n    let mut result = single_centroid_cluster_slice_with_weight_arrays(&input, &weights, distance, 2 * test_dimension + 3, false).unwrap();\n    let answer = (result.len() == 2 * test_dimension) && test_center(&mut result,test_dimension,yard_stick);\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_slice_weighted() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (core_as_slice_weighted(200000, d, seed, l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\nfn vec_dist(a: &Vec<f32>, b: &Vec<f32>) -> f64 {\n    l1distance(&a,&b)\n}\n\n\nfn core_vec(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&Vec<f32>, &Vec<f32>) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        
input.push((data_with_key.data[i].clone(),1.0f32));\n    }\n\n    let mut result = single_centroid_cluster_weighted_vec(&input, distance, 2 * test_dimension + 3, false).unwrap();\n    let answer = (result.len() == 2 * test_dimension) && test_center(&mut result,test_dimension,yard_stick);\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_vec() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (core_vec(200000, d, seed, vec_dist) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\nfn multi_as_vec(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input:Vec<Vec<f32>> = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push(data_with_key.data[i].clone());\n    }\n    let mut result = multi_cluster_obj(&input,   vec_dist, 5,0.1,true,2 * test_dimension + 3, false).unwrap();\n    let mut answer = result.len() == 2 * test_dimension;\n    for i in 0..test_dimension {\n        result.sort_by(|a, b| a.representatives()[0].0[i].partial_cmp(&b.representatives()[0].0[i]).unwrap());\n        answer = answer && abs(result[0].representatives()[0].0[i] + 2.0 * yard_stick) < 0.5;\n        answer = answer\n            && abs(result[2 * test_dimension - 1].representatives()[0].0[i] - 2.0 * yard_stick) < 0.5;\n        for j in 1..(2 * test_dimension - 1) {\n            answer = answer && abs(result[j].representatives()[0].0[i]) < 0.5;\n  
      }\n    }\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_multi_vec() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (multi_as_vec(200000, d, seed, l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\nfn multi_as_ref(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push(&data_with_key.data[i]);\n    }\n    let mut result = multi_cluster_as_ref(&input,   vec_dist, 5,0.1,true,2 * test_dimension + 3, false).unwrap();\n    let mut answer = result.len() == 2 * test_dimension;\n    for i in 0..test_dimension {\n        result.sort_by(|a, b| a.representatives()[0].0[i].partial_cmp(&b.representatives()[0].0[i]).unwrap());\n        answer = answer && abs(result[0].representatives()[0].0[i] + 2.0 * yard_stick) < 0.5;\n        answer = answer\n            && abs(result[2 * test_dimension - 1].representatives()[0].0[i] - 2.0 * yard_stick) < 0.5;\n        for j in 1..(2 * test_dimension - 1) {\n            answer = answer && abs(result[j].representatives()[0].0[i]) < 0.5;\n        }\n    }\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_multi_ref() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = 
ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (multi_as_ref(200000, d, seed, l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\nfn multi_as_weighted_ref(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push((&data_with_key.data[i],1.0f32));\n    }\n    let mut result = multi_cluster_as_weighted_ref(&input,   vec_dist, 5,0.1,true,2 * test_dimension + 3, false).unwrap();\n    let mut answer = result.len() == 2 * test_dimension;\n    for i in 0..test_dimension {\n        result.sort_by(|a, b| a.representatives()[0].0[i].partial_cmp(&b.representatives()[0].0[i]).unwrap());\n        answer = answer && abs(result[0].representatives()[0].0[i] + 2.0 * yard_stick) < 0.5;\n        answer = answer\n            && abs(result[2 * test_dimension - 1].representatives()[0].0[i] - 2.0 * yard_stick) < 0.5;\n        for j in 1..(2 * test_dimension - 1) {\n            answer = answer && abs(result[j].representatives()[0].0[i]) < 0.5;\n        }\n    }\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_multi_weighted_ref() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (multi_as_weighted_ref(200000, d, seed, l1distance) == false) as 
i32;\n    }\n    assert!(error < 5);\n}\n\n\nfn multi_as_vec_weighted(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    println!(\" starting {}\",test_dimension);\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    let data_with_key = gen_data(data_size,test_dimension,seed,yard_stick);\n    let mut input:Vec<Vec<f32>> = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push(data_with_key.data[i].clone());\n    }\n    let weights = vec![1.0f32;data_with_key.data.len()];\n    let ref_result = multi_cluster_as_object_with_weight_array(&input,  &weights, vec_dist, 5,0.1,true,2 * test_dimension + 3, false).unwrap();\n    let mut result = persist(&ref_result);\n    let mut answer = result.len() == 2 * test_dimension;\n    for i in 0..test_dimension {\n        result.sort_by(|a, b| a.representatives()[0].0[i].partial_cmp(&b.representatives()[0].0[i]).unwrap());\n        answer = answer && abs(result[0].representatives()[0].0[i] + 2.0 * yard_stick) < 0.5;\n        answer = answer\n            && abs(result[2 * test_dimension - 1].representatives()[0].0[i] - 2.0 * yard_stick) < 0.5;\n        for j in 1..(2 * test_dimension - 1) {\n            answer = answer && abs(result[j].representatives()[0].0[i]) < 0.5;\n        }\n    }\n    println!(\" done {} {}\",test_dimension,answer);\n    answer\n}\n\n#[test]\nfn benchmark_multi_vec_weighted() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (multi_as_vec_weighted(200000, d, seed, l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n"
  },
  {
    "path": "Rust/tests/dynamicdensitytest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse std::f32::consts::PI;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{ RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn dynamic_density() {\n    let base_dimension = 2;\n    let shingle_size = 1;\n    let number_of_trees = 50;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = false;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = false;\n    let internal_rotation = false;\n\n    let mut forest = RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n        .tree_capacity(capacity)\n        .number_of_trees(number_of_trees)\n        .random_seed(random_seed)\n        .store_attributes(store_attributes)\n        .parallel_enabled(parallel_enabled)\n        .internal_shingling(internal_shingling)\n        .internal_rotation(internal_rotation)\n        .time_decay(time_decay)\n        .initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n\n    let data: Vec<Vec<f32>> = generate_fan(1000, 3);\n    let query_point = vec![0.7, 0.0f32];\n\n    for degree in 0..360 {\n        for j in 0..data.len() {\n            forest.update(\n                &rotate_clockwise(&data[j], 2.0 * PI * degree as f32 / 360.0),\n                0,\n            ).unwrap();\n        }\n\n        let density = forest.directional_density(&query_point).unwrap();\n        let value = density.total();\n\n        if (degree <= 60)\n            || ((degree >= 120) && (degree <= 
180))\n            || ((degree >= 240) && (degree <= 300))\n        {\n            assert!(density.total() < 0.8 * capacity as f64); // the fan is above at 90,210,330\n        }\n\n        if ((degree >= 75) && (degree <= 105))\n            || ((degree >= 195) && (degree <= 225))\n            || ((degree >= 315) && (degree <= 345))\n        {\n            assert!(density.total() > 0.5 * (capacity as f64));\n        }\n\n        // Testing for directionality\n        // There can be unclear directionality when the\n        // blades are right above\n\n        let blade_above_in_y = density.low[1];\n        let blade_below_in_y = density.high[1];\n        let blade_to_the_left = density.high[0];\n        let blade_to_the_right = density.low[0];\n\n        // the tests below have a freedom of 10% of the total value\n        if ((degree >= 75) && (degree <= 85))\n            || ((degree >= 195) && (degree <= 205))\n            || ((degree >= 315) && (degree <= 325))\n        {\n            assert!(blade_above_in_y + 0.1 * value > blade_below_in_y);\n            assert!(blade_above_in_y + 0.1 * value > blade_to_the_right);\n        }\n\n        if ((degree >= 95) && (degree <= 105))\n            || ((degree >= 215) && (degree <= 225))\n            || ((degree >= 335) && (degree <= 345))\n        {\n            assert!(blade_below_in_y + 0.1 * value > blade_above_in_y);\n            assert!(blade_below_in_y + 0.1 * value > blade_to_the_right);\n        }\n\n        if ((degree >= 60) && (degree <= 75))\n            || ((degree >= 180) && (degree <= 195))\n            || ((degree >= 300) && (degree <= 315))\n        {\n            assert!(blade_above_in_y + 0.1 * value > blade_to_the_left);\n            assert!(blade_above_in_y + 0.1 * value > blade_to_the_right);\n        }\n\n        if ((degree >= 105) && (degree <= 120))\n            || ((degree >= 225) && (degree <= 240))\n            || (degree >= 345)\n        {\n            assert!(blade_below_in_y + 0.1 * value > 
blade_to_the_left);\n            assert!(blade_below_in_y + 0.1 * value > blade_to_the_right);\n        }\n\n        // fans are farthest to the left at 30,150 and 270\n        if ((degree >= 15) && (degree <= 45))\n            || ((degree >= 135) && (degree <= 165))\n            || ((degree >= 255) && (degree <= 285))\n        {\n            assert!(\n                blade_to_the_left + 0.1 * value\n                    > blade_above_in_y + blade_below_in_y + blade_to_the_right\n            );\n            assert!(blade_above_in_y + blade_below_in_y + 0.1 * value > blade_to_the_right);\n        }\n    }\n}\n\nfn generate_fan(num_per_blade: usize, blades: usize) -> Vec<Vec<f32>> {\n    let mut data = Vec::new();\n\n    let data_with_key = MultiDimDataWithKey::mixture(\n        num_per_blade * blades,\n        &vec![vec![0f32, 0f32]],\n        &vec![vec![0.05, 0.2]],\n        &vec![1.0f32],\n        0,\n    ).unwrap();\n    let mut rng = ChaCha20Rng::seed_from_u64(72345);\n    for point in data_with_key.data {\n        let toss: f64 = rng.gen();\n        let mut i = 0;\n        while i < blades + 1 {\n            if toss < i as f64 / blades as f64 {\n                let theta = 2.0 * PI * i as f32 / blades as f32;\n                let mut vec = rotate_clockwise(&point, theta);\n                vec[0] += 0.6 * theta.sin();\n                vec[1] += 0.6 * theta.cos();\n                data.push(vec);\n                break;\n            } else {\n                i += 1;\n            }\n        }\n    }\n    data\n}\n\nfn rotate_clockwise(point: &[f32], theta: f32) -> Vec<f32> {\n    let mut result = vec![0.0f32; 2];\n    result[0] = theta.cos() * point[0] + theta.sin() * point[1];\n    result[1] = -theta.sin() * point[0] + theta.cos() * point[1];\n    return result;\n}\n"
  },
  {
    "path": "Rust/tests/gladtest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse std::f32::consts::PI;\n/// try cargo test --release\n/// these tests are designed to be longish\nuse rand::{prelude::ThreadRng, Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\nuse rcflib::{\n    common::{multidimdatawithkey::MultiDimDataWithKey},\n    l2distance,\n};\nuse rcflib::common::cluster::MultiCenter;\nuse rcflib::common::multidimdatawithkey::new_vec;\nuse rcflib::glad::GlobalLocalAnomalyDetector;\n\n\nfn rotate_clockwise(point: &[f32], theta: f32) -> Vec<f32> {\n    let mut result = vec![0.0f32; 2];\n    result[0] = theta.cos() * point[0] + theta.sin() * point[1];\n    result[1] = -theta.sin() * point[0] + theta.cos() * point[1];\n    return result;\n}\n\nfn gen_numeric_data(data_size:usize, seed:u64, shift : (f32,f32), number_of_fans: usize) -> MultiDimDataWithKey {\n    let vec_mean = vec![shift.0,shift.1];\n    let scale = vec![1.0,0.5/number_of_fans as f32];\n    let mut data :Vec<Vec<f32>> = Vec::new();\n    let mut labels:Vec<usize> = Vec::new();\n    let mut rng = ChaCha20Rng::seed_from_u64(seed);\n    for i in 0..data_size {\n        let vec = new_vec(&vec_mean, &scale, &mut rng);\n        if rng.gen::<f64>() < 0.005 {\n            let j :usize = (rng.next_u32() as usize)%number_of_fans;\n            data.push(rotate_clockwise(&vec,(2.0*PI * i as f32)/data_size as f32 + PI*(1.0 + 2.0*j as f32)/number_of_fans as f32));\n            labels.push( number_of_fans + 2*j) ;\n        } else {\n            let j :usize = (rng.next_u32() as usize)%number_of_fans;\n            data.push(rotate_clockwise(&vec,(2.0*PI * i as f32)/data_size as f32 + PI*(2.0*j as f32)/number_of_fans as f32));\n            labels.push(j) ;\n        }\n    }\n    MultiDimDataWithKey{\n        data,\n        change_indices: Vec::new(),\n        labels,\n        changes: Vec::new()\n    }\n}\n\nfn vec_dist(a: &Vec<f32>, b : &Vec<f32>) -> f64 {\n    
l2distance(a,b)\n}\n\nfn bad_distance<T :?Sized>(_a : &T, _b:&T) -> f64{\n    -0.0001\n}\n\n#[test]\nfn numeric_glad() {\n    let data_size = 1000000; // should be sufficiently large for covering a 360 degree rotation, for |capacity| points\n    let number_of_fans = 3;\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let capacity = 2000;\n    let time_decay = 1.0 /capacity as f64;\n    let data_with_key = gen_numeric_data(data_size,one_seed,(5.0 + 1.0/number_of_fans as f32,0.0),number_of_fans);\n    let mut glad = GlobalLocalAnomalyDetector::<Vec<f32>>::new(2000,0,time_decay,5, 0.1,true).unwrap();\n\n    glad.set_z_factor(6.0 + number_of_fans as f32/4.0);\n\n    let mut false_neg = 0;\n    let mut false_pos = 0;\n    let mut true_pos = 0;\n    let print_clusters = false;\n    // set the above to see the cluster centers printed with associated relative mass\n    // each block is separated by two println!()\n    // a simple visualization tool can plot an animation of the clusters\n    // for example in gnuplot, try something like\n    // set terminal gif transparent animate delay 5\n    // do for [i = 0:360] { plot [-10:10][-10:10] \"typescript\" i i u 1:2:3 w p palette pt 7 t \"\" }\n    let mut first = true;\n    for j in 0..data_with_key.data.len() {\n        let answer = glad.process(&data_with_key.data[j],1.0,vec_dist,vec_dist, false).unwrap();\n        if answer.grade != 0.0 {\n            if data_with_key.labels[j] < number_of_fans {\n                false_pos += 1;\n            } else {\n                true_pos += 1;\n            }\n        } else {\n            if data_with_key.labels[j] >= number_of_fans {\n                false_neg += 1;\n            }\n        }\n\n        if (j*360/data_size)%2 != 0 {\n            if print_clusters && !first {\n                println!();\n                println!();\n            }\n            first = true;\n        } else {\n 
           if print_clusters && first {\n                let a = glad.clusters();\n                for i in 0..a.len() {\n                    let item =&a[i];\n                    for rep in item.representatives() {\n                        println!(\"{} {} {} {}\", rep.0[0], rep.0[1], i , rep.1);\n                    }\n                }\n                first = false;\n            }\n        }\n\n    }\n    println!(\" precision {} recall {} out of {} injected anomalies\", (true_pos as f32)/(true_pos + false_pos) as f32, true_pos as f32/(true_pos + false_neg) as f32, (true_pos + false_neg));\n\n    // negative weight is error\n    assert!(glad.process(&data_with_key.data[0],-1.0,vec_dist,vec_dist,false).is_err());\n    // negative distance is error\n    assert!(glad.process(&data_with_key.data[0],1.0,vec_dist,bad_distance,false).is_err());\n}\n\n\npub fn toy_d(a:&Vec<char>, b: &Vec<char>) -> f64 {\n    if a.len() > b.len() {\n        return toy_d(b, a);\n    }\n    let mut one = vec![0.0;b.len()+1];\n    let mut two = vec![0.0;b.len()+1];\n\n    for j in 0..b.len()+1 {\n        one[j] = j as f64;\n    }\n    for i in 1..a.len()+1 {\n\n        two[0] = i as f64;\n        for ((x, y), z) in two[1..].iter_mut().zip(&one[..b.len()]).zip(b) {\n            *x = if a[i-1] == *z {*y} else {*y + 1.0};\n        }\n\n        for (x, y) in two.iter_mut().zip(&one) {\n            *x = if *x < *y + 1.0 {*x} else {*y+1.0};\n        }\n\n        for j in 1..b.len()+1 {\n            if two[j] > two[j - 1] + 1.0 {\n                two[j] = two[j - 1] + 1.0;\n            }\n        }\n\n        // change one\n        for(x,y) in one.iter_mut().zip(&two){\n            *x = *y;\n        }\n    }\n    one[b.len()]\n}\n\npub fn get_ab_array(size: usize, probability_of_a: f64, rng: &mut ChaCha20Rng, change_in_middle : bool, fraction : f64) -> Vec<char> {\n    let mut answer = Vec::new();\n    let new_size = size + (rng.next_u32() as usize)%(size / 5);\n    for i in 0..new_size {\n       
 let toss = if change_in_middle && (i as f64 > (1.0 - fraction) * new_size as f64 || (i as f64) < (new_size as f64 ) * fraction) {\n            1.0 - probability_of_a\n        } else {\n            probability_of_a\n        };\n        if rng.gen::<f64>() < toss {\n            answer.push('\\u{2014}');\n        } else {\n            answer.push('\\u{005F}');\n        }\n    }\n    answer\n}\n\nconst ANSI_RESET :&str = \"\\u{001B}[0m\";\nconst ANSI_RED : &str = \"\\u{001B}[31m\";\nconst ANSI_BLUE : &str = \"\\u{001B}[34m\";\n\npub fn print_array(a:&[char]) {\n    for i in 0..a.len() {\n        if a[i] == '\\u{2014}' {\n            print!(\"{}{}{}\", ANSI_RED,a[i],ANSI_RESET);\n        } else {\n            print!(\"{}{}{}\",ANSI_BLUE,a[i], ANSI_RESET);\n        }\n    }\n}\n\nfn print_clusters(clusters: &Vec<MultiCenter<Vec<char>>>) {\n    for i in 0..clusters.len()  {\n        println!(\" Cluster {},  weight {:.3}, average radius {:.3} \",i,clusters[i].weight(),clusters[i].average_radius());\n        for item in &clusters[i].representatives() {\n            print!(\"(wt {:.2}, len {})\", item.1,item.0.len());\n            print_array(&item.0);\n            println!();\n        }\n        println!();\n        println!();\n    }\n}\n\n#[test]\nfn string_glad() {\n    let data_size = 200000; // should be sufficiently large for covering a 360 degree rotation, for |capacity| points\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n    let string_size = 70;\n    let capacity= 2000;\n    let change_in_middle = true;\n    // the following should be away from 0.5 in [0.5,1]\n    let gap_prob_of_a = 0.85;\n    let time_decay = 1.0 /capacity as f64;\n    let anomaly_rate = 0.05;\n    let mut injected: bool;\n    let mut number_of_injected = 0;\n\n    let mut false_neg = 0;\n    let mut false_pos = 0;\n    let mut true_pos = 0;\n\n   
 let print_clusters_strings = true;\n\n    let mut glad = GlobalLocalAnomalyDetector::<Vec<char>>::new(2000,0,time_decay,5, 0.1,false).unwrap();\n\n    // we will not store the points but perform streaming\n\n    for i in 0..data_size {\n        if i>0 && i%10000 == 0 {\n            println!(\" at {} \",i);\n            if print_clusters_strings {\n                print_clusters(&glad.clusters());\n            }\n        }\n        let mut point = Vec::new();\n        if rng.gen::<f64>() < anomaly_rate {\n            injected = true;\n            number_of_injected += 1;\n            point = get_ab_array(string_size + 10, 0.5, &mut rng, false, 0.0);\n        } else {\n            let flag = change_in_middle && rng.gen::<f64>() < 0.25;\n            let prob = if rng.gen::<f64>() < 0.5 {\n                gap_prob_of_a\n            } else {\n                1.0 - gap_prob_of_a\n            };\n            injected = false;\n            point = get_ab_array(string_size, prob, &mut rng, flag, 0.25 * i as f64/ data_size as f64);\n        }\n        let answer = glad.process(&point,1.0,toy_d,toy_d, false).unwrap();\n        if answer.grade != 0.0 {\n            if !injected {\n                false_pos += 1;\n            } else {\n                true_pos += 1;\n            }\n        } else {\n            if injected && i > capacity/2 {\n                false_neg += 1;\n            }\n        }\n    }\n    println!(\"injected {}\", number_of_injected);\n    println!(\" precision {} recall {} out of {} injected anomalies\", (true_pos as f32)/(true_pos + false_pos) as f32, true_pos as f32/(true_pos + false_neg) as f32, (true_pos + false_neg));\n}"
  },
  {
    "path": "Rust/tests/imputedifferentperiod.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn impute_different_period() {\n    let shingle_size = 30;\n    let base_dimension = 3;\n    let data_size = 100000;\n    let number_of_trees = 100;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = true;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = true;\n    let internal_rotation = false;\n    let noise = 5.0;\n\n    let mut forest= RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n        .tree_capacity(capacity)\n        .number_of_trees(number_of_trees)\n        .random_seed(random_seed)\n        .store_attributes(store_attributes)\n        .parallel_enabled(parallel_enabled)\n        .internal_shingling(internal_shingling)\n        .internal_rotation(internal_rotation)\n        .time_decay(time_decay)\n        .initial_accept_fraction(initial_accept_fraction)\n        .bounding_box_cache_fraction(bounding_box_cache_fraction).build().unwrap();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 100.0);\n    }\n    let mut period_rng = ChaCha20Rng::seed_from_u64(7);\n    let mut period = Vec::new();\n    for _i in 0..base_dimension {\n        period.push(((1.0 + 0.2 * period_rng.gen::<f32>()) * 60.0) as usize);\n    }\n    let data_with_key = MultiDimDataWithKey::multi_cosine(\n        data_size,\n  
      &period,\n        &amplitude,\n        noise,\n        0,\n        base_dimension.into(),\n    ).unwrap();\n\n    let _next_index = 0;\n    let mut error = 0.0;\n    let mut count = 0;\n\n    for i in 0..data_with_key.data.len() {\n        if i > 200 {\n            let next_values = forest.extrapolate(1).unwrap().values;\n            assert_eq!(next_values.len(), base_dimension);\n            error += next_values\n                .iter()\n                .zip(&data_with_key.data[i])\n                .map(|(x, y)| ((x - y) as f64 * (x - y) as f64))\n                .sum::<f64>();\n            count += base_dimension;\n        }\n        forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    println!(\"Success! {}\", forest.entries_seen());\n    println!(\"PointStore Size {} \", forest.point_store_size());\n    println!(\"Total size {} bytes (approx)\", forest.size());\n    println!(\n        \" RMSE {},  noise {} \",\n        f64::sqrt(error / count as f64),\n        noise\n    );\n}\n"
  },
  {
    "path": "Rust/tests/imputesameperiod.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse rand::{Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rcflib::{\n    common::multidimdatawithkey::MultiDimDataWithKey,\n    rcf::{ RCF},\n};\nuse rcflib::rcf::{RCFBuilder, RCFOptionsBuilder};\n\n/// try cargo test --release\n/// these tests are designed to be longish\n\n#[test]\nfn impute_same_period() {\n    let shingle_size = 8;\n    let base_dimension = 5;\n    let data_size = 100000;\n    let number_of_trees = 30;\n    let capacity = 256;\n    let initial_accept_fraction = 0.1;\n    let _point_store_capacity = capacity * number_of_trees + 1;\n    let time_decay = 0.1 / capacity as f64;\n    let bounding_box_cache_fraction = 1.0;\n    let random_seed = 17;\n    let parallel_enabled: bool = true;\n    let store_attributes: bool = false;\n    let internal_shingling: bool = true;\n    let internal_rotation = false;\n    let noise = 5.0;\n\n    let mut forest: Box<dyn RCF> = RCFBuilder::<u64,u64>::new(base_dimension,shingle_size)\n            .tree_capacity(capacity)\n            .number_of_trees(number_of_trees)\n            .random_seed(random_seed)\n            .store_attributes(store_attributes)\n            .parallel_enabled(parallel_enabled)\n            .internal_shingling(internal_shingling)\n            .internal_rotation(internal_rotation)\n            .time_decay(time_decay)\n            .initial_accept_fraction(initial_accept_fraction)\n            .bounding_box_cache_fraction(bounding_box_cache_fraction).build_default().unwrap();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut amplitude = Vec::new();\n    for _i in 0..base_dimension {\n        amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 100.0);\n    }\n    let data_with_key = MultiDimDataWithKey::multi_cosine(\n        data_size,\n        &vec![60; base_dimension],\n        &amplitude,\n        noise,\n        0,\n        base_dimension.into(),\n    ).unwrap();\n\n    let _next_index 
= 0;\n    let mut error = 0.0;\n    let mut count = 0;\n\n    for i in 0..data_with_key.data.len() {\n        if i > 200 {\n            let next_values = forest.extrapolate(1).unwrap().values;\n            assert_eq!(next_values.len(), base_dimension);\n            error += next_values\n                .iter()\n                .zip(&data_with_key.data[i])\n                .map(|(x, y)| ((x - y) as f64 * (x - y) as f64))\n                .sum::<f64>();\n            count += base_dimension;\n        }\n        forest.update(&data_with_key.data[i], 0).unwrap();\n    }\n\n    println!(\"Success! {}\", forest.entries_seen());\n    println!(\"PointStore Size {} \", forest.point_store_size());\n    println!(\"Total size {} bytes (approx)\", forest.size());\n    println!(\n        \" RMSE {},  noise {} \",\n        f64::sqrt(error / count as f64),\n        noise\n    );\n}\n"
  },
  {
    "path": "Rust/tests/multitrcftest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse std::collections::{HashMap, HashSet};\nuse rand::Rng;\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::SeedableRng;\nuse rcflib::common::deviation::Deviation;\nuse rcflib::common::multidimdatawithkey;\nuse rcflib::rcf::{ RCFOptionsBuilder};\nuse rcflib::trcf::basictrcf::TRCFOptionsBuilder;\nuse rcflib::trcf::multitrcf::{MultiTRCF, MultiTRCFBuilder};\nuse rcflib::trcf::types::ScoringStrategy::EXPECTED_INVERSE_HEIGHT;\nuse rcflib::trcf::types::TransformMethod;\nuse rcflib::trcf::types::TransformMethod::{NONE,NORMALIZE};\n\n#[cfg(test)]\nparameterized_test::create! { multi_trcf_basic, (method,parallel_enabled), {\n    multi_trcf(method,parallel_enabled);\n}}\n\nmulti_trcf_basic! {\n    d1: (NONE,false),\n    d2: (NONE,true),\n}\n\n#[test]\npub fn multi_trcf_single_threaded(){\n    multi_trcf(NORMALIZE,false);\n}\n\n#[test]\npub fn multi_trcf_multi_threaded(){\n    multi_trcf(NORMALIZE,true);\n}\n\npub fn multi_trcf(transform_method:TransformMethod,parallel_enabled:bool) {\n    let shingle_size = 10;\n    let input_dimensions = 1;\n    let data_size = 1000;\n    let noise = 5.0;\n    let number_of_series = 1000;\n    // as number_of_series is increased the prec-recall for a fixed number of arms\n    let scoring_strategy = EXPECTED_INVERSE_HEIGHT;\n\n    let mut total_injected =0;\n    let mut total_found = 0;\n    let mut total_overlap = 0;\n    let mut late = 0;\n    let number_of_models = 3; // more than 10 may not be a great idea\n    let mut multi_trcf : MultiTRCF = MultiTRCFBuilder::new(input_dimensions, shingle_size, number_of_models, 2*number_of_series)\n        .scoring_strategy(scoring_strategy).parallel_enabled(parallel_enabled).build().unwrap();\n\n    let mut period_map = HashMap::new();\n\n    let mut rng = ChaCha20Rng::seed_from_u64(42);\n    let mut data_with_key = Vec::new();\n    for y in 0..number_of_series {\n        let mut amplitude = Vec::new();\n        let 
mut period = Vec::new();\n        for _i in 0..input_dimensions {\n            amplitude.push((1.0 + 0.2 * rng.gen::<f32>()) * 60.0);\n            // ranges from [30,90)\n            period.push(30 + ((rng.gen::<f32>())* 60.0) as usize);\n        }\n\n        period_map.insert(y as u64,period.clone());\n\n        data_with_key.push(multidimdatawithkey::MultiDimDataWithKey::multi_cosine(\n            data_size,\n            &period,\n            &amplitude,\n            noise,\n            rng.gen::<u64>(),\n            input_dimensions.into(),\n        ).unwrap());\n    }\n    let mut next_index = vec![0;number_of_series];\n    let mut late_discovered = vec![HashSet::new();shingle_size];\n\n    for i in 0..data_size {\n        let mut injected  = HashSet::new();\n        let mut map : HashMap<u64,(&[f32],u64)> = HashMap::new();\n        for j in 0..number_of_series {\n            if next_index[j] < data_with_key[j].change_indices.len() && data_with_key[j].change_indices[next_index[j]] == i {\n                next_index[j] += 1;\n                injected.insert(j);\n            }\n            map.insert(j as u64, (&data_with_key[j].data[i],i as u64));\n        }\n        total_injected += injected.len();\n\n        let result = multi_trcf.process(map).unwrap();\n        let y : usize = result.iter().map(|x| if x.relative_index == 0 && injected.contains(&(x.id as usize)) {1} else {0}).sum();\n        let z : usize = result.iter().map(|x| if x.relative_index != 0 {\n            let q = (i as i32 + x.relative_index) as usize;\n            if late_discovered[ q % shingle_size].contains(&(x.id as usize)) {1} else {0}\n        } else {0}).sum();\n        total_overlap += y;\n        total_found += result.len();\n        late += z;\n        late_discovered[ i % shingle_size] = injected;\n    }\n    println!(\"number of time series: {} size {} each, {} arms,  parallel enabled: {}\",number_of_series,data_size,number_of_models,parallel_enabled);\n    println!(\"shingle size: 
{}, normalization: {}, anomalies in total {}\",shingle_size,transform_method,total_injected);\n    println!(\"spot precision {} recall {}\",((total_overlap as f64)/(total_found as f64)),\n        ((total_overlap as f64)/(total_injected as f64)));\n    println!(\"with late detection, precision {} recall {}\",(((total_overlap + late) as f64)/(total_found as f64)),\n             (((total_overlap+late) as f64)/(total_injected as f64)));\n    println!(\"nontrivial bandit switches {}, affirmations {}\",multi_trcf.switches(),multi_trcf.affirmations());\n    print!(\"model updates across different arms:\");\n    for y in multi_trcf.updates() {\n        print!(\"({}, {}) \",y.0,y.1)\n    }\n    println!();\n    // lets check the period_map; just for the first dimension\n    let mut result = multi_trcf.states().iter().map(|x| (x.bandit.current_model(),x.id,period_map.get(&x.id).unwrap()[0]))\n        .collect::<Vec<(usize,u64,usize)>>();\n    result.sort();\n    let mut a = result[0].0;\n    let mut stat = vec![Deviation::new(0.0).unwrap();number_of_models];\n    for y in result {\n        if a == y.0 {\n            stat[y.0].update(y.2 as f64);\n        } else {\n            a = y.0;\n        }\n    }\n    for x in 0..number_of_models {\n        println!(\"Model {},  chosen by {}, average period {}, deviation {} \",x,stat[x].count,stat[x].mean(),stat[x].deviation());\n    }\n}\n"
  },
  {
    "path": "Rust/tests/samplesummarytest.rs",
    "content": "extern crate rand;\nextern crate rand_chacha;\nextern crate rcflib;\n\nuse num::abs;\n/// try cargo test --release\n/// these tests are designed to be longish\nuse rand::{prelude::ThreadRng, Rng, SeedableRng};\nuse rand_chacha::ChaCha20Rng;\nuse rand_core::RngCore;\nuse rcflib::{\n    common::{multidimdatawithkey::MultiDimDataWithKey, samplesummary::summarize},\n    l1distance, l2distance, linfinitydistance,\n};\nuse rcflib::common::samplesummary::multi_summarize_ref;\n\n#[cfg(test)]\nparameterized_test::create! { sample_summary_distance_test, (test_dimension,distance), {\nassert!(core(1000000,test_dimension,0,distance));\n}}\n\nfn core(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    let mut mean = Vec::new();\n    let mut scale = Vec::new();\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    for i in 0..test_dimension {\n        let mut vec1 = vec![0.0f32; test_dimension];\n        let mut vec2 = vec![0.0f32; test_dimension];\n        vec1[i] = 2.0 * yard_stick;\n        vec2[i] = -2.0 * yard_stick;\n        mean.push(vec1);\n        mean.push(vec2);\n        scale.push(vec![0.1f32; test_dimension]);\n        scale.push(vec![0.1f32; test_dimension]);\n    }\n\n    let data_with_key = MultiDimDataWithKey::mixture(\n        data_size,\n        &mean,\n        &scale,\n        &vec![0.5 / test_dimension as f32; 2 * test_dimension],\n        seed,\n    ).unwrap();\n\n    let mut input = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push((data_with_key.data[i].clone(), 1.0f32));\n    }\n    let mut result = summarize(&input, distance, 2 * test_dimension + 3, false).unwrap();\n    let mut answer = result.summary_points.len() == 2 * test_dimension;\n    // should be two centers per dimension\n    // the top two should correspond to +/- 5.0 in first dimension\n    for i in 0..test_dimension {\n        
result\n            .summary_points\n            .sort_by(|a, b| a[i].partial_cmp(&b[i]).unwrap());\n        answer = answer && abs(result.summary_points[0][i] + 2.0 * yard_stick) < 0.5;\n        answer = answer\n            && abs(result.summary_points[2 * test_dimension - 1][i] - 2.0 * yard_stick) < 0.5;\n        for j in 1..(2 * test_dimension - 1) {\n            answer = answer && abs(result.summary_points[j][i]) < 0.5;\n        }\n    }\n    answer\n}\n\nsample_summary_distance_test! {\n    a1 : (1,l1distance),\n    b1 : (1,l2distance),\n    c1 : (1,linfinitydistance),\n    a2 : (2,l1distance),\n    b2 : (3,l2distance),\n    a3 : (5,l1distance),\n    b3 : (5,l2distance),\n}\n\n#[test]\nfn benchmark() {\n    let mut generator = ThreadRng::default();\n    let one_seed: u64 = generator.gen();\n    println!(\" single seed is {}\", one_seed);\n    let mut rng = ChaCha20Rng::seed_from_u64(one_seed);\n\n    let mut error = 0;\n    for _ in 0..10 {\n        let seed = rng.next_u64();\n        let d = rng.gen_range(3..23);\n        error += (core(200000, d, seed, l1distance) == false) as i32;\n    }\n    assert!(error < 5);\n}\n\n#[cfg(test)]\nparameterized_test::create! { sample_summary_distance_test_ref, (test_dimension,distance), {\nassert!(core_ref(1000000,test_dimension,0,distance));\n}}\n\nsample_summary_distance_test_ref! 
{\n    a1 : (1,l1distance),\n    b1 : (1,l2distance),\n    c1 : (1,linfinitydistance),\n    a2 : (2,l1distance),\n    b2 : (3,l2distance),\n    c2 : (4,linfinitydistance),\n    a3 : (15,l1distance),\n    b3 : (15,l2distance),\n    c3 : (15,linfinitydistance),\n\n}\n\nfn core_ref(\n    data_size: usize,\n    test_dimension: usize,\n    seed: u64,\n    distance: fn(&[f32], &[f32]) -> f64,\n) -> bool {\n    let mut mean = Vec::new();\n    let mut scale = Vec::new();\n    let yard_stick = distance(&vec![0.0; test_dimension], &vec![1.0; test_dimension]) as f32;\n    for i in 0..test_dimension {\n        let mut vec1 = vec![0.0f32; test_dimension];\n        let mut vec2 = vec![0.0f32; test_dimension];\n        vec1[i] = 2.0 * yard_stick;\n        vec2[i] = -2.0 * yard_stick;\n        mean.push(vec1);\n        mean.push(vec2);\n        scale.push(vec![0.1f32; test_dimension]);\n        scale.push(vec![0.1f32; test_dimension]);\n    }\n\n    let data_with_key = MultiDimDataWithKey::mixture(\n        data_size,\n        &mean,\n        &scale,\n        &vec![0.5 / test_dimension as f32; 2 * test_dimension],\n        seed,\n    ).unwrap();\n\n    let mut input:Vec<(&[f32],f32)> = Vec::new();\n    for i in 0..data_with_key.data.len() {\n        input.push((&data_with_key.data[i], 1.0f32));\n    }\n    let mut result = multi_summarize_ref(&input, distance, 5,0.1,2 * test_dimension + 3, false).unwrap();\n    let mut answer = result.summary_points.len() == 2 * test_dimension;\n    // should be two centers per dimension\n    // the top two should correspond to +/- 5.0 in first dimension\n    for i in 0..test_dimension {\n        result\n            .summary_points\n            .sort_by(|a, b| a[i].partial_cmp(&b[i]).unwrap());\n        answer = answer && abs(result.summary_points[0][i] + 2.0 * yard_stick) < 0.5;\n        answer = answer\n            && abs(result.summary_points[2 * test_dimension - 1][i] - 2.0 * yard_stick) < 0.5;\n        for j in 1..(2 * test_dimension - 1) {\n 
           answer = answer && abs(result.summary_points[j][i]) < 0.5;\n        }\n    }\n    answer\n}"
  },
  {
    "path": "THIRD-PARTY",
    "content": "** Gson 2.8; version 2.8.6 -- https://github.com/google/gson\nCopyright 2008 Google Inc.\n\nApache License\n\nVersion 2.0, January 2004\n\nhttp://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND\nDISTRIBUTION\n\n   1. Definitions.\n\n      \"License\" shall mean the terms and conditions for use, reproduction, and\n      distribution as defined by Sections 1 through 9 of this document.\n\n      \"Licensor\" shall mean the copyright owner or entity authorized by the\n      copyright owner that is granting the License.\n\n      \"Legal Entity\" shall mean the union of the acting entity and all other\n      entities that control, are controlled by, or are under common control\n      with that entity. For the purposes of this definition, \"control\" means\n      (i) the power, direct or indirect, to cause the direction or management\n      of such entity, whether by contract or otherwise, or (ii) ownership of\n      fifty percent (50%) or more of the outstanding shares, or (iii)\n      beneficial ownership of such entity.\n\n      \"You\" (or \"Your\") shall mean an individual or Legal Entity exercising\n      permissions granted by this License.\n\n      \"Source\" form shall mean the preferred form for making modifications,\n      including but not limited to software source code, documentation source,\n      and configuration files.\n\n      \"Object\" form shall mean any form resulting from mechanical\n      transformation or translation of a Source form, including but not limited\n      to compiled object code, generated documentation, and conversions to\n      other media types.\n\n      \"Work\" shall mean the work of authorship, whether in Source or Object\n      form, made available under the License, as indicated by a copyright\n      notice that is included in or attached to the work (an example is\n      provided in the Appendix below).\n\n      \"Derivative Works\" shall mean any work, whether in Source or Object 
form,\n      that is based on (or derived from) the Work and for which the editorial\n      revisions, annotations, elaborations, or other modifications represent,\n      as a whole, an original work of authorship. For the purposes of this\n      License, Derivative Works shall not include works that remain separable\n      from, or merely link (or bind by name) to the interfaces of, the Work and\n      Derivative Works thereof.\n\n      \"Contribution\" shall mean any work of authorship, including the original\n      version of the Work and any modifications or additions to that Work or\n      Derivative Works thereof, that is intentionally submitted to Licensor for\n      inclusion in the Work by the copyright owner or by an individual or Legal\n      Entity authorized to submit on behalf of the copyright owner. For the\n      purposes of this definition, \"submitted\" means any form of electronic,\n      verbal, or written communication sent to the Licensor or its\n      representatives, including but not limited to communication on electronic\n      mailing lists, source code control systems, and issue tracking systems\n      that are managed by, or on behalf of, the Licensor for the purpose of\n      discussing and improving the Work, but excluding communication that is\n      conspicuously marked or otherwise designated in writing by the copyright\n      owner as \"Not a Contribution.\"\n\n      \"Contributor\" shall mean Licensor and any individual or Legal Entity on\n      behalf of whom a Contribution has been received by Licensor and\n      subsequently incorporated within the Work.\n\n   2. Grant of Copyright License. 
Subject to the terms and conditions of this\n   License, each Contributor hereby grants to You a perpetual, worldwide,\n   non-exclusive, no-charge, royalty-free, irrevocable copyright license to\n   reproduce, prepare Derivative Works of, publicly display, publicly perform,\n   sublicense, and distribute the Work and such Derivative Works in Source or\n   Object form.\n\n   3. Grant of Patent License. Subject to the terms and conditions of this\n   License, each Contributor hereby grants to You a perpetual, worldwide,\n   non-exclusive, no-charge, royalty-free, irrevocable (except as stated in\n   this section) patent license to make, have made, use, offer to sell, sell,\n   import, and otherwise transfer the Work, where such license applies only to\n   those patent claims licensable by such Contributor that are necessarily\n   infringed by their Contribution(s) alone or by combination of their\n   Contribution(s) with the Work to which such Contribution(s) was submitted.\n   If You institute patent litigation against any entity (including a\n   cross-claim or counterclaim in a lawsuit) alleging that the Work or a\n   Contribution incorporated within the Work constitutes direct or contributory\n   patent infringement, then any patent licenses granted to You under this\n   License for that Work shall terminate as of the date such litigation is\n   filed.\n\n   4. Redistribution. 
You may reproduce and distribute copies of the Work or\n   Derivative Works thereof in any medium, with or without modifications, and\n   in Source or Object form, provided that You meet the following conditions:\n\n      (a) You must give any other recipients of the Work or Derivative Works a\n      copy of this License; and\n\n      (b) You must cause any modified files to carry prominent notices stating\n      that You changed the files; and\n\n      (c) You must retain, in the Source form of any Derivative Works that You\n      distribute, all copyright, patent, trademark, and attribution notices\n      from the Source form of the Work, excluding those notices that do not\n      pertain to any part of the Derivative Works; and\n\n      (d) If the Work includes a \"NOTICE\" text file as part of its\n      distribution, then any Derivative Works that You distribute must include\n      a readable copy of the attribution notices contained within such NOTICE\n      file, excluding those notices that do not pertain to any part of the\n      Derivative Works, in at least one of the following places: within a\n      NOTICE text file distributed as part of the Derivative Works; within the\n      Source form or documentation, if provided along with the Derivative\n      Works; or, within a display generated by the Derivative Works, if and\n      wherever such third-party notices normally appear. The contents of the\n      NOTICE file are for informational purposes only and do not modify the\n      License. 
You may add Your own attribution notices within Derivative Works\n      that You distribute, alongside or as an addendum to the NOTICE text from\n      the Work, provided that such additional attribution notices cannot be\n      construed as modifying the License.\n\n      You may add Your own copyright statement to Your modifications and may\n      provide additional or different license terms and conditions for use,\n      reproduction, or distribution of Your modifications, or for any such\n      Derivative Works as a whole, provided Your use, reproduction, and\n      distribution of the Work otherwise complies with the conditions stated in\n      this License.\n\n   5. Submission of Contributions. Unless You explicitly state otherwise, any\n   Contribution intentionally submitted for inclusion in the Work by You to the\n   Licensor shall be under the terms and conditions of this License, without\n   any additional terms or conditions. Notwithstanding the above, nothing\n   herein shall supersede or modify the terms of any separate license agreement\n   you may have executed with Licensor regarding such Contributions.\n\n   6. Trademarks. This License does not grant permission to use the trade\n   names, trademarks, service marks, or product names of the Licensor, except\n   as required for reasonable and customary use in describing the origin of the\n   Work and reproducing the content of the NOTICE file.\n\n   7. Disclaimer of Warranty. Unless required by applicable law or agreed to in\n   writing, Licensor provides the Work (and each Contributor provides its\n   Contributions) on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n   KIND, either express or implied, including, without limitation, any\n   warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or\n   FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining\n   the appropriateness of using or redistributing the Work and assume any risks\n   associated with Your exercise of permissions under this License.\n\n   8. Limitation of Liability. In no event and under no legal theory, whether\n   in tort (including negligence), contract, or otherwise, unless required by\n   applicable law (such as deliberate and grossly negligent acts) or agreed to\n   in writing, shall any Contributor be liable to You for damages, including\n   any direct, indirect, special, incidental, or consequential damages of any\n   character arising as a result of this License or out of the use or inability\n   to use the Work (including but not limited to damages for loss of goodwill,\n   work stoppage, computer failure or malfunction, or any and all other\n   commercial damages or losses), even if such Contributor has been advised of\n   the possibility of such damages.\n\n   9. Accepting Warranty or Additional Liability. While redistributing the Work\n   or Derivative Works thereof, You may choose to offer, and charge a fee for,\n   acceptance of support, warranty, indemnity, or other liability obligations\n   and/or rights consistent with this License. However, in accepting such\n   obligations, You may act only on Your own behalf and on Your sole\n   responsibility, not on behalf of any other Contributor, and only if You\n   agree to indemnify, defend, and hold each Contributor harmless for any\n   liability incurred by, or claims asserted against, such Contributor by\n   reason of your accepting any such warranty or additional liability. END OF\n   TERMS AND CONDITIONS\n\nAPPENDIX: How to apply the Apache License to your work.\n\nTo apply the Apache License to your work, attach the following boilerplate\nnotice, with the fields enclosed by brackets \"[]\" replaced with your own\nidentifying information. (Don't include the brackets!) 
The text should be\nenclosed in the appropriate comment syntax for the file format. We also\nrecommend that a file or class name and description of purpose be included on\nthe same \"printed page\" as the copyright notice for easier identification\nwithin third-party archives.\n\nCopyright [yyyy] [name of copyright owner]\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\n\nyou may not use this file except in compliance with the License.\n\nYou may obtain a copy of the License at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\n\ndistributed under the License is distributed on an \"AS IS\" BASIS,\n\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\nSee the License for the specific language governing permissions and\n\nlimitations under the License.\n\n* For Gson 2.8 see also this required NOTICE:\n    Copyright 2008 Google Inc."
  },
  {
    "path": "example-data/rcf-paper.csv",
    "content": "-5.0048,0.0127,-0.0170\n4.9877,-0.0013,0.0085\n-5.0068,-0.0008,0.0160\n-4.9956,0.0017,0.0064\n5.0010,-0.0045,0.0116\n-5.0053,0.0017,0.0057\n5.0071,0.0020,-0.0192\n-4.9900,-0.0015,0.0010\n5.0119,0.0053,0.0101\n-5.0021,-0.0068,0.0075\n-5.0053,-0.0096,0.0017\n-4.9909,0.0008,-0.0104\n4.9965,-0.0091,0.0099\n-4.9918,0.0041,0.0048\n4.9957,-0.0191,0.0028\n5.0138,0.0136,0.0063\n5.0013,-0.0054,-0.0087\n-4.9732,-0.0020,-0.0100\n-5.0039,-0.0034,0.0152\n5.0147,-0.0055,0.0097\n4.9940,-0.0141,-0.0098\n4.9985,0.0050,0.0069\n-4.9991,0.0094,0.0061\n-4.9989,0.0152,0.0078\n5.0024,0.0101,-0.0094\n5.0009,-0.0173,-0.0164\n-4.9959,-0.0048,0.0000\n-4.9987,-0.0141,-0.0035\n-4.9974,0.0028,0.0144\n-5.0150,0.0253,0.0177\n4.9902,-0.0099,0.0017\n-5.0002,0.0038,0.0226\n4.9853,-0.0077,-0.0199\n5.0140,0.0118,-0.0114\n-5.0086,-0.0163,-0.0030\n4.9920,-0.0124,0.0011\n-5.0092,0.0025,0.0082\n-4.9911,-0.0006,-0.0150\n-5.0048,0.0230,-0.0106\n4.9841,0.0003,-0.0078\n-4.9890,-0.0013,0.0081\n5.0045,0.0161,-0.0113\n-4.9916,0.0014,-0.0127\n-4.9970,0.0073,0.0201\n5.0092,0.0018,-0.0131\n-5.0102,-0.0086,0.0126\n4.9922,0.0032,-0.0061\n5.0077,0.0142,-0.0034\n5.0027,0.0019,0.0074\n-5.0003,-0.0169,-0.0011\n4.9835,0.0024,-0.0040\n4.9951,0.0025,0.0088\n4.9800,-0.0089,0.0022\n4.9931,0.0020,-0.0069\n-4.9945,-0.0067,-0.0006\n-5.0202,-0.0098,-0.0019\n4.9994,0.0079,-0.0031\n4.9928,-0.0051,-0.0049\n-5.0037,-0.0179,-0.0006\n4.9859,-0.0078,-0.0017\n4.9997,-0.0029,-0.0019\n-5.0067,0.0003,-0.0064\n4.9974,0.0171,-0.0182\n5.0109,-0.0101,0.0054\n-4.9871,-0.0151,0.0068\n5.0041,-0.0108,0.0038\n-4.9966,-0.0244,0.0111\n-5.0110,0.0002,-0.0122\n4.9961,-0.0001,-0.0073\n-5.0002,-0.0084,-0.0094\n4.9891,0.0100,0.0018\n-5.0040,-0.0016,0.0179\n-4.9953,0.0075,-0.0089\n-4.9746,0.0093,0.0056\n-5.0020,0.0009,-0.0115\n5.0070,-0.0022,-0.0064\n-4.9841,-0.0064,-0.0113\n4.9999,0.0077,-0.0036\n-4.9979,0.0058,0.0293\n-5.0019,-0.0081,-0.0074\n-5.0014,-0.0038,0.0036\n-5.0029,-0.0200,-0.0115\n4.9983,0.0123,-0.0159\n-5.0039,-0.0109,0.0070\n-4.9
869,-0.0043,0.0138\n4.9941,0.0079,-0.0008\n-5.0012,-0.0148,-0.0033\n-5.0032,-0.0049,0.0033\n-4.9958,-0.0124,0.0090\n5.0027,-0.0021,-0.0024\n5.0051,0.0159,-0.0004\n4.9996,0.0201,0.0030\n4.9962,0.0056,0.0078\n5.0046,0.0086,-0.0151\n5.0078,-0.0208,-0.0119\n-5.0157,-0.0118,0.0130\n-5.0120,0.0042,-0.0020\n-4.9913,-0.0088,0.0130\n4.9992,0.0055,0.0140\n-4.9990,-0.0045,-0.0065\n5.0001,0.0180,0.0095\n-5.0006,-0.0145,-0.0157\n-5.0054,0.0009,0.0105\n5.0147,0.0079,-0.0084\n4.9829,-0.0126,-0.0232\n5.0074,0.0101,0.0096\n-4.9930,-0.0030,-0.0110\n-4.9982,-0.0047,0.0027\n-4.9996,-0.0166,-0.0099\n-4.9921,0.0064,0.0034\n4.9848,0.0018,0.0088\n5.0019,-0.0025,-0.0136\n-4.9927,-0.0004,-0.0004\n4.9954,-0.0002,0.0021\n4.9892,-0.0167,0.0103\n-5.0195,-0.0091,0.0022\n5.0137,-0.0137,0.0102\n-5.0001,0.0017,-0.0219\n5.0101,-0.0028,0.0002\n-5.0063,-0.0074,0.0056\n-4.9934,0.0082,-0.0021\n4.9838,0.0138,0.0057\n-4.9981,0.0113,-0.0056\n5.0132,0.0018,-0.0244\n-5.0028,-0.0036,0.0016\n-4.9985,-0.0098,0.0088\n4.9992,0.0082,-0.0017\n-4.9901,-0.0019,-0.0168\n-5.0003,0.0043,0.0007\n5.0107,0.0022,-0.0289\n-4.9903,-0.0073,-0.0076\n-5.0048,0.0047,-0.0009\n-4.9997,0.0134,-0.0074\n-5.0058,0.0037,-0.0167\n-4.9991,0.0075,0.0056\n4.9964,-0.0075,0.0031\n4.9905,0.0144,0.0039\n4.9738,-0.0073,0.0074\n5.0076,0.0132,-0.0012\n4.9959,0.0075,0.0021\n-4.9873,0.0021,-0.0070\n5.0052,-0.0049,0.0030\n-4.9976,0.0039,0.0159\n-5.0166,-0.0013,-0.0036\n4.9984,-0.0002,-0.0210\n-4.9975,-0.0039,0.0121\n-5.0138,-0.0011,0.0090\n5.0115,0.0007,0.0021\n4.9906,-0.0032,-0.0003\n4.9862,-0.0092,-0.0016\n5.0027,0.0007,-0.0049\n-4.9950,0.0058,-0.0111\n4.9874,-0.0001,0.0070\n4.9957,-0.0034,-0.0015\n-4.9996,0.0153,0.0009\n5.0077,0.0061,-0.0001\n5.0244,-0.0002,0.0091\n5.0115,-0.0015,0.0143\n4.9828,-0.0106,0.0014\n5.0052,0.0047,0.0176\n-5.0152,-0.0022,-0.0078\n5.0094,0.0135,-0.0074\n-5.0000,0.0037,0.0041\n5.0084,-0.0016,0.0082\n-4.9960,0.0050,-0.0114\n5.0191,-0.0103,-0.0018\n5.0025,0.0055,0.0113\n-5.0047,-0.0220,0.0020\n4.9969,0.0122,-0.0109\n-4.9960,0
.0122,0.0006\n-5.0028,0.0082,0.0000\n-5.0048,-0.0171,0.0061\n-5.0051,-0.0105,0.0250\n-4.9936,-0.0191,0.0066\n-5.0051,-0.0202,-0.0091\n4.9955,0.0094,0.0045\n4.9961,-0.0016,0.0116\n5.0140,-0.0023,-0.0144\n5.0111,-0.0036,0.0112\n5.0044,-0.0051,-0.0004\n5.0155,0.0011,0.0154\n-5.0132,-0.0037,-0.0095\n4.9951,0.0063,0.0064\n4.9976,0.0062,0.0008\n-5.0090,0.0134,0.0134\n-4.9909,0.0032,0.0079\n4.9992,-0.0052,0.0184\n4.9918,-0.0079,0.0039\n5.0072,-0.0046,0.0091\n5.0140,0.0209,0.0121\n-5.0014,-0.0123,0.0004\n-5.0110,-0.0040,-0.0074\n4.9806,0.0010,0.0128\n5.0024,0.0011,0.0039\n-4.9923,0.0036,-0.0177\n4.9844,-0.0051,0.0068\n4.9835,-0.0078,0.0072\n-5.0066,-0.0158,-0.0034\n5.0026,-0.0054,-0.0012\n4.9919,-0.0109,-0.0031\n-4.9996,0.0063,-0.0156\n-5.0101,-0.0103,0.0032\n-4.9808,-0.0212,0.0141\n-5.0080,-0.0028,-0.0059\n-4.9879,-0.0113,-0.0095\n-4.9867,-0.0069,-0.0015\n-4.9939,0.0090,-0.0013\n5.0160,0.0121,0.0054\n-5.0044,0.0018,-0.0050\n-4.9934,-0.0065,0.0017\n5.0004,-0.0072,0.0069\n5.0073,0.0191,-0.0132\n-4.9968,-0.0014,-0.0096\n5.0038,-0.0147,-0.0026\n-4.9972,-0.0028,-0.0116\n5.0005,0.0063,-0.0051\n-4.9924,-0.0140,-0.0076\n5.0084,0.0026,0.0079\n-4.9996,0.0064,0.0059\n4.9970,-0.0203,0.0012\n-4.9939,-0.0105,0.0121\n4.9812,0.0020,0.0119\n4.9699,-0.0005,0.0083\n4.9977,0.0066,0.0002\n-5.0120,-0.0050,0.0025\n-4.9905,0.0056,-0.0106\n5.0129,-0.0086,-0.0130\n-5.0212,0.0046,0.0028\n4.9798,-0.0244,-0.0015\n-5.0069,0.0154,0.0029\n5.0068,-0.0039,-0.0146\n5.0157,-0.0063,0.0047\n5.0024,-0.0053,-0.0047\n-4.9921,-0.0006,-0.0039\n-4.9806,-0.0041,-0.0075\n5.0051,-0.0066,-0.0075\n4.9944,0.0108,-0.0195\n4.9891,0.0107,-0.0036\n5.0020,-0.0208,-0.0001\n-4.9776,0.0187,-0.0098\n5.0061,-0.0115,0.0255\n-5.0163,-0.0013,-0.0058\n5.0022,0.0054,0.0065\n5.0159,0.0057,-0.0011\n4.9990,0.0085,-0.0049\n5.0081,0.0080,0.0025\n-5.0014,0.0114,0.0010\n-5.0133,-0.0135,0.0069\n-5.0105,-0.0108,0.0112\n4.9823,-0.0032,-0.0005\n5.0074,0.0005,-0.0036\n5.0073,0.0049,-0.0000\n-4.9900,0.0003,-0.0007\n-4.9877,0.0120,-0.0039\n-4.9908,-0
.0110,-0.0109\n-5.0016,0.0024,0.0010\n-4.9834,0.0090,-0.0151\n5.0151,0.0076,0.0043\n4.9906,-0.0070,-0.0138\n-4.9935,-0.0114,-0.0121\n5.0038,0.0015,-0.0147\n-5.0089,-0.0014,0.0102\n4.9958,-0.0133,0.0189\n4.9966,-0.0085,-0.0121\n-4.9994,0.0016,0.0023\n-5.0063,0.0016,0.0010\n-4.9859,0.0016,-0.0022\n-4.9995,0.0135,0.0091\n5.0010,0.0255,-0.0044\n-4.9888,-0.0132,-0.0046\n-4.9857,0.0249,0.0170\n5.0018,-0.0080,0.0108\n-4.9911,-0.0027,0.0028\n-5.0033,-0.0088,-0.0030\n4.9840,0.0082,0.0249\n-5.0048,-0.0030,0.0054\n5.0054,-0.0096,-0.0160\n-5.0039,-0.0051,0.0018\n-4.9976,0.0140,-0.0041\n-4.9947,0.0042,-0.0136\n-5.0134,0.0048,0.0015\n4.9921,0.0068,0.0106\n-5.0047,-0.0094,-0.0041\n5.0174,0.0063,-0.0044\n5.0161,0.0056,-0.0010\n4.9991,-0.0014,-0.0024\n-5.0005,-0.0052,-0.0098\n5.0141,0.0151,0.0082\n-5.0132,0.0054,-0.0009\n-4.9997,-0.0004,0.0127\n-5.0035,-0.0058,-0.0163\n-4.9835,-0.0155,-0.0046\n5.0142,-0.0070,0.0074\n-0.0021,0.0261,-0.0082\n4.9969,-0.0065,-0.0099\n4.9963,0.0023,-0.0141\n4.9981,0.0055,0.0028\n5.0124,-0.0024,-0.0019\n5.0006,0.0001,0.0041\n-5.0052,-0.0075,0.0014\n-5.0003,0.0158,-0.0080\n5.0022,-0.0132,-0.0032\n-5.0090,0.0084,0.0196\n-4.9990,0.0217,0.0066\n-4.9999,-0.0184,-0.0036\n-5.0198,-0.0035,0.0016\n4.9982,-0.0200,-0.0018\n5.0161,0.0151,0.0023\n4.9941,-0.0076,0.0152\n5.0097,0.0118,-0.0112\n-5.0080,0.0151,0.0171\n4.9955,-0.0139,0.0053\n-4.9860,0.0006,0.0030\n-5.0093,-0.0003,0.0007\n-5.0000,0.0033,0.0148\n-5.0010,-0.0099,-0.0118\n-4.9892,0.0109,-0.0109\n4.9976,-0.0043,0.0167\n5.0046,-0.0041,-0.0129\n-5.0058,-0.0169,0.0073\n4.9957,0.0037,0.0049\n5.0033,0.0016,0.0180\n-5.0057,-0.0031,0.0027\n-4.9860,-0.0040,0.0078\n4.9990,0.0040,-0.0005\n5.0014,0.0013,0.0023\n4.9945,0.0122,-0.0033\n-5.0054,0.0043,-0.0015\n-4.9948,0.0050,-0.0067\n4.9942,0.0064,-0.0182\n4.9883,-0.0094,-0.0046\n4.9968,-0.0077,0.0147\n-5.0101,-0.0082,-0.0155\n5.0123,-0.0029,0.0126\n5.0003,0.0298,-0.0237\n4.9958,0.0178,0.0011\n-5.0126,0.0051,0.0109\n-4.9827,0.0068,0.0037\n4.9952,-0.0025,-0.0017\n-5.0019,-0.0
081,-0.0145\n-5.0025,-0.0009,0.0040\n-5.0097,0.0083,0.0049\n4.9982,0.0124,0.0171\n-4.9911,0.0051,-0.0254\n4.9996,0.0111,0.0040\n4.9929,0.0145,0.0054\n-5.0043,0.0108,-0.0022\n5.0067,-0.0105,-0.0087\n-5.0069,-0.0221,0.0045\n-4.9926,0.0043,-0.0021\n5.0033,-0.0080,0.0105\n4.9898,0.0070,-0.0070\n-4.9995,0.0234,-0.0028\n-5.0087,0.0031,0.0102\n4.9959,0.0018,0.0078\n5.0211,0.0076,-0.0071\n-5.0141,-0.0160,0.0146\n-4.9906,-0.0102,0.0051\n4.9923,-0.0237,-0.0026\n-5.0116,-0.0031,-0.0016\n5.0150,-0.0030,-0.0075\n5.0059,-0.0029,0.0200\n-4.9929,0.0042,-0.0312\n5.0071,0.0003,0.0077\n5.0032,-0.0141,0.0015\n4.9984,0.0042,-0.0078\n4.9788,-0.0022,-0.0110\n-4.9907,0.0180,-0.0120\n-5.0034,-0.0052,-0.0031\n5.0018,-0.0067,-0.0005\n4.9927,-0.0001,0.0045\n5.0110,0.0166,0.0093\n-5.0051,0.0077,-0.0181\n-5.0037,-0.0045,0.0040\n4.9939,0.0071,0.0024\n4.9946,-0.0035,0.0186\n5.0021,-0.0020,-0.0062\n-5.0094,0.0038,0.0040\n-5.0048,-0.0052,0.0102\n4.9921,-0.0118,-0.0128\n4.9829,0.0016,-0.0126\n-4.9980,0.0054,-0.0182\n-4.9962,0.0101,-0.0101\n-4.9912,-0.0137,0.0195\n-5.0006,-0.0073,-0.0022\n-5.0137,-0.0077,0.0128\n4.9831,0.0012,-0.0003\n4.9901,0.0026,-0.0090\n4.9914,0.0111,-0.0025\n-4.9869,-0.0047,-0.0086\n5.0075,0.0030,0.0211\n5.0005,0.0097,-0.0070\n5.0006,0.0000,-0.0007\n-4.9899,-0.0173,0.0059\n4.9915,0.0042,0.0201\n5.0045,0.0060,0.0021\n-4.9853,0.0039,0.0117\n4.9959,0.0175,-0.0058\n-5.0046,0.0102,-0.0055\n4.9910,0.0139,0.0100\n-5.0154,0.0259,-0.0040\n4.9921,0.0025,0.0064\n5.0024,0.0098,-0.0133\n0.0127,-0.0014,0.0080\n5.0035,-0.0029,-0.0096\n-5.0158,0.0061,-0.0119\n-4.9922,-0.0004,0.0034\n-5.0063,-0.0065,0.0008\n4.9848,0.0112,-0.0095\n-5.0207,0.0043,0.0068\n5.0055,0.0137,0.0077\n-5.0018,0.0025,0.0105\n-4.9986,0.0047,0.0186\n-5.0031,-0.0003,-0.0032\n5.0064,-0.0077,0.0062\n4.9976,0.0016,0.0118\n5.0037,-0.0139,-0.0258\n5.0076,0.0007,-0.0049\n5.0070,0.0001,-0.0105\n-4.9922,0.0018,0.0029\n-5.0162,-0.0051,0.0174\n4.9825,0.0028,0.0062\n5.0037,0.0044,-0.0108\n5.0067,-0.0095,-0.0048\n4.9850,-0.0048,0.0183\n-4.
9969,0.0082,0.0001\n5.0103,-0.0148,0.0101\n4.9853,0.0058,0.0007\n-5.0072,-0.0077,-0.0149\n5.0090,-0.0158,0.0024\n-5.0145,0.0262,-0.0075\n4.9917,0.0144,0.0116\n4.9961,0.0141,0.0223\n4.9981,0.0095,0.0017\n4.9858,0.0141,0.0042\n4.9891,0.0280,-0.0033\n-5.0078,-0.0034,0.0190\n4.9990,0.0088,-0.0009\n-5.0003,0.0110,-0.0023\n5.0083,-0.0135,-0.0156\n5.0092,-0.0056,0.0065\n4.9865,-0.0087,0.0136\n-4.9924,-0.0054,-0.0075\n5.0086,-0.0169,-0.0069\n-5.0084,-0.0059,-0.0067\n-4.9954,-0.0053,-0.0011\n-4.9959,-0.0163,-0.0272\n4.9909,0.0112,0.0139\n-5.0110,0.0041,-0.0063\n4.9837,0.0016,0.0055\n-4.9970,-0.0110,-0.0140\n4.9987,-0.0184,-0.0033\n5.0081,0.0138,-0.0044\n4.9964,-0.0225,-0.0076\n5.0015,0.0110,-0.0019\n5.0040,0.0084,-0.0108\n-4.9808,0.0148,0.0187\n-4.9779,-0.0180,-0.0106\n5.0006,0.0077,0.0096\n4.9866,0.0042,0.0120\n-5.0073,-0.0164,0.0028\n5.0015,0.0132,-0.0093\n5.0229,-0.0024,0.0072\n-4.9910,0.0001,0.0089\n-5.0135,-0.0076,0.0201\n5.0062,0.0096,0.0047\n-5.0045,0.0049,0.0087\n-5.0171,0.0005,-0.0096\n-4.9981,-0.0085,0.0182\n-4.9941,0.0114,0.0067\n-5.0090,-0.0196,0.0158\n5.0049,-0.0118,-0.0034\n-5.0094,0.0043,-0.0041\n-5.0176,0.0045,-0.0068\n5.0250,0.0163,-0.0010\n4.9898,-0.0047,0.0073\n-5.0138,-0.0031,-0.0084\n-4.9932,0.0058,-0.0021\n4.9952,0.0017,-0.0105\n-4.9901,0.0191,-0.0001\n5.0039,0.0112,-0.0092\n-5.0021,0.0088,-0.0170\n-4.9931,0.0027,-0.0060\n4.9972,-0.0019,-0.0053\n-4.9900,-0.0073,-0.0017\n4.9991,0.0118,-0.0157\n-4.9998,0.0054,0.0014\n4.9890,0.0009,0.0005\n5.0018,0.0095,-0.0020\n5.0094,-0.0085,-0.0039\n4.9835,0.0049,0.0170\n-4.9864,-0.0009,0.0137\n4.9902,0.0064,0.0093\n-5.0140,-0.0031,-0.0171\n-5.0126,0.0122,-0.0209\n5.0043,-0.0085,0.0052\n-4.9941,-0.0051,-0.0082\n-5.0077,0.0088,-0.0125\n5.0014,-0.0131,0.0185\n5.0260,0.0167,-0.0007\n-5.0050,-0.0143,-0.0093\n-4.9953,-0.0053,-0.0054\n-5.0018,-0.0089,-0.0092\n-4.9923,0.0103,-0.0091\n-5.0133,-0.0036,-0.0120\n-5.0036,0.0056,0.0089\n-5.0052,-0.0007,-0.0130\n4.9795,-0.0181,-0.0186\n5.0048,-0.0009,-0.0210\n-4.9948,-0.0127,-0.0006\
n4.9783,-0.0112,-0.0057\n5.0013,-0.0088,-0.0133\n5.0153,-0.0004,0.0079\n-4.9909,-0.0086,0.0191\n-4.9979,-0.0001,0.0137\n5.0343,0.0060,0.0167\n-5.0141,-0.0017,0.0068\n-5.0155,0.0042,-0.0094\n-4.9851,0.0004,0.0050\n4.9850,0.0049,-0.0037\n-5.0112,-0.0004,0.0024\n4.9848,0.0032,0.0061\n4.9862,0.0062,-0.0016\n4.9896,-0.0066,0.0026\n-5.0102,-0.0008,0.0038\n4.9982,0.0032,0.0047\n-5.0152,0.0056,-0.0184\n4.9994,0.0031,0.0182\n-4.9971,0.0107,-0.0072\n-5.0022,-0.0093,-0.0018\n5.0148,-0.0028,-0.0044\n-5.0082,0.0082,0.0014\n4.9946,0.0016,-0.0098\n4.9880,-0.0060,0.0060\n-5.0114,-0.0131,-0.0153\n-5.0008,0.0042,0.0092\n5.0046,-0.0027,-0.0014\n-4.9956,0.0102,-0.0050\n-5.0035,-0.0135,-0.0103\n-5.0018,-0.0020,-0.0011\n5.0002,0.0112,-0.0113\n5.0068,0.0000,0.0065\n5.0034,-0.0114,-0.0005\n5.0065,-0.0118,-0.0034\n4.9977,0.0069,-0.0063\n5.0090,-0.0002,-0.0105\n5.0037,0.0001,0.0176\n4.9962,-0.0091,-0.0015\n-4.9920,-0.0031,-0.0023\n4.9976,0.0068,0.0115\n-4.9977,-0.0102,-0.0011\n-4.9935,-0.0019,-0.0118\n-5.0101,0.0168,-0.0079\n-4.9965,-0.0130,-0.0009\n4.9920,-0.0092,0.0177\n5.0017,-0.0042,-0.0085\n5.0020,0.0044,-0.0128\n5.0023,-0.0114,-0.0001\n-5.0090,0.0131,-0.0086\n4.9855,0.0028,0.0014\n5.0134,-0.0011,0.0136\n5.0056,0.0079,0.0016\n-4.9844,0.0061,-0.0029\n5.0036,-0.0029,0.0062\n4.9870,-0.0008,0.0143\n-4.9869,-0.0003,0.0115\n4.9903,-0.0101,0.0041\n-5.0036,0.0024,-0.0012\n-5.0036,-0.0106,-0.0051\n-5.0006,-0.0158,-0.0283\n4.9824,-0.0078,0.0097\n-5.0066,0.0047,0.0174\n5.0113,-0.0132,-0.0065\n-5.0078,0.0063,-0.0081\n4.9941,0.0218,0.0044\n5.0031,-0.0115,0.0054\n4.9985,-0.0026,-0.0021\n4.9872,-0.0085,0.0034\n5.0120,0.0008,-0.0117\n-4.9923,0.0059,-0.0036\n4.9893,0.0263,-0.0141\n-5.0041,-0.0040,-0.0183\n-4.9991,-0.0004,-0.0029\n-5.0044,-0.0050,0.0193\n4.9929,0.0074,0.0059\n-5.0161,-0.0021,-0.0090\n-5.0072,-0.0112,-0.0080\n-4.9759,-0.0096,-0.0079\n4.9864,-0.0226,-0.0089\n-5.0120,0.0042,0.0069\n4.9858,0.0145,-0.0057\n4.9998,-0.0020,-0.0095\n4.9949,-0.0014,0.0063\n4.9987,0.0380,0.0232\n-4.9934,-0.0026,-0
.0004\n5.0072,0.0158,0.0044\n-4.9988,-0.0013,0.0009\n-4.9949,-0.0033,0.0060\n-5.0014,-0.0036,0.0106\n4.9957,0.0006,-0.0087\n-5.0088,0.0113,0.0050\n-4.9854,-0.0043,-0.0064\n4.9933,0.0005,0.0038\n5.0083,0.0041,-0.0058\n-4.9903,0.0142,0.0196\n-4.9985,0.0069,-0.0033\n-0.0014,0.0137,-0.0026\n5.0058,0.0044,0.0048\n5.0014,0.0120,0.0288\n-4.9868,-0.0129,-0.0052\n-4.9910,0.0137,-0.0133\n-5.0066,-0.0283,0.0212\n4.9976,0.0057,0.0054\n-5.0029,0.0071,-0.0094\n5.0015,-0.0022,0.0130\n-4.9863,-0.0051,0.0032\n-5.0179,0.0133,-0.0096\n4.9899,-0.0047,-0.0005\n4.9865,-0.0339,0.0016\n4.9932,0.0049,0.0186\n4.9829,0.0163,0.0040\n5.0103,-0.0029,-0.0083\n-5.0092,0.0141,-0.0138\n-5.0098,-0.0059,-0.0218\n-5.0101,-0.0068,-0.0041\n4.9950,-0.0170,-0.0063\n-5.0013,0.0046,0.0022\n5.0075,-0.0211,0.0107\n-5.0059,-0.0026,-0.0035\n-5.0079,0.0109,0.0023\n5.0086,0.0163,-0.0092\n-4.9941,-0.0097,-0.0173\n-5.0006,0.0054,0.0048\n4.9913,-0.0010,-0.0168\n-4.9963,-0.0099,0.0064\n-4.9904,-0.0042,-0.0028\n-4.9903,-0.0006,-0.0026\n4.9871,0.0079,0.0196\n-5.0109,0.0021,-0.0131\n-4.9986,0.0152,0.0172\n4.9832,-0.0015,-0.0072\n-5.0148,-0.0127,0.0152\n5.0055,-0.0052,-0.0241\n4.9921,-0.0058,-0.0155\n-5.0021,-0.0038,0.0085\n5.0037,-0.0070,0.0107\n5.0091,-0.0026,0.0011\n5.0029,0.0143,-0.0038\n-4.9946,-0.0014,-0.0020\n-4.9804,0.0094,-0.0201\n-5.0083,-0.0010,-0.0066\n-5.0299,0.0088,0.0181\n-5.0013,-0.0027,0.0022\n-4.9987,-0.0141,0.0118\n-5.0070,0.0039,0.0010\n4.9904,0.0060,-0.0046\n5.0005,-0.0066,0.0227\n-5.0040,-0.0032,0.0060\n-5.0034,-0.0100,0.0079\n5.0096,0.0028,0.0028\n4.9992,0.0064,0.0040\n-5.0068,-0.0056,0.0023\n-4.9874,0.0024,-0.0097\n-5.0062,0.0173,-0.0110\n4.9896,0.0041,0.0053\n-4.9920,0.0008,-0.0040\n-4.9958,-0.0004,-0.0126\n-4.9947,0.0032,0.0043\n-4.9967,-0.0259,0.0010\n4.9899,-0.0137,-0.0020\n-4.9973,-0.0047,-0.0142\n-4.9981,0.0088,-0.0045\n-4.9932,-0.0003,-0.0017\n5.0014,0.0174,-0.0005\n4.9990,0.0075,0.0093\n4.9914,-0.0001,-0.0023\n5.0024,-0.0098,-0.0152\n-4.9781,-0.0022,0.0090\n4.9977,-0.0047,-0.0040\n5.0151,-0
.0060,0.0012\n5.0091,-0.0085,-0.0053\n4.9994,-0.0106,-0.0047\n5.0110,-0.0203,0.0100\n-4.9887,0.0010,-0.0099\n4.9894,-0.0182,0.0024\n-5.0086,-0.0104,0.0014\n5.0027,0.0114,-0.0174\n-5.0143,-0.0006,-0.0143\n5.0021,-0.0091,0.0035\n-5.0034,-0.0126,-0.0278\n-4.9861,-0.0164,-0.0016\n5.0032,0.0161,-0.0180\n4.9812,0.0109,-0.0008\n4.9867,0.0012,0.0007\n5.0023,0.0041,0.0031\n-4.9929,0.0245,-0.0021\n-4.9923,-0.0042,-0.0047\n4.9848,-0.0003,-0.0006\n4.9937,0.0026,-0.0148\n5.0002,0.0028,-0.0153\n-4.9868,-0.0032,0.0020\n5.0199,-0.0037,0.0014\n4.9936,-0.0059,-0.0045\n5.0033,-0.0043,0.0201\n-4.9978,-0.0009,0.0010\n-5.0071,-0.0002,-0.0049\n-4.9803,0.0048,-0.0006\n4.9942,-0.0157,-0.0003\n5.0071,-0.0002,-0.0059\n5.0105,0.0033,-0.0017\n-5.0083,0.0017,-0.0165\n5.0049,-0.0126,0.0004\n-5.0146,-0.0155,0.0013\n-4.9898,0.0070,-0.0042\n-4.9906,-0.0274,-0.0057\n4.9912,0.0112,-0.0027\n-5.0011,-0.0166,0.0012\n-4.9932,-0.0070,-0.0029\n-4.9964,0.0081,0.0006\n4.9960,-0.0076,0.0056\n-4.9837,0.0014,-0.0086\n4.9984,-0.0016,-0.0031\n4.9973,-0.0110,0.0069\n4.9839,0.0095,0.0010\n-5.0088,0.0070,-0.0106\n4.9927,-0.0000,-0.0013\n-4.9938,0.0054,0.0040\n-4.9980,0.0199,0.0112\n4.9857,0.0133,-0.0178\n-5.0054,-0.0028,-0.0071\n5.0115,-0.0060,0.0013\n-4.9989,-0.0066,0.0007\n-5.0253,-0.0138,0.0050\n4.9941,-0.0063,-0.0133\n4.9916,-0.0136,-0.0055\n-4.9785,0.0047,0.0011\n-5.0059,0.0097,-0.0119\n4.9860,0.0062,-0.0167\n4.9972,0.0143,-0.0057\n-5.0039,-0.0160,-0.0089\n4.9929,0.0133,-0.0076\n4.9902,-0.0056,-0.0164\n5.0086,0.0028,-0.0097\n-4.9924,-0.0001,0.0124\n-4.9926,0.0062,-0.0187\n-4.9894,-0.0118,-0.0181\n4.9948,-0.0109,-0.0035\n-4.9888,-0.0017,-0.0051\n-5.0063,-0.0039,0.0000\n-4.9857,0.0042,0.0044\n-4.9940,0.0116,-0.0023\n-5.0048,-0.0048,0.0062\n-4.9952,0.0276,-0.0007\n-5.0105,0.0082,0.0046\n-4.9844,-0.0094,-0.0066\n5.0089,0.0040,0.0096\n4.9980,0.0010,-0.0096\n-5.0090,0.0032,-0.0139\n-4.9979,0.0060,-0.0026\n-4.9892,-0.0161,0.0046\n5.0019,0.0050,0.0191\n5.0052,0.0014,-0.0082\n-5.0051,0.0084,-0.0068\n5.0007,0.0014,-0.0095
\n4.9974,0.0053,0.0018\n-5.0015,0.0119,-0.0010\n-5.0077,0.0056,0.0001\n-4.9846,-0.0129,0.0027\n-5.0010,-0.0105,-0.0031\n4.9924,-0.0068,0.0108\n4.9951,0.0041,0.0040\n4.9792,-0.0068,-0.0061\n4.9923,0.0039,-0.0050\n4.9969,0.0010,-0.0101\n-5.0069,-0.0046,0.0002\n4.9862,0.0201,-0.0024\n4.9983,0.0023,-0.0087\n5.0015,0.0104,0.0163\n5.0040,0.0061,0.0015\n5.0022,-0.0033,-0.0109\n5.0089,0.0013,-0.0166\n5.0143,0.0101,0.0002\n5.0132,-0.0051,0.0053\n4.9995,-0.0130,0.0063\n-4.9990,0.0140,0.0016\n4.9996,-0.0024,0.0088\n-4.9962,-0.0091,0.0081\n-4.9826,-0.0145,-0.0158\n5.0184,0.0001,-0.0102\n-4.9787,0.0094,-0.0004\n-5.0001,-0.0180,-0.0080\n-4.9951,0.0064,-0.0157\n5.0266,-0.0082,-0.0145\n-4.9973,0.0105,0.0060\n-5.0073,0.0020,0.0035\n-4.9990,0.0054,0.0009\n4.9911,0.0069,-0.0021\n4.9997,0.0052,-0.0067\n4.9943,0.0059,-0.0085\n-4.9962,-0.0110,0.0030\n-4.9977,0.0063,0.0031\n-5.0142,0.0200,-0.0086\n4.9880,-0.0163,0.0090\n4.9875,-0.0033,-0.0083\n-4.9950,-0.0086,0.0016\n-4.9953,-0.0087,0.0118\n-4.9990,0.0050,0.0133\n4.9784,0.0114,-0.0095\n-5.0017,0.0077,0.0082\n-5.0072,-0.0123,-0.0051\n4.9971,0.0098,0.0004\n-5.0194,0.0019,0.0052\n5.0048,-0.0022,-0.0024\n-4.9851,0.0190,0.0118\n-5.0089,-0.0015,0.0106\n4.9853,-0.0041,-0.0051\n-5.0101,-0.0060,0.0092\n5.0121,0.0042,-0.0009\n4.9818,-0.0058,-0.0092\n4.9868,0.0016,-0.0062\n4.9941,-0.0006,0.0080\n-5.0077,-0.0100,-0.0100\n4.9886,0.0013,0.0006\n-5.0007,-0.0066,-0.0051\n4.9939,0.0158,-0.0038\n-4.9982,-0.0034,0.0002\n5.0178,-0.0003,0.0158\n-4.9794,-0.0011,0.0102\n-4.9907,-0.0139,0.0087\n-4.9966,-0.0002,0.0016\n-4.9942,-0.0040,0.0037\n-4.9833,0.0016,0.0156\n-4.9892,-0.0063,-0.0024\n-5.0064,-0.0040,-0.0013\n-4.9815,-0.0021,-0.0050\n-4.9965,0.0066,-0.0110\n5.0065,-0.0038,-0.0047\n-5.0136,-0.0079,-0.0027\n5.0079,0.0046,0.0013\n4.9919,-0.0032,-0.0159\n-4.9886,-0.0123,0.0040\n5.0019,-0.0080,0.0206\n-5.0104,-0.0032,0.0062\n-5.0215,-0.0103,0.0010\n-5.0083,-0.0060,-0.0152\n4.9961,-0.0061,0.0154\n-5.0127,0.0097,-0.0117\n-4.9969,-0.0137,0.0087\n5.0060,-0.0051,-0.00
69\n-4.9848,-0.0038,-0.0044\n4.9939,0.0183,0.0164\n5.0062,0.0089,0.0088\n5.0166,-0.0007,0.0236\n4.9944,0.0010,-0.0006\n5.0130,0.0026,0.0102\n4.9989,0.0171,-0.0030\n-5.0030,-0.0031,-0.0168\n5.0072,0.0141,0.0057\n-5.0017,0.0038,0.0132\n-5.0033,0.0047,0.0031\n-4.9939,0.0029,0.0098\n4.9849,-0.0058,-0.0111\n5.0065,0.0020,0.0097\n-5.0178,-0.0053,0.0109\n5.0012,-0.0100,0.0130\n5.0070,-0.0064,0.0060\n4.9928,-0.0063,0.0059\n4.9947,-0.0028,0.0136\n-4.9936,0.0026,0.0104\n-4.9976,0.0011,-0.0009\n-5.0056,0.0101,0.0144\n4.9876,-0.0003,0.0018\n-5.0164,-0.0178,0.0022\n-4.9943,0.0107,-0.0050\n-5.0076,-0.0120,0.0052\n4.9866,-0.0231,0.0073\n5.0035,-0.0023,0.0029\n4.9921,-0.0144,0.0015\n4.9993,0.0062,0.0119\n-4.9943,-0.0022,-0.0035\n5.0024,-0.0041,0.0176\n5.0099,-0.0037,-0.0149\n5.0012,-0.0161,-0.0017\n-4.9945,-0.0081,-0.0145\n-5.0016,0.0061,0.0092\n5.0053,0.0010,-0.0039\n-4.9940,-0.0082,0.0180\n4.9960,0.0033,0.0039\n-5.0041,0.0043,-0.0018\n4.9811,0.0057,0.0260\n5.0052,0.0108,0.0089\n4.9926,0.0014,-0.0106\n-4.9927,-0.0186,0.0038\n5.0172,0.0080,-0.0056\n5.0028,0.0029,0.0026\n5.0000,-0.0224,-0.0036\n4.9972,0.0118,-0.0065\n-4.9916,-0.0100,-0.0154\n-5.0005,-0.0084,0.0065\n5.0167,0.0062,0.0005\n4.9787,0.0020,0.0036\n-5.0074,-0.0061,-0.0064\n4.9987,-0.0123,0.0011\n4.9894,0.0001,0.0079\n-4.9997,0.0223,-0.0010\n-4.9914,0.0114,0.0147\n4.9888,0.0027,-0.0046\n-5.0171,0.0195,-0.0051\n4.9788,-0.0031,-0.0037\n-5.0185,0.0060,-0.0163\n5.0120,-0.0034,0.0084\n-4.9916,0.0025,-0.0003\n-4.9965,0.0077,-0.0077\n-5.0081,-0.0112,-0.0013\n-4.9863,-0.0166,0.0015\n-4.9981,0.0130,0.0100\n-4.9845,-0.0006,-0.0045\n-5.0006,-0.0011,-0.0072\n-4.9966,0.0032,0.0031\n4.9952,-0.0033,-0.0133\n-4.9993,-0.0050,0.0122\n4.9985,0.0070,0.0085\n-4.9928,0.0139,-0.0030\n5.0102,-0.0033,0.0136\n-5.0012,0.0050,0.0172\n-4.9996,0.0171,-0.0077\n-5.0040,-0.0147,0.0091\n-5.0044,-0.0125,0.0078\n5.0107,0.0036,-0.0060\n-4.9893,0.0099,0.0077\n5.0001,0.0112,0.0229\n-4.9890,0.0040,-0.0086\n5.0054,-0.0029,0.0163\n-4.9965,-0.0015,-0.0130\n5.0066,0.
0059,0.0030\n4.9955,-0.0130,-0.0205\n5.0121,0.0145,-0.0003\n5.0183,-0.0146,-0.0045\n-5.0043,0.0115,0.0030\n-5.0222,0.0063,-0.0160\n5.0100,-0.0097,0.0216\n-5.0173,0.0018,-0.0040\n-0.0221,-0.0053,0.0017\n-5.0040,-0.0042,-0.0067\n-5.0035,-0.0137,-0.0064\n-4.9921,0.0110,-0.0070\n-4.9841,-0.0026,0.0031\n-5.0099,0.0204,0.0004\n5.0052,-0.0010,-0.0054\n5.0089,0.0000,-0.0146\n4.9957,-0.0085,0.0041\n5.0129,0.0117,0.0151\n4.9935,0.0063,0.0010\n-4.9861,0.0104,0.0002\n4.9983,-0.0027,-0.0021\n-4.9947,0.0025,0.0086\n-5.0034,0.0203,0.0105\n5.0034,0.0014,0.0141\n4.9984,-0.0046,-0.0133\n5.0016,0.0080,0.0029\n-5.0075,0.0101,-0.0153\n-5.0089,0.0094,0.0141\n5.0120,0.0180,0.0022\n4.9943,-0.0059,0.0108\n-5.0232,0.0032,0.0052\n4.9896,-0.0120,-0.0267\n-4.9872,-0.0089,0.0040\n-5.0043,0.0057,0.0068\n-4.9918,-0.0028,0.0143\n5.0120,-0.0164,0.0263\n4.9985,-0.0048,-0.0023\n-4.9960,-0.0178,0.0167\n-5.0043,-0.0028,0.0053\n-5.0053,0.0037,0.0130\n5.0021,-0.0120,0.0118\n-4.9966,-0.0022,0.0246\n4.9804,0.0053,-0.0011\n-5.0092,-0.0145,0.0002\n-4.9807,0.0195,0.0124\n-4.9858,-0.0097,0.0068\n4.9960,-0.0093,-0.0024\n-5.0090,0.0048,-0.0005\n4.9927,-0.0139,0.0074\n4.9978,0.0090,-0.0301\n-4.9953,0.0093,-0.0018\n5.0245,-0.0187,-0.0007\n-4.9985,0.0050,-0.0074\n4.9927,-0.0106,-0.0032\n5.0084,0.0102,0.0110\n-4.9988,-0.0055,-0.0055\n-5.0015,-0.0076,0.0030\n-5.0038,-0.0174,-0.0130\n4.9834,0.0043,-0.0004\n-4.9931,-0.0073,-0.0138\n-4.9986,0.0005,0.0186\n-4.9981,-0.0116,0.0077\n-4.9973,-0.0139,0.0080\n4.9956,-0.0144,0.0109\n4.9885,0.0089,-0.0036\n5.0028,0.0201,0.0010\n5.0241,-0.0046,0.0032\n-5.0059,-0.0201,0.0059\n-4.9979,0.0155,-0.0006\n-4.9978,-0.0103,-0.0035\n-5.0019,0.0153,0.0147\n-4.9904,0.0023,-0.0055\n5.0118,0.0104,-0.0161\n-4.9959,-0.0077,0.0054\n4.9995,-0.0195,-0.0096\n-5.0093,-0.0126,0.0045\n-5.0022,-0.0086,0.0005\n4.9915,0.0072,0.0061\n-5.0117,0.0110,-0.0058\n4.9925,-0.0030,-0.0098\n-5.0139,-0.0195,0.0030\n-5.0147,0.0165,0.0016\n4.9991,0.0090,0.0125\n-4.9907,0.0108,0.0157\n-4.9830,-0.0001,0.0082\n-5.0004,-0.0
096,-0.0035\n-4.9976,-0.0141,-0.0059\n-5.0137,-0.0107,0.0176\n5.0168,-0.0029,-0.0023\n-4.9787,0.0041,-0.0019\n4.9988,-0.0017,-0.0046\n4.9983,0.0121,0.0006\n5.0066,-0.0104,0.0035\n5.0078,0.0045,0.0254\n5.0046,-0.0040,-0.0007\n4.9980,-0.0145,-0.0171\n-4.9943,-0.0086,-0.0051\n-5.0131,0.0166,-0.0012\n4.9984,0.0065,0.0008\n-5.0154,0.0031,0.0037\n-5.0044,0.0185,0.0067\n4.9890,0.0024,0.0039\n4.9933,-0.0052,-0.0074\n5.0181,-0.0079,-0.0058\n5.0085,0.0089,-0.0111\n-4.9925,-0.0063,-0.0039\n4.9931,-0.0026,0.0212\n-5.0070,0.0004,-0.0032\n4.9858,-0.0094,-0.0020\n-4.9826,-0.0022,-0.0091\n4.9996,-0.0041,-0.0008\n5.0156,-0.0093,0.0108\n-5.0097,0.0024,-0.0061\n-4.9939,-0.0159,0.0000\n5.0000,-0.0189,0.0045\n5.0026,0.0103,-0.0158\n-5.0049,-0.0028,0.0126\n4.9898,0.0066,-0.0089\n-4.9838,-0.0082,0.0042\n-4.9854,0.0006,0.0148\n-0.0066,-0.0012,-0.0042\n-4.9956,0.0018,-0.0080\n-5.0079,-0.0091,0.0022\n4.9979,0.0030,0.0036\n5.0043,-0.0135,-0.0052\n4.9889,0.0239,-0.0118\n5.0050,0.0211,0.0050\n-4.9924,0.0012,0.0044\n-5.0095,0.0013,-0.0198\n5.0051,-0.0216,0.0031\n-4.9959,0.0014,0.0145\n-4.9927,-0.0034,0.0007\n5.0164,-0.0044,0.0018\n-4.9762,0.0094,-0.0091\n5.0193,0.0048,-0.0132\n5.0107,-0.0095,0.0104\n5.0004,-0.0015,-0.0036\n-5.0019,-0.0003,-0.0153\n4.9865,0.0119,0.0049\n-5.0031,0.0006,-0.0117\n5.0060,-0.0077,-0.0084\n-4.9979,0.0031,0.0016\n-4.9918,-0.0235,-0.0101\n-4.9901,0.0163,0.0129\n-5.0038,-0.0022,-0.0030\n4.9959,-0.0085,-0.0034\n4.9992,0.0009,0.0040\n-5.0028,-0.0118,0.0101\n5.0092,0.0093,-0.0181\n4.9994,-0.0001,-0.0007\n-5.0004,-0.0016,0.0028\n-4.9938,0.0001,0.0053\n5.0230,0.0103,0.0155\n5.0039,-0.0050,0.0216\n-4.9930,0.0000,0.0093\n-4.9923,-0.0103,-0.0046\n5.0162,-0.0086,-0.0128\n0.0204,-0.0092,0.0011\n4.9868,-0.0044,-0.0074\n4.9880,0.0150,-0.0131\n-4.9976,0.0029,0.0041\n5.0190,-0.0160,-0.0025\n-5.0049,-0.0054,0.0042\n4.9872,-0.0056,-0.0191\n4.9948,-0.0159,-0.0143\n5.0073,0.0110,-0.0034\n-4.9928,0.0002,0.0072\n4.9903,0.0125,0.0070\n-5.0042,0.0086,-0.0266\n4.9916,-0.0017,-0.0022\n-5.0094,-0
.0040,-0.0086\n-4.9948,0.0127,0.0050\n-5.0017,0.0073,0.0110\n-5.0002,0.0108,-0.0200\n-5.0163,0.0046,-0.0091\n4.9896,-0.0059,-0.0118\n-5.0168,0.0021,0.0010\n4.9790,0.0096,0.0025\n-4.9945,0.0000,-0.0154\n5.0201,-0.0137,-0.0086\n4.9924,-0.0047,0.0011\n5.0128,0.0094,-0.0272\n-5.0071,0.0072,-0.0025\n5.0174,0.0131,0.0045\n5.0078,-0.0148,-0.0045\n4.9923,0.0118,-0.0025\n4.9898,0.0011,-0.0091\n-5.0041,0.0177,-0.0039\n5.0114,0.0063,0.0065\n4.9965,0.0072,0.0016\n5.0023,-0.0103,-0.0068\n-4.9780,-0.0099,-0.0100\n4.9948,-0.0092,-0.0100\n-5.0069,-0.0055,0.0165\n5.0042,-0.0061,0.0105\n-4.9980,0.0125,0.0070\n-5.0113,-0.0073,-0.0038\n-4.9996,0.0059,0.0001\n5.0138,-0.0039,0.0081\n-5.0004,-0.0117,0.0052\n4.9860,-0.0033,0.0116\n4.9927,0.0080,-0.0018\n4.9864,0.0053,-0.0010\n-5.0081,0.0027,-0.0089\n-5.0019,-0.0121,-0.0008\n-4.9774,-0.0070,0.0094\n-5.0029,-0.0174,-0.0078\n-4.9850,0.0118,0.0072\n5.0009,0.0107,0.0244\n-5.0054,0.0003,0.0115\n4.9867,0.0114,0.0098\n-4.9877,0.0145,-0.0088\n4.9934,-0.0069,-0.0084\n4.9970,-0.0074,-0.0051\n4.9918,-0.0091,0.0182\n5.0208,0.0025,0.0178\n5.0044,-0.0008,0.0193\n5.0131,-0.0013,0.0096\n5.0152,0.0039,-0.0124\n5.0011,0.0004,0.0034\n5.0124,-0.0057,0.0042\n-4.9961,-0.0226,-0.0102\n-5.0125,-0.0071,0.0070\n-4.9895,0.0317,0.0019\n4.9956,0.0075,-0.0064\n5.0195,0.0098,0.0018\n4.9956,0.0066,-0.0374\n-4.9965,0.0038,-0.0047\n-0.0007,-0.0074,0.0123\n4.9925,-0.0053,0.0046\n-4.9979,0.0053,0.0024\n-5.0111,0.0010,0.0234\n4.9782,0.0029,-0.0038\n-5.0000,-0.0181,-0.0002\n4.9879,0.0260,-0.0018\n4.9995,0.0024,-0.0151\n-4.9849,0.0160,0.0021\n4.9945,-0.0012,-0.0014\n-4.9912,0.0025,0.0229\n-4.9967,0.0033,0.0223\n-4.9961,-0.0065,-0.0039\n-4.9885,-0.0059,-0.0045\n4.9835,-0.0021,-0.0071\n-5.0010,-0.0105,0.0025\n5.0198,-0.0009,-0.0055\n5.0030,0.0113,-0.0067\n4.9827,-0.0037,-0.0014\n4.9852,0.0004,0.0137\n-4.9954,-0.0005,-0.0043\n-4.9990,0.0013,-0.0061\n5.0006,-0.0034,-0.0115\n5.0004,0.0077,0.0013\n-4.9917,-0.0050,-0.0007\n-5.0144,0.0136,-0.0069\n4.9806,0.0019,-0.0039\n-5.0015,0.0107,-
0.0114\n5.0013,-0.0121,-0.0055\n-5.0005,0.0043,0.0058\n5.0107,-0.0023,-0.0010\n-5.0171,0.0049,0.0049\n-4.9989,-0.0068,-0.0137\n-5.0005,0.0024,-0.0100\n5.0145,-0.0021,0.0166\n-4.9781,0.0098,-0.0009\n-4.9956,-0.0022,-0.0108\n4.9818,0.0177,0.0108\n4.9919,-0.0025,-0.0038\n-5.0065,-0.0146,0.0017\n5.0076,-0.0111,0.0019\n-4.9961,-0.0051,-0.0118\n-4.9906,-0.0027,-0.0068\n-5.0059,-0.0090,0.0055\n-5.0229,0.0025,-0.0202\n-4.9949,-0.0012,-0.0095\n-5.0043,-0.0226,-0.0132\n4.9949,0.0033,0.0094\n4.9955,-0.0055,-0.0075\n-4.9985,0.0038,-0.0089\n4.9990,-0.0059,0.0132\n-4.9960,0.0022,0.0033\n5.0128,0.0005,-0.0076\n5.0000,-0.0048,0.0168\n-4.9986,-0.0032,0.0069\n4.9982,-0.0021,0.0027\n-5.0055,0.0150,0.0011\n5.0208,0.0114,0.0071\n-5.0060,0.0155,0.0029\n5.0015,-0.0088,-0.0055\n5.0089,-0.0052,-0.0008\n-5.0039,-0.0305,0.0054\n-4.9927,0.0032,-0.0041\n-5.0070,0.0025,0.0153\n-4.9798,0.0051,0.0036\n-4.9977,0.0139,0.0201\n-4.9986,-0.0084,0.0056\n-4.9948,0.0041,-0.0042\n-4.9834,0.0107,-0.0045\n4.9916,0.0166,-0.0188\n4.9903,-0.0005,0.0171\n4.9938,-0.0007,0.0080\n-4.9959,-0.0020,0.0067\n5.0000,0.0175,0.0106\n4.9982,-0.0018,-0.0220\n-5.0018,0.0039,-0.0034\n-4.9935,0.0043,-0.0020\n5.0250,0.0033,0.0024\n-4.9960,-0.0277,0.0196\n5.0102,0.0005,-0.0160\n5.0034,-0.0031,-0.0098\n5.0101,-0.0090,-0.0097\n4.9931,-0.0002,-0.0016\n-4.9885,-0.0091,0.0140\n5.0004,0.0059,0.0109\n-4.9867,-0.0010,-0.0080\n4.9990,-0.0070,0.0030\n4.9999,0.0097,0.0030\n4.9910,0.0067,0.0177\n-5.0027,0.0083,-0.0030\n-5.0135,0.0126,-0.0030\n-4.9998,-0.0008,-0.0096\n-5.0216,-0.0040,0.0225\n-5.0005,0.0008,0.0085\n5.0041,0.0012,0.0027\n5.0000,-0.0148,-0.0148\n5.0023,0.0100,-0.0055\n5.0029,0.0095,0.0080\n4.9841,0.0020,-0.0022\n4.9969,0.0081,-0.0074\n4.9789,-0.0169,0.0004\n5.0074,-0.0099,0.0013\n4.9946,-0.0126,-0.0207\n-4.9936,0.0054,0.0072\n5.0018,0.0186,-0.0108\n-4.9902,0.0036,0.0071\n-5.0130,-0.0080,-0.0077\n4.9923,-0.0116,0.0115\n5.0062,0.0014,0.0051\n5.0016,-0.0083,0.0067\n-4.9994,-0.0161,-0.0097\n4.9963,-0.0063,-0.0134\n5.0037,-0.0044,0.0
042\n5.0010,-0.0117,-0.0012\n4.9905,0.0009,0.0054\n4.9942,-0.0059,-0.0017\n-5.0110,0.0171,-0.0079\n5.0081,0.0015,-0.0074\n-4.9991,-0.0048,0.0140\n-4.9950,0.0101,-0.0004\n4.9995,0.0021,-0.0063\n5.0127,0.0018,-0.0077\n-4.9961,-0.0094,0.0102\n5.0092,-0.0079,-0.0128\n5.0057,0.0087,0.0066\n4.9767,-0.0057,0.0267\n5.0157,0.0116,-0.0082\n-4.9913,0.0028,-0.0097\n-5.0002,-0.0027,-0.0132\n5.0043,-0.0009,-0.0125\n5.0087,-0.0226,-0.0044\n-5.0172,0.0180,-0.0036\n-4.9838,0.0057,-0.0054\n4.9946,-0.0009,-0.0155\n4.9814,0.0060,-0.0032\n-5.0231,-0.0084,-0.0154\n-5.0050,0.0134,-0.0012\n-4.9962,0.0007,0.0002\n5.0028,-0.0109,0.0012\n-4.9962,0.0021,0.0116\n-5.0019,-0.0114,0.0114\n5.0043,0.0052,-0.0047\n-4.9997,-0.0258,-0.0115\n-5.0158,0.0224,-0.0142\n5.0079,0.0014,-0.0079\n4.9945,0.0034,0.0107\n5.0121,0.0009,0.0221\n4.9894,-0.0008,-0.0053\n5.0041,-0.0101,0.0024\n4.9997,-0.0104,0.0135\n5.0134,0.0034,0.0107\n4.9863,0.0082,0.0106\n5.0057,-0.0055,-0.0119\n4.9921,0.0163,-0.0028\n4.9933,0.0015,-0.0194\n4.9907,-0.0231,-0.0032\n4.9784,-0.0044,0.0068\n4.9875,0.0014,0.0036\n4.9927,0.0025,-0.0047\n-4.9983,-0.0165,0.0107\n4.9934,0.0003,-0.0040\n5.0115,0.0019,0.0066\n-5.0125,-0.0117,0.0059\n-5.0106,-0.0056,0.0003\n5.0028,-0.0034,-0.0010\n-4.9931,0.0130,-0.0063\n4.9765,-0.0078,-0.0133\n5.0109,-0.0101,0.0057\n5.0017,0.0001,0.0013\n-5.0161,-0.0137,0.0187\n-4.9839,0.0101,0.0053\n-5.0025,-0.0087,0.0066\n4.9951,0.0079,-0.0055\n-5.0063,0.0171,0.0141\n4.9886,0.0010,-0.0027\n-5.0044,-0.0164,-0.0041\n5.0133,-0.0099,-0.0043\n5.0100,-0.0059,-0.0069\n5.0159,0.0098,0.0093\n4.9952,-0.0005,0.0022\n4.9872,-0.0072,-0.0235\n-4.9968,0.0086,-0.0065\n5.0000,-0.0178,0.0013\n4.9942,-0.0033,0.0121\n-5.0048,0.0070,0.0093\n4.9942,-0.0036,0.0070\n4.9877,0.0151,0.0032\n-4.9912,-0.0183,-0.0040\n5.0087,-0.0235,0.0040\n4.9914,-0.0081,-0.0117\n-5.0099,-0.0040,-0.0080\n-4.9983,0.0002,0.0010\n-5.0095,0.0161,-0.0056\n-5.0020,0.0009,0.0142\n5.0151,-0.0057,0.0067\n-5.0013,0.0182,-0.0008\n5.0014,-0.0000,0.0053\n5.0089,-0.0194,-0.0015\n-4.9
943,-0.0033,0.0048\n4.9949,0.0139,0.0112\n-5.0102,-0.0115,0.0060\n-4.9968,0.0208,-0.0091\n4.9984,0.0005,-0.0143\n5.0003,0.0055,-0.0110\n-4.9955,-0.0183,0.0004\n4.9916,0.0033,-0.0027\n-4.9998,-0.0092,0.0019\n-5.0044,-0.0037,-0.0096\n5.0011,0.0019,-0.0144\n-5.0168,-0.0089,0.0024\n-5.0110,0.0232,0.0012\n5.0049,0.0082,-0.0036\n-4.9881,0.0032,0.0092\n-4.9880,0.0046,-0.0036\n-5.0045,-0.0038,-0.0015\n4.9891,-0.0025,0.0110\n-4.9870,-0.0236,0.0002\n-5.0002,0.0128,0.0060\n4.9975,-0.0027,0.0009\n-5.0030,0.0103,-0.0007\n4.9923,-0.0077,-0.0008\n5.0242,0.0057,-0.0166\n-5.0017,-0.0013,0.0122\n-5.0101,-0.0214,0.0180\n5.0191,-0.0041,0.0110\n5.0101,-0.0030,-0.0011\n-4.9905,-0.0016,-0.0009\n-5.0196,-0.0145,-0.0045\n-4.9962,0.0024,0.0016\n-5.0097,-0.0162,0.0057\n-4.9969,-0.0085,-0.0255\n5.0073,0.0266,-0.0009\n5.0064,-0.0007,0.0003\n-5.0049,0.0090,0.0051\n-4.9929,0.0033,0.0012\n5.0053,0.0041,0.0161\n4.9878,-0.0094,0.0025\n-4.9934,-0.0060,-0.0022\n-4.9936,-0.0162,-0.0002\n-4.9979,-0.0121,-0.0024\n-4.9826,0.0099,0.0132\n4.9914,-0.0208,-0.0115\n-4.9921,0.0014,0.0023\n-4.9984,0.0094,0.0077\n-5.0016,-0.0074,-0.0006\n-4.9887,-0.0108,-0.0115\n-4.9872,0.0133,0.0021\n-5.0064,0.0085,0.0067\n-4.9977,0.0045,-0.0007\n4.9967,-0.0057,0.0103\n5.0069,0.0056,-0.0097\n-5.0095,-0.0046,-0.0018\n-4.9916,-0.0025,0.0005\n4.9967,-0.0009,-0.0058\n5.0012,-0.0071,-0.0046\n4.9992,0.0006,0.0025\n-4.9960,0.0050,-0.0122\n-5.0072,-0.0045,-0.0174\n4.9994,0.0217,-0.0007\n5.0068,0.0059,0.0071\n-5.0046,0.0045,0.0011\n4.9940,0.0130,-0.0082\n5.0044,-0.0047,0.0151\n5.0013,-0.0067,-0.0001\n-4.9970,0.0103,0.0191\n-5.0120,0.0041,0.0120\n5.0188,-0.0013,0.0007\n-4.9986,0.0183,0.0036\n-4.9851,-0.0041,0.0138\n-5.0098,-0.0090,0.0080\n5.0080,-0.0000,-0.0010\n-4.9972,0.0034,0.0202\n5.0142,-0.0079,-0.0056\n-5.0113,0.0028,-0.0099\n-4.9966,0.0065,0.0000\n-5.0100,-0.0160,-0.0013\n-5.0122,0.0014,-0.0074\n-5.0103,0.0068,-0.0080\n5.0031,0.0030,0.0061\n-4.9901,-0.0051,-0.0077\n4.9952,0.0021,0.0075\n-4.9887,-0.0195,-0.0066\n-4.9820,-0.0021,0.01
57\n5.0094,0.0053,-0.0124\n-4.9988,-0.0005,0.0066\n5.0025,-0.0121,-0.0039\n4.9912,0.0221,-0.0035\n-4.9970,0.0061,0.0111\n4.9882,-0.0072,-0.0164\n4.9983,0.0103,0.0014\n-5.0116,-0.0128,-0.0104\n5.0140,-0.0078,-0.0113\n5.0024,0.0010,-0.0094\n4.9862,-0.0061,-0.0038\n-4.9920,-0.0066,0.0097\n-5.0058,0.0057,-0.0049\n4.9986,0.0092,-0.0127\n-4.9784,0.0134,-0.0037\n-5.0051,-0.0060,-0.0005\n-5.0060,0.0003,-0.0085\n-4.9997,-0.0009,0.0136\n5.0129,0.0019,0.0007\n5.0155,0.0029,-0.0011\n5.0022,-0.0083,0.0045\n4.9971,0.0263,-0.0050\n-5.0135,-0.0040,-0.0047\n-5.0001,0.0134,0.0050\n-4.9995,0.0082,0.0053\n-4.9917,0.0033,0.0163\n-4.9905,-0.0015,-0.0010\n-4.9890,-0.0014,0.0003\n-4.9935,0.0086,-0.0074\n-5.0018,-0.0107,0.0105\n-4.9862,0.0125,-0.0114\n4.9835,-0.0254,0.0101\n-4.9999,-0.0012,0.0034\n4.9971,-0.0059,0.0004\n-5.0008,-0.0070,-0.0077\n-5.0052,-0.0118,0.0096\n4.9969,-0.0052,-0.0131\n5.0055,-0.0059,0.0126\n-4.9857,0.0094,0.0068\n4.9926,-0.0154,-0.0056\n4.9963,0.0073,-0.0013\n5.0094,0.0030,0.0101\n5.0022,0.0108,-0.0140\n-5.0009,0.0073,-0.0007\n4.9969,-0.0088,0.0078\n-4.9833,-0.0006,-0.0000\n-5.0095,-0.0147,0.0101\n5.0033,0.0028,-0.0038\n-5.0067,-0.0036,-0.0081\n4.9889,0.0215,0.0153\n-5.0015,0.0113,-0.0168\n5.0068,-0.0183,0.0113\n4.9966,-0.0061,-0.0073\n-5.0004,-0.0076,-0.0005\n-4.9999,0.0179,0.0013\n-4.9851,0.0052,0.0061\n4.9930,0.0216,0.0054\n5.0134,-0.0041,0.0042\n4.9975,-0.0116,0.0026\n4.9927,0.0135,0.0015\n5.0066,0.0007,-0.0087\n5.0105,-0.0003,-0.0036\n-4.9958,0.0025,-0.0063\n5.0117,0.0053,0.0015\n-4.9892,-0.0256,0.0118\n-5.0089,0.0088,0.0009\n5.0006,0.0201,0.0008\n4.9938,0.0049,-0.0011\n5.0123,0.0003,-0.0065\n-5.0006,-0.0040,0.0075\n5.0089,0.0046,0.0062\n5.0000,-0.0150,-0.0091\n4.9894,-0.0090,-0.0143\n4.9978,-0.0049,-0.0159\n-4.9994,-0.0021,-0.0076\n4.9942,-0.0011,0.0138\n5.0099,-0.0005,0.0120\n-5.0156,0.0002,0.0105\n5.0000,0.0155,0.0021\n4.9997,0.0034,0.0078\n-4.9976,-0.0037,0.0097\n5.0023,-0.0131,0.0094\n5.0023,0.0110,0.0042\n4.9869,0.0107,0.0038\n-4.9897,-0.0205,-0.0123\n5.00
84,-0.0017,-0.0281\n5.0181,-0.0087,-0.0046\n-5.0045,0.0089,-0.0047\n-4.9892,-0.0144,-0.0121\n4.9964,-0.0035,-0.0067\n-5.0007,0.0171,-0.0074\n-4.9949,0.0009,-0.0049\n5.0108,0.0010,0.0011\n-5.0032,-0.0071,-0.0039\n-4.9858,0.0040,-0.0059\n5.0173,0.0036,0.0088\n-5.0082,-0.0149,0.0150\n-5.0060,-0.0024,-0.0142\n-5.0069,-0.0012,0.0047\n4.9936,0.0019,-0.0028\n-5.0139,0.0037,0.0108\n-4.9967,0.0095,-0.0150\n4.9928,-0.0080,-0.0002\n-5.0042,0.0010,0.0023\n4.9999,0.0134,-0.0083\n-4.9877,0.0067,0.0083\n4.9992,0.0045,-0.0013\n-4.9773,-0.0145,0.0005\n-4.9876,-0.0121,-0.0088\n4.9965,0.0331,-0.0151\n4.9983,-0.0060,0.0125\n-4.9923,-0.0166,0.0045\n-4.9995,0.0087,-0.0085\n-5.0072,-0.0089,-0.0016\n-0.0016,-0.0032,-0.0040\n-4.9987,-0.0014,-0.0131\n4.9966,-0.0035,0.0063\n-5.0119,-0.0060,-0.0115\n4.9806,0.0090,0.0164\n5.0058,0.0089,0.0078\n-4.9972,0.0042,-0.0103\n5.0088,0.0110,-0.0021\n5.0156,-0.0113,-0.0030\n-5.0053,0.0065,0.0101\n4.9982,-0.0018,-0.0035\n-5.0226,0.0038,-0.0008\n-5.0042,-0.0083,0.0141\n-5.0162,-0.0029,-0.0076\n-5.0096,-0.0078,-0.0011\n-5.0093,0.0124,0.0081\n-5.0038,0.0020,0.0195\n4.9983,0.0001,-0.0093\n-4.9927,-0.0258,-0.0094\n5.0142,0.0043,-0.0095\n4.9952,-0.0064,0.0001\n-5.0059,-0.0038,0.0085\n4.9999,-0.0171,0.0171\n-5.0037,0.0094,0.0030\n-5.0086,0.0062,-0.0063\n4.9946,-0.0016,-0.0123\n-4.9920,-0.0089,0.0063\n-4.9992,0.0140,0.0015\n5.0152,-0.0191,0.0074\n-5.0033,-0.0002,0.0078\n-4.9943,0.0008,-0.0082\n-5.0065,-0.0052,-0.0184\n5.0057,-0.0098,-0.0069\n5.0086,-0.0074,0.0096\n5.0017,-0.0002,0.0014\n-4.9914,-0.0012,0.0049\n-4.9888,0.0076,0.0087\n-5.0032,-0.0163,-0.0007\n-4.9946,0.0073,-0.0038\n5.0157,0.0131,0.0009\n-4.9910,0.0047,-0.0154\n-4.9890,0.0130,0.0270\n5.0079,0.0145,-0.0056\n-4.9978,-0.0101,-0.0018\n-4.9960,0.0188,-0.0135\n-4.9810,-0.0052,0.0027\n-5.0011,0.0050,-0.0132\n-4.9995,-0.0088,-0.0023\n-5.0113,0.0078,-0.0029\n-4.9997,-0.0011,0.0074\n-5.0007,0.0031,-0.0108\n-5.0068,-0.0087,-0.0058\n-4.9954,-0.0040,-0.0097\n4.9948,0.0126,0.0015\n4.9962,0.0068,0.0157\n5.0174,-0.
0042,0.0002\n-5.0009,-0.0040,0.0056\n5.0009,-0.0191,-0.0057\n4.9988,0.0105,-0.0058\n-4.9858,-0.0030,0.0099\n5.0088,0.0086,-0.0065\n-4.9906,0.0041,0.0050\n4.9967,0.0091,-0.0084\n5.0099,0.0066,0.0089\n5.0064,0.0083,0.0003\n-4.9962,-0.0055,-0.0188\n5.0024,-0.0075,0.0005\n-4.9978,0.0098,-0.0156\n4.9926,0.0006,0.0114\n-4.9912,-0.0012,0.0046\n-4.9954,0.0092,-0.0013\n-5.0116,-0.0009,0.0019\n-5.0117,0.0076,-0.0027\n4.9869,0.0108,-0.0010\n5.0139,0.0283,0.0103\n5.0201,-0.0194,0.0032\n5.0152,0.0052,-0.0051\n4.9922,-0.0030,0.0022\n-4.9810,-0.0078,-0.0019\n4.9984,-0.0191,-0.0121\n-4.9975,-0.0092,-0.0065\n-4.9950,0.0105,0.0028\n-4.9990,0.0020,0.0106\n-4.9950,0.0031,0.0036\n5.0083,0.0078,-0.0088\n4.9983,-0.0159,0.0027\n-4.9905,0.0054,0.0045\n-5.0100,0.0017,0.0098\n5.0027,0.0113,0.0035\n-5.0026,0.0036,0.0147\n-4.9917,-0.0052,0.0078\n-5.0016,0.0077,0.0033\n-5.0030,-0.0105,-0.0142\n-5.0041,-0.0110,-0.0093\n-5.0108,-0.0013,0.0140\n5.0043,0.0056,-0.0174\n-5.0018,0.0082,-0.0131\n5.0044,-0.0089,0.0247\n5.0009,0.0123,-0.0070\n-4.9896,0.0016,-0.0099\n4.9963,0.0049,0.0029\n4.9815,-0.0071,-0.0176\n5.0214,-0.0045,0.0047\n5.0152,0.0021,0.0157\n-5.0040,0.0122,0.0021\n4.9970,0.0111,0.0091\n-5.0118,-0.0266,0.0061\n4.9895,-0.0276,0.0066\n-4.9849,0.0055,-0.0005\n5.0057,-0.0079,-0.0154\n-5.0225,0.0056,-0.0128\n5.0122,0.0103,-0.0177\n-4.9942,0.0035,-0.0076\n5.0083,0.0152,0.0176\n-5.0062,0.0047,0.0171\n-4.9960,0.0194,-0.0145\n4.9943,-0.0042,0.0018\n4.9826,-0.0119,-0.0029\n5.0097,0.0021,-0.0069\n-4.9967,0.0149,-0.0021\n5.0010,-0.0117,-0.0055\n4.9831,-0.0095,-0.0089\n-5.0216,0.0132,-0.0024\n-4.9907,0.0058,-0.0209\n4.9922,0.0137,-0.0102\n5.0151,-0.0039,0.0029\n-5.0056,0.0097,0.0051\n4.9991,-0.0183,0.0040\n-5.0128,-0.0059,-0.0026\n4.9907,-0.0095,-0.0061\n4.9838,0.0111,-0.0100\n5.0087,0.0037,0.0116\n-5.0059,-0.0046,0.0037\n4.9865,-0.0004,0.0082\n-0.0129,-0.0165,0.0004\n4.9897,-0.0036,-0.0022\n4.9822,-0.0163,0.0073\n5.0020,0.0014,0.0165\n-5.0076,0.0133,-0.0034\n4.9919,0.0061,0.0184\n-5.0158,0.0084,-0.0121\n
5.0052,0.0174,0.0051\n-5.0197,-0.0066,0.0018\n5.0062,-0.0028,-0.0028\n4.9993,-0.0028,-0.0120\n-4.9909,-0.0003,-0.0012\n-5.0004,0.0020,-0.0178\n-4.9809,-0.0167,0.0163\n4.9926,-0.0040,-0.0083\n4.9895,0.0020,0.0001\n4.9908,0.0183,0.0020\n-4.9930,0.0007,-0.0046\n-5.0038,-0.0123,0.0018\n-4.9995,0.0073,0.0013\n5.0000,-0.0006,-0.0032\n-4.9897,-0.0100,-0.0122\n-5.0033,0.0045,-0.0088\n5.0076,-0.0058,-0.0102\n-5.0033,-0.0001,-0.0052\n5.0293,0.0057,-0.0005\n5.0003,0.0056,0.0088\n-5.0156,0.0101,-0.0004\n-5.0048,0.0094,0.0081\n-5.0237,0.0086,-0.0224\n4.9966,-0.0146,0.0114\n-5.0164,-0.0073,0.0215\n5.0053,0.0220,0.0045\n-4.9899,0.0051,-0.0062\n-5.0078,0.0145,-0.0026\n-5.0067,0.0168,-0.0085\n4.9979,0.0105,0.0091\n5.0131,-0.0002,-0.0032\n5.0040,0.0165,0.0025\n4.9865,-0.0063,-0.0014\n4.9998,0.0008,0.0027\n5.0016,-0.0081,-0.0061\n5.0092,-0.0081,0.0182\n4.9860,0.0046,0.0238\n4.9983,0.0119,0.0105\n4.9962,-0.0003,-0.0128\n4.9879,-0.0042,-0.0079\n-4.9933,-0.0071,0.0004\n-5.0104,0.0118,-0.0231\n-5.0191,-0.0229,-0.0160\n-5.0069,-0.0121,-0.0044\n-5.0146,-0.0068,0.0037\n-5.0046,0.0048,-0.0154\n4.9993,-0.0042,-0.0025\n4.9874,0.0048,-0.0011\n-4.9999,0.0157,0.0069\n5.0061,-0.0058,-0.0005\n-5.0097,0.0135,-0.0047\n-5.0059,0.0066,-0.0034\n-4.9971,-0.0000,-0.0004\n5.0103,0.0006,-0.0022\n-4.9919,0.0055,-0.0110\n-4.9850,0.0057,-0.0180\n-5.0026,0.0086,-0.0011\n-5.0111,-0.0055,0.0067\n-4.9941,0.0156,0.0088\n5.0114,0.0128,0.0029\n4.9944,-0.0122,-0.0011\n-5.0110,-0.0010,0.0002\n5.0213,-0.0039,0.0057\n-4.9980,0.0060,0.0087\n4.9867,-0.0064,0.0191\n4.9925,0.0128,0.0075\n4.9832,0.0034,0.0071\n-5.0074,0.0028,-0.0010\n-4.9976,-0.0078,0.0009\n5.0154,-0.0041,-0.0050\n-4.9985,0.0104,-0.0004\n-5.0027,0.0080,0.0095\n-4.9951,-0.0012,-0.0203\n-4.9969,-0.0019,0.0124\n-5.0029,0.0092,-0.0006\n4.9857,-0.0068,-0.0089\n-4.9954,0.0096,0.0077\n4.9874,0.0094,-0.0073\n4.9973,0.0046,0.0003\n5.0101,-0.0004,0.0062\n4.9827,-0.0031,-0.0005\n-5.0101,0.0125,-0.0023\n-4.9914,-0.0059,-0.0027\n-5.0066,0.0021,-0.0124\n-5.0079,0.0000,-0.00
16\n5.0084,0.0029,-0.0093\n-4.9867,-0.0082,-0.0140\n-5.0161,-0.0004,0.0238\n4.9944,-0.0035,0.0074\n-5.0185,0.0141,-0.0137\n5.0016,-0.0001,0.0000\n4.9941,-0.0068,-0.0051\n4.9959,-0.0133,0.0035\n-4.9985,-0.0037,-0.0001\n-5.0024,0.0110,0.0066\n-4.9978,-0.0066,0.0047\n-5.0042,-0.0045,-0.0099\n4.9920,0.0117,-0.0104\n-4.9926,0.0130,0.0010\n-5.0177,-0.0047,-0.0020\n-5.0143,-0.0063,0.0107\n-4.9979,0.0076,0.0010\n-4.9949,0.0007,-0.0027\n5.0168,0.0002,-0.0043\n-5.0074,-0.0057,0.0048\n5.0027,0.0212,0.0140\n4.9998,-0.0244,-0.0017\n-5.0022,0.0042,-0.0085\n4.9781,0.0079,-0.0163\n-5.0148,-0.0131,0.0082\n-5.0094,0.0057,0.0154\n5.0079,0.0119,-0.0052\n4.9755,-0.0072,0.0088\n4.9914,0.0044,0.0157\n5.0044,-0.0021,-0.0145\n-4.9925,-0.0119,0.0077\n4.9950,0.0028,0.0028\n5.0117,0.0071,0.0132\n-5.0157,-0.0045,0.0027\n4.9948,-0.0138,0.0095\n4.9923,-0.0028,-0.0233\n5.0061,-0.0046,-0.0126\n4.9937,0.0153,0.0042\n-5.0092,0.0264,0.0054\n-4.9951,-0.0011,-0.0049\n4.9891,-0.0063,0.0140\n-5.0021,-0.0002,-0.0011\n-5.0022,0.0197,0.0045\n-5.0194,0.0051,0.0058\n5.0048,-0.0167,0.0062\n4.9945,0.0171,-0.0069\n5.0056,0.0044,0.0031\n4.9986,-0.0150,-0.0106\n4.9992,-0.0152,-0.0044\n4.9999,0.0090,-0.0053\n-5.0054,0.0010,0.0158\n-4.9981,0.0170,0.0007\n5.0158,0.0085,-0.0123\n5.0046,0.0119,0.0151\n5.0108,0.0081,-0.0223\n5.0137,-0.0192,-0.0129\n5.0114,-0.0119,-0.0107\n-4.9937,-0.0061,-0.0023\n-5.0104,-0.0086,0.0068\n5.0018,0.0072,0.0219\n-5.0110,0.0005,-0.0074\n-4.9873,-0.0080,0.0047\n4.9849,0.0057,0.0100\n5.0061,-0.0092,0.0151\n4.9915,-0.0098,-0.0136\n4.9935,-0.0103,-0.0078\n-5.0017,0.0186,0.0041\n5.0125,-0.0144,0.0050\n4.9922,0.0011,0.0001\n-5.0116,0.0078,0.0149\n-4.9867,-0.0005,-0.0061\n4.9989,-0.0196,0.0106\n4.9816,-0.0021,-0.0023\n4.9853,0.0097,-0.0098\n5.0124,-0.0020,0.0043\n4.9803,-0.0039,-0.0064\n4.9918,0.0075,0.0045\n5.0084,0.0034,-0.0187\n-5.0105,-0.0086,0.0033\n4.9974,0.0085,0.0119\n5.0033,0.0126,0.0025\n5.0042,0.0056,0.0074\n-4.9911,-0.0056,-0.0017\n-5.0126,-0.0033,-0.0020\n-4.9930,-0.0021,-0.0056\n5.0037
,0.0131,-0.0027\n5.0014,-0.0065,0.0001\n5.0001,0.0284,-0.0175\n-5.0035,-0.0079,0.0020\n4.9762,0.0048,0.0041\n-4.9945,0.0060,0.0044\n-4.9831,-0.0165,0.0030\n5.0065,-0.0013,-0.0018\n-4.9905,0.0009,-0.0123\n5.0009,0.0010,-0.0153\n-4.9887,0.0088,0.0103\n-4.9894,-0.0086,-0.0199\n-5.0097,0.0048,-0.0036\n-4.9922,-0.0151,-0.0028\n-4.9896,0.0016,0.0089\n5.0006,0.0094,-0.0161\n5.0058,0.0055,0.0060\n4.9946,0.0039,0.0029\n-5.0065,0.0050,0.0130\n4.9955,0.0207,-0.0094\n5.0141,0.0018,0.0173\n-4.9896,0.0048,-0.0078\n4.9905,-0.0055,-0.0054\n5.0097,0.0048,-0.0137\n-5.0019,-0.0039,-0.0027\n-4.9998,-0.0001,0.0001\n-4.9824,0.0040,0.0098\n-4.9997,-0.0043,0.0142\n5.0037,0.0037,-0.0029\n-4.9910,-0.0020,-0.0084\n-4.9969,-0.0051,-0.0201\n-5.0031,-0.0041,-0.0086\n-5.0084,0.0099,-0.0136\n-4.9956,0.0022,-0.0034\n5.0102,-0.0031,0.0057\n5.0014,-0.0105,0.0205\n5.0077,-0.0059,-0.0021\n-5.0114,0.0078,-0.0055\n4.9872,-0.0004,0.0167\n5.0026,-0.0115,0.0033\n4.9815,-0.0260,0.0020\n4.9847,0.0116,-0.0051\n-4.9915,-0.0060,-0.0112\n-4.9933,0.0026,-0.0074\n-4.9919,-0.0084,-0.0095\n5.0151,-0.0023,-0.0089\n5.0019,-0.0106,0.0060\n5.0025,0.0175,-0.0042\n5.0026,-0.0119,0.0066\n-4.9911,-0.0052,-0.0139\n4.9882,0.0076,0.0055\n-4.9939,-0.0084,-0.0137\n-4.9959,-0.0141,0.0016\n5.0176,0.0069,0.0147\n-4.9914,-0.0081,0.0087\n-5.0090,0.0007,-0.0108\n-4.9896,0.0041,-0.0051\n5.0067,-0.0062,0.0011\n-5.0006,0.0126,0.0070\n4.9935,-0.0018,-0.0155\n4.9899,-0.0128,0.0005\n-5.0020,-0.0045,-0.0020\n-4.9885,0.0108,-0.0081\n-4.9974,-0.0007,0.0025\n5.0019,0.0126,0.0033\n5.0117,-0.0159,-0.0001\n5.0064,-0.0144,-0.0081\n-5.0030,-0.0036,-0.0189\n-5.0042,-0.0060,0.0057\n5.0026,-0.0211,0.0095\n-4.9771,0.0160,-0.0019\n4.9943,0.0075,-0.0076\n-4.9956,0.0019,0.0070\n-4.9972,-0.0064,0.0015\n-5.0006,-0.0080,-0.0022\n-5.0114,0.0175,-0.0013\n-5.0147,0.0052,-0.0058\n4.9989,0.0007,-0.0021\n-4.9957,-0.0081,-0.0110\n-4.9945,0.0015,-0.0026\n-4.9834,-0.0142,-0.0280\n-4.9960,-0.0109,-0.0149\n5.0053,0.0042,0.0043\n-5.0025,-0.0108,-0.0011\n-5.0080,0.0235,-0.
0128\n5.0136,0.0070,0.0068\n4.9867,0.0016,-0.0056\n4.9996,-0.0266,-0.0095\n-5.0025,-0.0078,-0.0169\n-0.0097,0.0031,0.0094\n-5.0102,-0.0115,-0.0084\n-5.0087,-0.0068,-0.0071\n4.9983,-0.0075,0.0029\n4.9997,0.0013,-0.0205\n4.9824,-0.0146,0.0053\n-5.0071,0.0002,0.0032\n-4.9970,-0.0063,-0.0036\n-5.0016,-0.0013,0.0108\n-5.0047,-0.0022,0.0045\n5.0071,0.0055,-0.0081\n5.0009,0.0017,-0.0071\n5.0002,0.0114,0.0127\n5.0020,-0.0017,-0.0116\n-5.0084,-0.0101,0.0008\n4.9918,0.0060,0.0016\n4.9996,0.0002,0.0017\n4.9966,-0.0040,-0.0006\n4.9997,-0.0156,-0.0062\n4.9966,0.0011,0.0069\n5.0215,0.0085,-0.0020\n4.9897,0.0097,0.0200\n-5.0089,0.0094,-0.0053\n4.9816,0.0131,-0.0111\n4.9974,-0.0052,0.0108\n5.0165,0.0107,0.0161\n-5.0068,0.0067,-0.0046\n-4.9973,-0.0080,-0.0065\n-5.0013,0.0026,-0.0166\n5.0028,-0.0075,0.0080\n-5.0130,0.0047,0.0016\n5.0008,-0.0030,-0.0034\n-4.9837,-0.0078,-0.0030\n5.0154,-0.0147,-0.0007\n4.9973,-0.0068,-0.0154\n5.0121,-0.0126,0.0184\n-4.9928,-0.0182,0.0030\n-5.0216,0.0065,0.0182\n-5.0212,-0.0184,0.0197\n5.0079,0.0023,0.0005\n-4.9991,-0.0031,0.0010\n-5.0074,-0.0038,-0.0237\n-5.0029,0.0170,-0.0057\n-4.9975,-0.0102,-0.0065\n4.9878,0.0136,-0.0087\n5.0118,0.0098,-0.0057\n0.0158,0.0061,0.0091\n5.0167,0.0041,0.0054\n-4.9947,0.0126,-0.0010\n-5.0209,0.0004,-0.0033\n4.9923,-0.0142,0.0030\n"
  },
  {
    "path": "python_rcf_wrapper/README.md",
    "content": "# Random Cut Forest (RCF) in Python\n\nRCF (Random Cut Forest) is implemented in Java and Rust. To use it in Python, follow these steps:\n\n## Step 1: Install JPype\n\nInstall JPype to enable the interaction between Python and Java. You can find the installation instructions at [JPype Installation](https://jpype.readthedocs.io/en/latest/install.html).\n\n## Step 2: Import and Use TRCF from `python_rcf_wrapper`\n\nYou need to import `TRCF` from the `python_rcf_wrapper` and call its `process` method. Below is an example Python script to demonstrate this:\n\n```python\nfrom python_rcf_wrapper.trcf_model import TRandomCutForestModel as TRCF\nimport numpy as np\n\n# Parameters for the RCF model\nshingle_size = 8\ndimensions = 2\nnum_trees = 50\noutput_after = 32\nsample_size = 256\n\n# Initialize the RCF model\nmodel = TRCF(\n    rcf_dimensions=shingle_size * dimensions,\n    shingle_size=shingle_size,\n    num_trees=num_trees,\n    output_after=output_after,\n    anomaly_rate=0.001,\n    z_factor=3,\n    score_differencing=0.5,\n    sample_size=sample_size\n)\n\n# Generate test data\nTEST_DATA = np.random.normal(size=(300, 2))\n\n# Process each data point and print the RCF score and anomaly grade\nfor point in TEST_DATA:\n    descriptor = model.process(point)\n    print(\"RCF score: {}, Anomaly grade: {}\".format(descriptor.getRCFScore(), descriptor.getAnomalyGrade()))\n```\n"
  },
  {
    "path": "python_rcf_wrapper/__init__.py",
    "content": "from pathlib import Path\n\nimport logging\n\n# Import JPype Module for Java imports\nimport jpype.imports\nfrom jpype.types import *\n\nimport os\n\njava_home = os.environ.get(\"JAVA_HOME\", None)\n\nDEFAULT_JAVA_PATH = Path(__file__).parent / \"lib\"\n\n\njava_path = str(Path(os.environ.get(\"JAVA_LIB\", DEFAULT_JAVA_PATH)) / \"*\")\n\njpype.addClassPath(java_path)\n\n# Launch the JVM\njpype.startJVM(convertStrings=False)\n\nlogging.info(\"availableProcess {}\".format(jpype.java.lang.Runtime.getRuntime().availableProcessors()))\n"
  },
  {
    "path": "python_rcf_wrapper/rcf_model.py",
    "content": "# Java imports\nfrom typing import List, Optional, Tuple, Any\n\nimport numpy as np\nimport logging\nfrom com.amazon.randomcutforest import RandomCutForest\nimport jpype\n\nclass RandomCutForestModel:\n    \"\"\"\n    Random Cut Forest Python Binding around the AWS Random Cut Forest Official Java version:\n    https://github.com/aws/random-cut-forest-by-aws\n    \"\"\"\n\n    def __init__(self, forest: RandomCutForest = None, shingle_size: int = 8,\n                 num_trees: int = 100, random_seed: int = None,\n                 sample_size: int = 256, parallel_execution_enabled: bool = True,\n                 thread_pool_size: Optional[int] = None, lam: float=0.0001,\n                 output_after: int=256):\n        if forest is not None:\n            self.forest = forest\n        else:\n            builder = RandomCutForest.builder().numberOfTrees(num_trees). \\\n                sampleSize(sample_size). \\\n                dimensions(shingle_size). \\\n                storeSequenceIndexesEnabled(True). \\\n                centerOfMassEnabled(True). \\\n                parallelExecutionEnabled(parallel_execution_enabled). \\\n                timeDecay(lam). 
\\\n                outputAfter(output_after)\n            if thread_pool_size is not None:\n                builder.threadPoolSize(thread_pool_size)\n\n            if random_seed is not None:\n                builder = builder.randomSeed(random_seed)\n\n            self.forest = builder.build()\n\n    def score(self, point: List[float]) -> float:\n        \"\"\"\n        Compute an anomaly score for the given point.\n\n        Parameters\n        ----------\n        point: List[float]\n            A data point with shingle size\n\n        Returns\n        -------\n        float\n            The anomaly score for the given point\n\n        \"\"\"\n        return self.forest.getAnomalyScore(point)\n\n    def update(self, point: List[float]):\n        \"\"\"\n        Update the model with the data point.\n\n        Parameters\n        ----------\n        point: List[float]\n            Point with shingle size\n        \"\"\"\n        self.forest.update(point)\n\n\n    def impute(self, point: List[float]) -> List[float]:\n        \"\"\"\n        Given a point with missing values, return a new point with the missing values imputed. Each tree in the forest\n        individual produces an imputed value. For 1-dimensional points, the median imputed value is returned. 
For\n        points with more than 1 dimension, the imputed point with the 25th percentile anomaly score is returned.\n\n        Parameters\n        ----------\n        point: List[float]\n            The point with shingle size\n\n        Returns\n        -------\n        List[float]\n            The imputed point.\n        \"\"\"\n\n        num_missing = np.isnan(point).sum()\n        if num_missing == 0:\n            return point\n        missing_index = np.argwhere(np.isnan(point)).flatten()\n        imputed_shingle = list(self.forest.imputeMissingValues(point, num_missing, missing_index))\n        return imputed_shingle\n\n    def forecast(self, point: List[float]) -> float:\n        \"\"\"\n        Given one shingled data point, return one step forecast containing the next value.\n\n        Parameters\n        ----------\n        point: List[float]\n            The point with shingle size\n\n        Returns\n        -------\n        float\n            Forecast value of next timestamp.\n\n        \"\"\"\n        val = list(self.forest.extrapolateBasic(point, 1, 1, False, 0))[0]\n        return val\n\n    @property\n    def shingle_size(self) -> int:\n        \"\"\"\n        Returns\n        -------\n        int\n            Shingle size of random cut trees.\n        \"\"\"\n        return self.forest.getDimensions()\n\n    def get_attribution(self, point: List[float]) -> Tuple[List[float], List[float]]:\n        try:\n            attribution_di_vec: Any = self.forest.getAnomalyAttribution(point)\n            low: List[float] = list(attribution_di_vec.low)\n            high: List[float] = list(attribution_di_vec.high)\n            return low, high\n        except jpype.JException as exception:\n            logging.info(\"Error when loading the model: %s\", exception.message())\n            logging.info(\"Stack track: %s\", exception.stacktrace())\n            # Throw it back\n            raise exception\n\n"
  },
  {
    "path": "python_rcf_wrapper/trcf_model.py",
    "content": "# Java imports\nfrom typing import List, Optional, Tuple, Any\n\nimport numpy as np\nimport logging\nfrom com.amazon.randomcutforest.parkservices import ThresholdedRandomCutForest\nfrom com.amazon.randomcutforest.config import Precision\nfrom com.amazon.randomcutforest.parkservices import AnomalyDescriptor\nfrom com.amazon.randomcutforest.config import TransformMethod\nimport jpype\n\nclass TRandomCutForestModel:\n    \"\"\"\n    Random Cut Forest Python Binding around the AWS Random Cut Forest Official Java version:\n    https://github.com/aws/random-cut-forest-by-aws\n    \"\"\"\n\n    def __init__(self, rcf_dimensions, shingle_size, num_trees: int = 30, output_after: int=256, anomaly_rate=0.005,\n                 z_factor=2.5, score_differencing=0.5, ignore_delta_threshold=0, sample_size=256):\n        self.forest = (ThresholdedRandomCutForest\n        .builder()\n        .dimensions(rcf_dimensions)\n        .sampleSize(sample_size)\n        .numberOfTrees(num_trees)\n        .timeDecay(0.0001)\n        .initialAcceptFraction(output_after*1.0/sample_size)\n        .parallelExecutionEnabled(True)\n        .compact(True)\n        .precision(Precision.FLOAT_32)\n        .boundingBoxCacheFraction(1)\n        .shingleSize(shingle_size)\n        .anomalyRate(anomaly_rate)\n        .outputAfter(output_after)\n        .internalShinglingEnabled(True)\n        .transformMethod(TransformMethod.NORMALIZE)\n        .alertOnce(True)\n        .autoAdjust(True)\n        .build())\n        self.forest.setZfactor(z_factor)\n\n    def process(self, point: List[float]) -> AnomalyDescriptor:\n        \"\"\"\n        a single call that prepreprocesses data, compute score/grade and updates\n        state.\n\n        Parameters\n        ----------\n        point: List[float]\n            A data point with shingle size\n\n        Returns\n        -------\n        AnomalyDescriptor\n             Encapsulate detailed information about anomalies detected by RCF model. 
This class stores various attributes\n             related to an anomaly, such as confidence levels, attribution scores, and expected values.\n\n        \"\"\"\n        return self.forest.process(point, 0)\n"
  }
]